Huge refactor, much improvements, wow

Jonatan Nilsson 2021-02-05 11:50:01 +00:00
parent b63d0ca06d
commit e4c4e3b7ed
34 changed files with 1012 additions and 879 deletions

View file

@@ -1,4 +1,4 @@
-import bookshelf from '../bookshelf.mjs'
+import { createPrototype, safeColumns } from '../knex.mjs'
import Media from '../media/model.mjs'
import File from '../file/model.mjs'
import Staff from '../staff/model.mjs'
@@ -21,10 +21,136 @@ Article model:
*/
-const Article = bookshelf.createModel({
-tableName: 'articles',
-parent() {
+function ArticleItem(data) {
+Object.assign(this, data)
+}
+function Article() {
this.tableName = 'articles'
this.Model = ArticleItem
this.includes = {
staff: Staff.includeHasOne('articles.staff_id', 'id'),
media: Media.includeHasOne('articles.media_id', 'id'),
banner: Media.includeHasOne('articles.banner_id', 'id'),
parent: Page.includeHasOne('articles.parent_id', 'id'),
files: File.includeHasMany('article_id', 'articles.id'),
}
this.publicFields = this.privateFields = safeColumns([
'staff_id',
'parent_id',
'name',
'path',
'description',
'banner_id',
'media_id',
'published_at',
'is_featured',
])
this.init()
}
Article.prototype = createPrototype({
getAll(ctx, where = null, includes = [], orderBy = 'id', limitToday = false) {
return this._getAll(ctx, (qb) => {
if (where) qb.where(where)
if (limitToday) {
qb.where(this.tableName + '.published_at', '<=', (new Date()).toISOString())
}
}, includes, orderBy, [])
},
getAllFromPage(ctx, pageId, includes = [], orderBy = 'id', limitToday = false) {
return this._getAll(ctx, (qb) => {
qb = qb.innerJoin('pages', 'articles.parent_id', 'pages.id')
qb.where(subq => {
subq.where('pages.id', pageId)
.orWhere('pages.parent_id', pageId)
})
if (limitToday) {
qb.where(this.tableName + '.published_at', '<=', (new Date()).toISOString())
}
return qb
}, includes, orderBy, [])
},
getSingle(id, includes = [], require = true, ctx = null, limitToday = false) {
return this._getSingle(qb => {
qb.where(subq => {
subq.where(this.tableName + '.id', '=', Number(id) || 0)
.orWhere(this.tableName + '.path', '=', id)
})
if (limitToday && (!ctx || !ctx.state.user || ctx.state.user.level < 10)) {
qb.where(this.tableName + '.published_at', '<=', (new Date()).toISOString())
}
}, includes, require, ctx)
},
getFeaturedArticle(includes = [], ctx = null) {
return this._getSingle(qb => {
qb.where({ is_featured: true })
.where(this.tableName + '.published_at', '<=', (new Date()).toISOString())
.orderBy(this.tableName + '.published_at', 'DESC')
.select(this.knex.raw('1 as __group'))
.limit(1)
}, includes, false, ctx)
},
async getFrontpageArticles(orgPage = 1) {
let page = Math.max(orgPage, 1)
let out = {
featured: null,
items: [],
total: 0,
}
let qFeatured = this.query(qb => {
return qb.where({ is_featured: true })
.where(this.tableName + '.published_at', '<=', (new Date()).toISOString())
.orderBy(this.tableName + '.published_at', 'DESC')
.select(this.knex.raw('1 as __group'))
.limit(1)
}, ['staff', 'media', 'banner'])
let qArticles = this.query(qb => {
return qb
.where(this.tableName + '.published_at', '<=', (new Date()).toISOString())
.select(this.knex.raw('2 as __group'))
.orderBy(this.tableName + '.published_at', 'DESC')
.limit(10)
.offset((page - 1) * 10)
}, ['staff', 'media', 'banner'], null, qFeatured)
let [articles, total] = await Promise.all([
this.getAllQuery(
this.knex
.unionAll(qFeatured, true)
.unionAll(qArticles, true),
qFeatured
),
this.knex('articles')
.where(this.tableName + '.published_at', '<=', (new Date()).toISOString())
.where({ is_deleted: false })
.count('* as count'),
])
out.total = total[0].count
if (articles.length > 0 && articles[0].is_featured) {
out.featured = articles[0]
out.items = articles.slice(1)
} else {
out.items = articles
}
return out
},
setAllUnfeatured() {
return knex('articles')
.where({ is_featured: true })
.update({
is_featured: false,
})
},
/*parent() {
return this.belongsTo(Page, 'parent_id')
},
@@ -45,9 +171,9 @@ const Article = bookshelf.createModel({
.query(qb => {
qb.orderBy('id', 'asc')
})
-},
-}, {
-getAll(ctx, where = {}, withRelated = [], orderBy = 'id', limitToday = false) {
+},*/
+/*getAll(ctx, where = {}, withRelated = [], orderBy = 'id', limitToday = false) {
return this.query(qb => {
this.baseQueryAll(ctx, qb, where, orderBy)
if (limitToday) {
@@ -138,7 +264,12 @@ const Article = bookshelf.createModel({
page: page,
withRelated: ['files', 'media', 'banner', 'parent', 'staff'],
})
-},
+},*/
})
-export default Article
+const articleInstance = new Article()
+// Hook into includes for Page
+// Page.addInclude('news', articleInstance.includeHasMany('parent_id', 'pages.id'))
+export default articleInstance
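
As a usage sketch (not part of the commit), this is roughly how the new prototype-based Article instance is consumed elsewhere in this refactor, e.g. by serve.mjs; the import path, function name and argument values here are illustrative only:

// Illustrative only: mirrors the calls serve.mjs makes against the new model.
import Article from './article/model.mjs'

async function renderFrontpage(currPage = 1) {
  // getFrontpageArticles() unions the featured article with the latest ten
  // published articles and resolves to { featured, items, total }.
  let frontpage = await Article.getFrontpageArticles(currPage)
  // getSingle() accepts a numeric id or a path slug, a list of registered
  // includes to join, and (here) require = false so a miss resolves to null.
  let article = await Article.getSingle('some-path', ['media', 'banner'], false, null, true)
  return { frontpage, article }
}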

View file

@@ -16,13 +16,6 @@ export default class ArticleRoutes {
ctx.body = await this.Article.getAll(ctx, { }, ctx.state.filter.includes, ctx.query.sort || '-published_at')
}
-/** GET: /api/pages/:pageId/articles */
-async getAllPageArticles(ctx) {
-await this.security.ensureIncludes(ctx)
-ctx.body = await this.Article.getAllFromPage(ctx, ctx.params.pageId, ctx.state.filter.includes, ctx.query.sort || '-published_at')
-}
/** GET: /api/articles/:id */
async getSingleArticle(ctx) {
await this.security.ensureIncludes(ctx)
@@ -70,22 +63,14 @@ export default class ArticleRoutes {
await Article.setAllUnfeatured()
}
-let page = await this.Article.getSingle(ctx.params.id)
-page.set(ctx.request.body)
-await page.save()
-ctx.body = page
+let article = await this.Article.updateSingle(ctx, ctx.params.id, ctx.request.body)
+ctx.body = article
}
/** DELETE: /api/articles/:id */
async removeArticle(ctx) {
-let page = await this.Article.getSingle(ctx.params.id)
-page.set({ is_deleted: true })
-await page.save()
+await this.Article.updateSingle(ctx, ctx.params.id, { is_deleted: true })
ctx.status = 204
}

View file

@@ -14,13 +14,12 @@ export default class AuthHelper {
try {
staff = await this.Staff
-.query(qb => {
-qb.where({ email: ctx.request.body.username })
-qb.select('*')
-})
-.fetch({ require: true })
+.getSingleQuery(
+this.Staff.query(qb => qb.where({ email: ctx.request.body.username }), [], ['*']),
+true
+)
-await this.Staff.compare(ctx.request.body.password, staff.get('password'))
+await this.Staff.compare(ctx.request.body.password, staff.password)
} catch (err) {
if (err.message === 'EmptyResponse' || err.message === 'PasswordMismatch') {
ctx.throw(422, 'The email or password did not match')
@@ -28,6 +27,6 @@ export default class AuthHelper {
throw err
}
-return this.jwt.createToken(staff.id, staff.get('email'), staff.get('level'))
+return this.jwt.createToken(staff.id, staff.email, staff.level)
}
}

View file

@@ -1,590 +0,0 @@
import _ from 'lodash'
import knex from 'knex-core'
import bookshelf from 'bookshelf'
import config from './config.mjs'
import defaults from './defaults.mjs'
import log from './log.mjs'
let connections = [config.get('knex:connection')]
if (config.get('knex:connectionslave')) {
connections.push(config.get('knex:connectionslave'))
}
let isRecovering = false
let isUrgent = false
let currentIndex = 0
let nextIndex = currentIndex + 1
let client
let secondaryClient
/**
* Semi-gracefully shift the current active client connection from the
* current connected client and switch to the selected index server.
*/
async function shiftConnection(index) {
// Update our variables
isUrgent = false
currentIndex = index
log.warn('DB: Destroying current pool')
await client.destroy()
// Update connection settings to the new server and re-initialize the pool.
log.warn(connections[currentIndex], 'DB: Connecting to next server')
client.client.connectionSettings = connections[currentIndex]
client.initialize()
}
/**
* Start a graceful server migration. Creates a secondary database connection
* and checks other available servers we have if they're up and can be used.
*/
async function gracefulServerMigrate() {
// Check if we're already recovering and exit then.
if (isRecovering) return
// Urgent means we don't have ANY active database connection and need one quickly.
if (isUrgent) {
log.error(connections[currentIndex], `DB: Server connected to is offline.`)
} else {
log.warn(connections[currentIndex], `DB: Successfully connected to a server but its status was recovering (slave).`)
}
log.warn('DB: Attempting to gracefully connect to a different server')
isRecovering = true
// Load up next server into a new knex connection and start connecting.
if (nextIndex === connections.length) {
nextIndex = 0
}
secondaryClient = knex(getConfig(nextIndex, false))
// Keep on trying :)
while (true) {
// Make multiple attempts when we're connecting to downed or timed out databases.
let attempts = 0
while (attempts++ < 5) {
try {
log.warn(connections[nextIndex], `DB: Gracefully attempting to connect to server (attempt ${attempts}/5).`)
// Connect to the database (this creates a new pool connection) and check if it's in recovery mode
let data = await secondaryClient.raw('select pg_is_in_recovery()')
// If we reach here, we got data which means the database is up and running.
// As such, there's no need to make more attempts to same server
attempts = 6
// Check if it's master or if we are desperate
if (!data.rows[0].pg_is_in_recovery || isUrgent) {
// Found a viable server to connect to. Shift our active client to it.
log.info(connections[nextIndex], 'DB: Found available server, connecting to it')
await shiftConnection(nextIndex)
// Check if we're connected to master or just a slave.
if (!data.rows[0].pg_is_in_recovery) {
// We found a master, stop recovering
log.info(connections[nextIndex], 'DB: Connection established with master.')
isRecovering = false
break
}
}
} catch (err) {
// We only care to log weird errors like postgresql errors or such.
if (err.code !== 'ECONNREFUSED' && err.code !== 'ETIMEDOUT') {
log.error({ code: err.code, message: err.message }, `DB: Unknown error while gracefully connecting to ${connections[nextIndex].host}`)
}
// Make a next attempt after 10 seconds
await new Promise(res => setTimeout(res, 10000))
}
}
// Check if we found a master and break if we did.
if (isRecovering === false) break
// Didn't find a master :( wait 60 seconds before running another attempt
log.warn(connections[nextIndex], 'DB: Connected server was deemed unable to fit master role')
log.warn('DB: waiting 60 seconds before attempting next server')
await new Promise(res => setTimeout(res, 60000))
// Move to next server
nextIndex++
if (nextIndex === connections.length) {
nextIndex = 0
}
// Time to destroy our active pool on our current server and update
// the connection settings to the next server and re-initialise.
await secondaryClient.destroy()
secondaryClient.client.connectionSettings = connections[nextIndex]
secondaryClient.initialize()
}
// We got here means we have stopped recovery process.
// Shut down the secondary knex client and destroy it and
// remove reference to it so GC can collect it eventually, hopefully.
await secondaryClient.destroy()
nextIndex = currentIndex + 1
secondaryClient = null
}
/**
* Event handler after our pool is created and we are creating a connection.
* Here we check if the database is in recovery mode (a.k.a. slave) and if so
* start the graceful migration to migrate back to master once it's up and running.
*/
function afterCreate(conn, done) {
conn.query('select pg_is_in_recovery()', (e, res) => {
if (e) return done(e, conn)
if (res.rows[0].pg_is_in_recovery) gracefulServerMigrate().then()
done(null, conn)
})
}
/**
* Event handler for when the pool gets destroyed. Here we check
* if the connection has been marked with _ending = true.
* There are some checks available we can use to check if current
* connection was abruptly disconnected. Among those from my testing
* are as follows:
*
* conn.__knex__disposed = 'Connection ended unexpectedly'
* conn.connection._ending = true
*
* I went with connection._ending one as I feel that one's the safest.
*
*/
function beforeDestroy(conn) {
if (conn.connection._ending) {
checkActiveConnection()
}
}
/**
* Return a valid config for knex based on specific connection index.
* Note that we don't wanna hook into afterCreate or beforeDestroy
* in our secondary knex connection doing the recovery checking.
*/
function getConfig(index = 0, addEvents = true) {
return {
'client': 'pg',
'connection': connections[index],
'migrations': {
},
pool: {
afterCreate: addEvents && afterCreate || null,
min: 2,
max: 10,
// beforeDestroy: addEvents && beforeDestroy || null,
},
acquireConnectionTimeout: 10000,
}
}
client = knex(getConfig(currentIndex))
/**
* Make sure no update or delete queries are run while we're recovering.
* This allows knex to connect to a slave and only process select queries.
*
* Note: Probably does not support complicated select queries that cause
* updates on trigger or other such things.
*/
client.on('query', data => {
if (isRecovering && data.method !== 'select') {
throw new Error('Database is in read-only mode')
}
})
function checkActiveConnection(attempt = 1) {
if (attempt > 5) {
isUrgent = true
return gracefulServerMigrate().then()
}
// log.info(`DB: (Attempt ${attempt}/5) Checking connection is active.`)
client.raw('select 1').catch(err => {
if (err.code === 'ECONNREFUSED') { // err.code === 'ETIMEDOUT'
isUrgent = true
return gracefulServerMigrate().then()
}
if (err) {
let wait = 3000 // err.code like '57P03' and such.
if (err.code === 'ETIMEDOUT') {
wait = 10000
}
log.error({ code: err.code, message: err.message }, `DB: (Attempt ${attempt}/5) Error while checking connection status`)
if (attempt < 5) {
log.warn(`DB: (Attempt ${attempt}/5) Attempting again in ${wait / 1000} seconds.`)
setTimeout(() => checkActiveConnection(attempt + 1), wait)
} else {
checkActiveConnection(attempt + 1)
}
}
})
}
// Only way to check startup connection errors
log.info(getConfig(currentIndex).connection, 'DB: Connecting to server')
setTimeout(() => checkActiveConnection(), 100)
// Check if we're running tests while connected to
// potential production environment.
/* istanbul ignore if */
if (config.get('NODE_ENV') === 'test' &&
(config.get('knex:connection:database') !== 'kisildalur_test' ||
config.get('knex:connection:connection'))) {
// There is an offchance that we're running tests on
// production database. Exit NOW!
log.error('Critical: potentially running test on production environment. Shutting down.')
process.exit(1)
}
const shelf = bookshelf(client)
shelf.plugin('virtuals')
shelf.plugin('pagination')
// Helper method to create models
shelf.createModel = (attr, opts) => {
// Create default attributes to all models
let attributes = defaults(attr, {
/**
* Always include created_at and updated_at for all models default.
*/
hasTimestamps: true,
/**
* Columns selected in get single queries.
*/
privateFields: ['*'],
/**
* Event handler when fetch() is called. This gets called for both
* when getSingle() or just manual fetch() is called as well as
* when relation models through belongsTo() resources get fetched.
*
* @param {Model} model - The model instance if fetch() was used. For
* belongsTo this is the relation model thingy.
* @param {Array} columns - Array of columns to select if fetch() was used.
* Otherwise this is null.
* @param {Object} options - Options for the fetch. Includes the query
* builder object.
*/
checkFetching(model, columns, options) {
// First override that is_deleted always gets filtered out.
options.query.where({ is_deleted: false })
// If we have columns, fetch() or getSingle is the caller and no
// custom select() was called on the query.
if (columns) {
// We override columns default value of 'table_name.*' select and
// replace it with actual fields. This allows us to hide columns in
// public results.
columns.splice(...[0, columns.length].concat(
model.privateFields.map(item => `${model.tableName}.${item}`)
))
// If we have relatedData in the model object, then we're dealing with a
// belongsTo relation query. If not, then we're dealing with a custom
// fetch() with select() query.
} else if (model.relatedData) {
// We are dealing with belongsTo relation query. Override the default
// 'relation_table.*' with public select columns.
// We override the actual value in the query because doing select()
// does not override or replace the previous value during testing.
let relatedColums = options.query._statements[0].value
// During some Model.relatedData() queries, the select statement
// is actually hidden in the third statement so we grab that instead
if (options.query._statements[0].grouping === 'where') {
relatedColums = options.query._statements[2].value
}
relatedColums.splice(...[0, relatedColums.length].concat(
model.relatedData.target.publicFields.map(item => `${model.relatedData.targetTableName}.${item}`)
))
}
},
/**
* Event handler after a fetch() operation and finished.
*
* @param {Model} model - The model instance.
* @param {Object} response - Knex query response.
* @param {Object} options - Options for the fetched.
*/
checkFetched(model, response, options) {
model._ctx = options.ctx
},
/**
* Event handler when fetchALL() is called. This gets called for both
* when getAll() or just manual fetchAll().
*
* @param {CollectionBase} collection - The collection base for the model.
* This does not contain a model
* instance so privateFields is not
* accessible here.
* @param {Array} columns - Array of columns to select if fetchAll() was
* used. Otherwise this is null.
* @param {Object} options - Options for the fetch. Includes the query
* builder object.
*/
checkFetchingCollection(collection, columns, options) {
// I really really apologise for this.
if (!options.query._statements[0] ||
!options.query._statements[0].column ||
!options.query._statements[0].column.indexOf ||
options.query._statements[0].column.indexOf('is_deleted') === -1) {
// First override that is_deleted always gets filtered out.
options.query.where(`${collection.tableName()}.is_deleted`, false)
}
// If we have columns, we're dealing with a normal basic fetchAll() or
// a getAll() caller.
if (columns) {
columns.splice(...[0, columns.length].concat(collection.model.publicFields))
}
},
/**
* Event handler when fetchAll() has been called and fetched.
*
* @param {CollectionBase} collection - The collection that has been fetched.
* @param {Array} columns - Array of columns to select if fetchAll() was
* used. Otherwise this is null.
* @param {Object} options - Options for the fetch.
*/
checkFetchedCollection(collection, columns, options) {
collection.forEach(item => (item._ctx = options.ctx))
},
/**
* Event handler for hasMany relation fetching. This gets called whenever
* hasMany related is being fetched.
*
* @param {CollectionBase} collection - The collection base for the model.
* This does not contain a model
* instance so privateFields is not
* accessible here.
* @param {Array} columns - Array of columns to select. This is
* always null.
* @param {Object} options - Options for the fetch. Includes the query
* builder object.
*/
checkFetchingHasMany(collection, columns, options) {
// First override that is_deleted always gets filtered out.
options.query.where({ is_deleted: false })
// Then we override the actual value in the query because doing select()
// does not override or replace the previous value during testing.
let relatedColums
if (options.query._statements[0].grouping === 'columns') {
relatedColums = options.query._statements[0].value
} else {
relatedColums = options.query._statements[1].value
}
relatedColums.splice(...[0, relatedColums.length]
.concat(collection.model.publicFields.map(
item => `${collection.relatedData.targetTableName}.${item}`
))
)
// check if pagination is being requested and we support it
if (collection.relatedName
&& options.ctx
&& options.ctx.state.pagination
&& options.ctx.state.pagination[collection.relatedName]) {
let pagination = options.ctx.state.pagination[collection.relatedName]
options.query.limit(pagination.perPage).offset((pagination.page - 1) * pagination.perPage)
}
},
/**
* Event handler for belongsTo relation fetching. This gets called whenever
* belongsTo related is being fetched.
*
* @param {CollectionBase} collection - The collection base for the model.
* This does not contain a model
* instance so privateFields is not
* accessible here.
* @param {Array} columns - Array of columns to select. This is
* always null.
* @param {Object} options - Options for the fetch. Includes the query
* builder object.
*/
checkFetchingBelongs(model, columns, options) {
// First override that is_deleted always gets filtered out.
options.query.where({ is_deleted: false })
// Then we override the actual value in the query because doing select()
// does not override or replace the previous value during testing.
// The difference between belongsTo and hasMany is in belongsTo, the
// actual 'table_name.*' value is in the second item in _statements as
// opposed to the first.
let relatedColums = options.query._statements[1].value
relatedColums.splice(...[0, relatedColums.length].concat(
model.model.publicFields.map(item => `${model.relatedData.targetTableName}.${item}`)
))
// check if pagination is being requested and we support it
if (model.relatedName
&& options.ctx
&& options.ctx.state.pagination
&& options.ctx.state.pagination[model.relatedName]) {
let pagination = options.ctx.state.pagination[model.relatedName]
options.query.limit(pagination.perPage).offset((pagination.page - 1) * pagination.perPage)
}
},
/**
* Initialize a new instance of model. This does not get called when
* relations to this model is being fetched though.
*/
initialize() {
this.on('fetching', this.checkFetching)
this.on('fetched', this.checkFetched)
this.on('fetching:collection', this.checkFetchingCollection)
this.on('fetched:collection', this.checkFetchedCollection)
},
/**
* Define a hasMany relations with the model. This version as opposed to
* the default hasMany has filtering enabled to filter is_deleted items
* out among other things.
*/
hasManyFiltered(model, relatedName, foreignKey) {
let out = this.hasMany(model, foreignKey)
// Hook to the fetching event on the relation
out.on('fetching', this.checkFetchingHasMany)
out.on('fetched', this.checkFetched)
// Add related name if specified to add pagination support
out.relatedName = relatedName
return out
},
/**
* Define belongsToMany relations with the model. This version as opposed
* to the default belongsToMany has filtering enabled to filter is_deleted items
* out among other things.
*/
belongsToManyFiltered(model, table, foreignKey, otherKey, relatedName) {
let out = this.belongsToMany(model, table, foreignKey, otherKey)
// Hook to the fetching event on the relation
out.on('fetching', this.checkFetchingBelongs)
out.on('fetched', this.checkFetched)
// Add related name if specified to add pagination support
out.relatedName = relatedName
return out
},
})
// Create default options for all models
let options = defaults(opts, {
/**
* Columns selected in get many queries and relation queries.
*/
publicFields: ['*'],
/**
* Create new model object in database.
*
* @param {Object} data - The values the new model should have
* @return {Model} The resulted model
*/
create(data) {
return this.forge(data).save()
},
/**
* Apply basic filtering to query builder object. Basic filtering
* applies stuff like custom filtering in the query and ordering and other stuff
*
* @param {Request} ctx - API Request object
* @param {QueryBuilder} qb - knex query builder object to apply filtering on
* @param {Object} [where={}] - Any additional filtering
* @param {string} [orderBy=id] - property to order result by
* @param {Object[]} [properties=[]] - Properties allowed to filter by from query
*/
_baseQueryAll(ctx, qb, where = {}, orderBy = 'id', properties = []) {
let orderProperty = orderBy
let sort = 'ASC'
if (orderProperty[0] === '-') {
orderProperty = orderProperty.slice(1)
sort = 'DESC'
}
qb.where(where)
_.forOwn(ctx.state.filter.where(properties), (value, key) => {
if (key.startsWith('is_')) {
qb.where(key, value === '0' ? false : true)
} else {
qb.where(key, 'LIKE', `%${value}%`)
}
})
_.forOwn(ctx.state.filter.whereNot(properties), (value, key) => {
if (key.startsWith('is_')) {
qb.whereNot(key, value === '0' ? false : true)
} else {
qb.where(key, 'NOT LIKE', `%${value}%`)
}
})
qb.orderBy(orderProperty, sort)
},
/**
* Wrapper for _baseQueryAll that can be overridden.
*/
baseQueryAll(ctx, qb, where, orderBy, properties) {
return this._baseQueryAll(ctx, qb, where, orderBy, properties)
},
getSingle(id, withRelated = [], require = true, ctx = null) {
let where = { id: Number(id) || 0 }
return this.query({ where })
.fetch({ require, withRelated, ctx })
},
getAll(ctx, where = {}, withRelated = [], orderBy = 'id') {
return this.query(qb => {
this.baseQueryAll(ctx, qb, where, orderBy)
})
.fetchPage({
pageSize: ctx.state.pagination.perPage,
page: ctx.state.pagination.page,
withRelated,
ctx: ctx,
})
.then(result => {
ctx.state.pagination.total = result.pagination.rowCount
return result
})
},
})
return shelf.Model.extend(attributes, options)
}
shelf.safeColumns = (extra) =>
['id', 'is_deleted', 'created_at', 'updated_at'].concat(extra || [])
export default shelf

View file

@@ -1,4 +1,4 @@
-import bookshelf from '../bookshelf.mjs'
+import { createPrototype, safeColumns } from '../knex.mjs'
import config from '../config.mjs'
/*
@@ -20,26 +20,40 @@ File model:
*/
-const File = bookshelf.createModel({
-tableName: 'files',
-virtuals: {
-url() {
-return `${File.baseUrl}${this.get('path')}`
-},
-magnet() {
-let meta = this.get('meta')
-if (!meta.torrent) return ''
-return 'magnet:?'
-+ 'xl=' + this.get('size')
-+ '&dn=' + encodeURIComponent(meta.torrent.name)
-+ '&xt=urn:btih:' + meta.torrent.hash
-+ meta.torrent.announce.map(item => ('&tr=' + encodeURIComponent(item))).join('')
-},
-},
-}, {
-baseUrl: config.get('upload:baseurl'),
+const baseUrl = config.get('upload:baseurl')
+function FileItem(data) {
+Object.assign(this, data)
+this.url = `${baseUrl}${this.path}`
+let meta = this.meta
+if (!meta.torrent) {
+this.magnet = ''
+} else {
+this.magnet = 'magnet:?'
++ 'xl=' + this.size
++ '&dn=' + encodeURIComponent(meta.torrent.name)
++ '&xt=urn:btih:' + meta.torrent.hash
++ meta.torrent.announce.map(item => ('&tr=' + encodeURIComponent(item))).join('')
+}
+}
function File() {
this.tableName = 'files'
this.Model = FileItem
this.publicFields = this.privateFields = safeColumns([
'article_id',
'filename',
'filetype',
'path',
'size',
'staff_id',
'meta',
])
this.init()
}
File.prototype = createPrototype({
})
-export default File
+export default new File()
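
The magnet URI built in FileItem assumes the file's meta column carries torrent metadata roughly shaped as below; this is a hypothetical example and the values are made up:

// Hypothetical meta JSON for a files row; FileItem reads meta.torrent.
const meta = {
  torrent: {
    name: 'episode-01.mkv', // becomes the dn= display name
    hash: 'c12fe1c06bba254a9dc9f519b335aa7c1367a88a', // becomes xt=urn:btih:<hash>
    announce: [ // each entry becomes an &tr= tracker parameter
      'udp://tracker.example.com:6969/announce',
    ],
  },
}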

View file

@@ -53,18 +53,8 @@ export default class FileRoutes {
})
}
-async getAllFiles(ctx) {
-ctx.body = await this.File.getAll(ctx)
-}
async removeFile(ctx) {
-let file = await this.File.getSingle(ctx.params.id)
-file.set({
-is_deleted: true,
-})
-await file.save()
+await this.File.updateSingle(ctx, ctx.params.id, { is_deleted: true })
ctx.status = 200
}

View file

@@ -1,13 +1,11 @@
import _ from 'lodash'
import jwt from 'jsonwebtoken'
import koaJwt from 'koa-jwt'
-import Staff from './staff/model.mjs'
import config from './config.mjs'
export default class Jwt {
constructor(opts = {}) {
Object.assign(this, {
-Staff: opts.Staff || Staff,
jwt: opts.jwt || jwt,
})
}

api/knex.mjs (new file, 414 lines)
View file

@@ -0,0 +1,414 @@
import _ from 'lodash'
import knexCore from 'knex-core'
import config from './config.mjs'
import defaults from './defaults.mjs'
import log from './log.mjs'
const knex = knexCore(config.get('knex'))
const functionMap = new Map()
let joinPostFix = 1
// Helper method to create models
export function createPrototype(opts) {
return defaults(opts, {
knex: knex,
init() {
if (!this.tableName) throw new Error('createModel was called with missing tableName')
if (!this.Model) throw new Error('createModel was called with missing Model')
if (!this.includes) this.includes = {}
if (!this.publicFields) throw new Error(this.tableName + ' was missing publicFields')
if (!this.privateFields) throw new Error(this.tableName + ' was missing privateFields')
this.__includeFields = this.publicFields.map(x => x)
this.publicFields = this.publicFields.map(x => `${this.tableName}.${x} as ${this.tableName}.${x}`)
if (this.publicFields !== this.privateFields) {
this.privateFields = this.privateFields.map(x => `${this.tableName}.${x} as ${this.tableName}.${x}`)
}
},
addInclude(name, include) {
this.includes[name] = include
},
_includeBase(type, subq) {
let self = this
let postfix = '_' + joinPostFix++
let table = this.tableName + postfix
return {
type: type,
postfix: postfix,
table: table,
fields: this.__includeFields.map(x => `${table}.${x} as ${table}.${x}`),
model: self,
qb: function(qb) {
return subq(self, table, qb)
}
}
},
includeHasOne(source_id, target_id) {
return this._includeBase(1, function(self, table, qb) {
return qb.leftOuterJoin(`${self.tableName} as ${table}`, function() {
this.on(source_id, '=', table + '.' + target_id)
.andOn(table + '.is_deleted', '=', knex.raw('false'))
})
})
},
includeHasMany(source_id, target_id, subq = null) {
return this._includeBase(2, function(self, table, qb) {
return qb.leftOuterJoin(`${self.tableName} as ${table}`, function() {
this.on(table + '.' + source_id, '=', target_id)
.andOn(table + '.is_deleted', '=', knex.raw('false'))
if (subq) {
subq(this, self)
}
})
})
},
async getAllQuery(query, queryContext = null) {
let context = (queryContext || query).queryContext()
if (!context.tables) throw new Error('getAll was called before query')
let tables = context.tables
let tableMap = new Map(tables)
let data = await query
if (data.length === 0) {
return data
}
let keys = Object.keys(data[0])
for (let i = 0; i < keys.length; i++) {
let parts = keys[i].split('.')
if (parts.length === 1) {
if (parts[0] !== '__group') {
tables[0][1].builder += `'${parts[0]}': data.${keys[i]},`
}
} else {
let builder = tableMap.get(parts[0])
if (builder) {
builder.builder += `'${parts[1]}': data['${keys[i]}'],`
}
}
}
tableMap.forEach(table => {
table.builder += '}'
table.fn = functionMap.get(table.builder)
if (!table.fn) {
table.fn = new Function('data', table.builder)
functionMap.set(table.builder, table.fn)
}
})
let out = []
let includesTwoSet = new Set()
for (let i = 0; i < data.length; i++) {
let baseItem = null
for (var t = 0; t < tables.length; t++) {
let table = tables[t][1]
let propertyName = table.include
let formattedData = table.fn(data[i])
if (!formattedData) {
if (propertyName && baseItem[propertyName] === undefined) {
console.log('emptying')
baseItem[propertyName] = (table.includeType.type === 1 ? null : [])
}
continue
}
let row = new table.Model(table.fn(data[i]))
let rowId = row.id
if (table.isRoot && data[i].__group) {
rowId = data[i].__group + '_' + row.id
}
let foundItem = table.map.get(rowId)
// If we didn't find this item, current table model or joined table model
// is new, therefore we need to create it
if (!foundItem) {
// Create a reference to it if we're dealing with the root object
if (table.isRoot) {
baseItem = row
}
table.map.set(rowId, row)
if (table.isRoot) {
// Add item to root array since this is a root array
out.push(baseItem)
} else if (table.includeType.type === 1) {
// This is a single instance join for the root mode,
// set it directly to the root
baseItem[propertyName] = row
} else if (table.includeType.type === 2) {
// This is an array instance for the root model. Time to dig in.
/* if (!baseItem[propertyName]) {
baseItem[propertyName] = []
} */
if (!includesTwoSet.has(baseItem.id + '_' + propertyName + '_' + row.id)) {
baseItem[propertyName].push(row)
includesTwoSet.add(baseItem.id + '_' + propertyName + '_' + row.id)
}
}
} else if (table.isRoot) {
baseItem = foundItem
} else if (propertyName) {
if (table.includeType.type === 1 && !baseItem[propertyName]) {
baseItem[propertyName] = foundItem
} else if (table.includeType.type === 2 && !includesTwoSet.has(baseItem.id + '_' + propertyName + '_' + row.id)) {
/* if (!baseItem[propertyName]) {
baseItem[propertyName] = []
} */
baseItem[propertyName].push(foundItem)
includesTwoSet.add(baseItem.id + '_' + propertyName + '_' + row.id)
}
}
}
}
return out
},
async getSingleQuery(query, require = true) {
let data = await this.getAllQuery(query)
if (data.length) return data[0]
if (require) throw new Error('EmptyResponse')
return null
},
query(qb, includes = [], customFields = null, parent = null, pagination = null, paginationOrderBy = null) {
let query
let fields
if (customFields === true) {
fields = this.publicFields
} else {
fields = customFields ? customFields : this.publicFields
}
if (pagination) {
query = knex.with(this.tableName, subq => {
subq.select(this.tableName + '.*')
.from(this.tableName)
.where(this.tableName + '.is_deleted', '=', 'false')
qb(subq)
subq.orderBy(pagination.orderProperty, pagination.sort)
.limit(pagination.perPage)
.offset((pagination.page - 1) * pagination.perPage)
}).from(this.tableName)
} else {
query = knex(this.tableName).where(this.tableName + '.is_deleted', '=', 'false')
qb(query)
}
let tables = parent && parent.queryContext().tables || []
let tableMap = new Map(tables)
if (!tables.length) {
tables.push([this.tableName, {
builder: 'return {',
fn: null,
map: new Map(),
Model: this.Model,
isRoot: true,
include: null,
includeType: {},
}])
}
query.select(fields)
for (let i = 0; i < includes.length; i++) {
let includeType = this.includes[includes[i]]
if (!includeType) {
throw new Error(`Model ${this.tableName} was missing includes ${includes[i]}`)
}
includeType.qb(query).select(includeType.fields)
if (tableMap.has(includeType.table)) {
continue
}
if (includeType.type === 1) {
tables[0][1].builder += `${includes[i]}: null,`
} else {
tables[0][1].builder += `${includes[i]}: [],`
}
let newTable = [
includeType.table,
{
builder: `if (!data.id && !data['${includeType.table}.id']) {/*console.log('${includeType.table}', data.id, data['${includeType.table}.id']);*/return null;} return {`,
fn: null,
map: new Map(),
isRoot: false,
Model: includeType.model.Model,
include: includes[i],
includeType: includeType,
}
]
tables.push(newTable)
tableMap.set(newTable[0], newTable[1])
}
if (pagination) {
query.orderBy(pagination.orderProperty, pagination.sort)
}
query.queryContext({ tables: tables })
return query
},
async _getAll(ctx, subq, includes = [], orderBy = 'id') {
let orderProperty = orderBy
let sort = 'ASC'
if (orderProperty[0] === '-') {
orderProperty = orderProperty.slice(1)
sort = 'DESC'
}
ctx.state.pagination.sort = sort
ctx.state.pagination.orderProperty = orderProperty
let [data, total] = await Promise.all([
this.getAllQuery(this.query(qb => {
let qbnow = qb
if (subq) {
qbnow = subq(qb) || qb
}
return qbnow
}, includes, null, null, ctx.state.pagination)),
(() => {
let qb = this.knex(this.tableName)
if (subq) {
qb = subq(qb) || qb
}
qb.where(this.tableName + '.is_deleted', '=', false)
return qb.count('* as count')
})(),
])
ctx.state.pagination.total = total[0].count
return data
},
getAll(ctx, subq, includes = [], orderBy = 'id') {
return this._getAll(ctx, subq, includes, orderBy)
},
_getSingle(subq, includes = [], require = true, ctx = null) {
return this.getSingleQuery(this.query(qb => {
return qb
.where(qb => {
if (subq) subq(qb)
})
}, includes), require)
},
getSingle(id, includes = [], require = true, ctx = null) {
return this._getSingle(qb => qb.where(this.tableName + '.id', '=', Number(id) || 0 ), includes, require, ctx)
},
async updateSingle(ctx, id, body) {
// Fetch the item in question, making sure it exists
let item = await this.getSingle(id, [], true, ctx)
// Paranoia checking
if (typeof(item.id) !== 'number') throw new Error('Item was missing id')
body.updated_at = new Date()
// Update our item in the database
let out = await knex(this.tableName)
.where({ id: item.id })
// Map out the 'as' from the private fields so it returns a clean
// response in the body
.update(body, this.privateFields.map(x => x.split('as')[0]))
// More paranoia checking
if (out.length < 1) throw new Error('Updated item returned empty result')
return out[0]
},
/**
* Create new entry in the database.
*
* @param {Object} data - The values the new item should have
* @return {Object} The resulting object
*/
async create(body) {
body.created_at = new Date()
body.updated_at = new Date()
let out = await knex(this.tableName)
// Map out the 'as' from the private fields so it returns a clean
// response in the body
.insert(body, this.privateFields.map(x => x.split('as')[0]))
// More paranoia checking
if (out.length < 1) throw new Error('Updated item returned empty result')
return out[0]
},
/**
* Apply basic filtering to query builder object. Basic filtering
* applies stuff like custom filtering in the query and ordering and other stuff
*
* @param {Request} ctx - API Request object
* @param {QueryBuilder} qb - knex query builder object to apply filtering on
* @param {Object} [where={}] - Any additional filtering
* @param {string} [orderBy=id] - property to order result by
* @param {Object[]} [properties=[]] - Properties allowed to filter by from query
*/
_baseQueryAll(ctx, qb, where = {}, orderBy = 'id', properties = []) {
let orderProperty = orderBy
let sort = 'ASC'
if (orderProperty[0] === '-') {
orderProperty = orderProperty.slice(1)
sort = 'DESC'
}
qb.where(where)
_.forOwn(ctx.state.filter.where(properties), (value, key) => {
if (key.startsWith('is_')) {
qb.where(key, value === '0' ? false : true)
} else {
qb.where(key, 'LIKE', `%${value}%`)
}
})
_.forOwn(ctx.state.filter.whereNot(properties), (value, key) => {
if (key.startsWith('is_')) {
qb.whereNot(key, value === '0' ? false : true)
} else {
qb.where(key, 'NOT LIKE', `%${value}%`)
}
})
qb.orderBy(orderProperty, sort)
},
/*async getSingle(id, require = true, ctx = null) {
let where = { id: Number(id) || 0 }
let data = await knex(this.tableName).where(where).first(this.publicFields)
if (!data && require) throw new Error('EmptyResponse')
return data
},*/
})
}
export function safeColumns(extra) {
return ['id', /*'is_deleted',*/ 'created_at', 'updated_at'].concat(extra || [])
}
/*shelf.safeColumns = (extra) =>
['id', 'is_deleted', 'created_at', 'updated_at'].concat(extra || [])*/
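
As a usage sketch (assuming a model instance built with createPrototype that has a registered 'banner' include, such as the Page model below): query() selects every column aliased as 'table.column' and records the join layout in queryContext, and getAllQuery() folds those flat rows back into Model instances with each joined table nested under its include name.

// Sketch only; 'Page' stands for any model instance built on createPrototype.
async function listPagesWithBanner(Page) {
  let rows = await Page.getAllQuery(
    Page.query(qb => qb.orderBy('pages.name', 'ASC'), ['banner'])
  )
  // Each row is a Page.Model instance; joined media columns are grouped
  // under row.banner (or null when the left join found nothing).
  return rows.map(row => ({ id: row.id, name: row.name, banner_id: row.banner && row.banner.id }))
}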

View file

@@ -1,5 +1,5 @@
import path from 'path'
-import bookshelf from '../bookshelf.mjs'
+import { createPrototype, safeColumns } from '../knex.mjs'
import config from '../config.mjs'
/*
@@ -23,6 +23,55 @@ Media model:
*/
const baseUrl = config.get('upload:baseurl')
function MediaItem(data) {
Object.assign(this, data)
this.small_url = `${baseUrl}${this.small_image}`
this.medium_url = `${baseUrl}${this.medium_image}`
this.large_url = `${baseUrl}${this.large_image}`
this.small_url_avif = this.small_image_avif ? `${baseUrl}${this.small_image_avif}` : null
this.medium_url_avif = this.small_image_avif ? `${baseUrl}${this.medium_image_avif}` : null
this.large_url_avif = this.small_image_avif ? `${baseUrl}${this.large_image_avif}` : null
this.link = `${baseUrl}${this.org_image}`
}
function Media() {
this.tableName = 'media'
this.Model = MediaItem
this.publicFields = this.privateFields = safeColumns([
'filename',
'filetype',
'small_image',
'medium_image',
'large_image',
'org_image',
'size',
'staff_id',
'small_image_avif',
'medium_image_avif',
'large_image_avif',
])
this.init()
}
Media.prototype = createPrototype({
baseUrl: baseUrl,
getSubUrl(input, size, type = 'jpg') {
if (!input) return input
let output = input
if (path.extname(input)) {
let ext = path.extname(input).toLowerCase()
output = input.slice(0, -ext.length)
}
return `${output}.${size}.${type}`
},
})
/*
const Media = bookshelf.createModel({
tableName: 'media',
@@ -79,6 +128,6 @@ const Media = bookshelf.createModel({
}
return `${output}.${size}.${type}`
},
-})
+})*/
-export default Media
+export default new Media()

View file

@@ -63,13 +63,7 @@ export default class MediaRoutes {
}
async removeMedia(ctx) {
-let media = await this.Media.getSingle(ctx.params.id)
-media.set({
-is_deleted: true,
-})
-await media.save()
+await this.Media.updateSingle(ctx, ctx.params.id, { is_deleted: true })
ctx.status = 200
}

View file

@@ -1,8 +1,8 @@
-import bookshelf from '../bookshelf.mjs'
+import { createPrototype, safeColumns } from '../knex.mjs'
import Media from '../media/model.mjs'
-import Staff from '../staff/model.mjs'
-import Article from '../article/model.mjs'
+// import Staff from '../staff/model.mjs'
+// import Article from '../article/model.mjs'
/*
@@ -25,10 +25,36 @@ Page model:
*/
-const Page = bookshelf.createModel({
-tableName: 'pages',
-banner() {
+function PageItem(data) {
+Object.assign(this, data)
+this.children = []
+}
+function Page() {
this.tableName = 'pages'
this.Model = PageItem
this.includes = {
media: Media.includeHasOne('pages.media_id', 'id'),
banner: Media.includeHasOne('pages.banner_id', 'id'),
}
this.publicFields = this.privateFields = safeColumns([
'staff_id',
'parent_id',
'name',
'path',
'description',
'banner_id',
'media_id',
])
this.init()
}
Page.prototype = createPrototype({
/* includes: {
staff: Staff.includeHasOne('staff_id', 'id'),
}, */
/*banner() {
return this.belongsTo(Media, 'banner_id')
},
@@ -56,22 +82,44 @@ const Page = bookshelf.createModel({
staff() {
return this.belongsTo(Staff, 'staff_id')
-},
-}, {
-getSingle(id, withRelated = [], require = true, ctx = null) {
-return this.query(qb => {
-qb.where({ id: Number(id) || 0 })
-.orWhere({ path: id })
+},*/
+getSingle(id, includes = [], require = true, ctx = null) {
+return this._getSingle(qb => {
+qb.where(subq => {
+subq.where(this.tableName + '.id', '=', Number(id) || 0)
+.orWhere(this.tableName + '.path', '=', id)
})
-.fetch({ require, withRelated, ctx })
+}, includes, require, ctx)
},
-getTree() {
-return this.query(qb => {
-qb.where({ parent_id: null })
-qb.select(['id', 'name', 'path'])
-qb.orderBy('name', 'ASC')
-}).fetchAll({ withRelated: ['children'] })
+async getTree() {
+let items = await this.getAllQuery(this.query(
+qb => qb.orderBy('name', 'ASC'),
+[],
+['parent_id', 'id', 'name', 'path']
+))
let out = []
let map = new Map()
for (let i = 0; i < items.length; i++) {
if (!items[i].parent_id) {
out.push(items[i])
}
map.set(items[i].id, items[i])
}
for (let i = 0; i < items.length; i++) {
if (items[i].parent_id && map.has(items[i].parent_id)) {
map.get(items[i].parent_id).children.push(items[i])
}
}
return out
},
})
-export default Page
+const pageInstance = new Page()
pageInstance.addInclude('children', pageInstance.includeHasMany('parent_id', 'pages.id'))
pageInstance.addInclude('parent', pageInstance.includeHasOne('pages.parent_id', 'id'))
export default pageInstance
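
getTree() now assembles the hierarchy in memory from one flat query: every page is indexed by id, children are attached via parent_id, and only the roots are returned. The result is roughly shaped like this (illustrative data only):

// Hypothetical return value of pageInstance.getTree().
[
  {
    id: 1, name: 'Anime', path: 'anime', parent_id: null,
    children: [
      { id: 3, name: 'Winter 2021', path: 'winter-2021', parent_id: 1, children: [] },
    ],
  },
  { id: 2, name: 'Manga', path: 'manga', parent_id: null, children: [] },
]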

View file

@@ -9,16 +9,16 @@ export default class PageRoutes {
})
}
/** GET: /api/pagetree */
async getPageTree(ctx) {
ctx.body = await this.Page.getTree()
}
/** GET: /api/pages */
async getAllPages(ctx) {
await this.security.ensureIncludes(ctx)
-let filter = {}
-if (ctx.query.tree && ctx.query.tree === 'true') {
-filter.parent_id = null
-}
-ctx.body = await this.Page.getAll(ctx, filter, ctx.state.filter.includes, 'name')
+ctx.body = await this.Page.getAll(ctx, null, ctx.state.filter.includes, 'name')
}
/** GET: /api/pages/:id */
@@ -39,22 +39,14 @@ export default class PageRoutes {
async updatePage(ctx) {
await this.security.validUpdate(ctx)
-let page = await this.Page.getSingle(ctx.params.id)
-page.set(ctx.request.body)
-await page.save()
+let page = await this.Page.updateSingle(ctx, ctx.params.id, ctx.request.body)
ctx.body = page
}
/** DELETE: /api/pages/:id */
async removePage(ctx) {
-let page = await this.Page.getSingle(ctx.params.id)
-page.set({ is_deleted: true })
-await page.save()
+await this.Page.updateSingle(ctx, ctx.params.id, { is_deleted: true })
ctx.status = 204
}

View file

@@ -2,13 +2,14 @@
import Router from 'koa-router'
import access from './access/index.mjs'
+import { restrict } from './access/middleware.mjs'
import AuthRoutes from './authentication/routes.mjs'
-import MediaRoutes from './media/routes.mjs'
-import FileRoutes from './file/routes.mjs'
+// import MediaRoutes from './media/routes.mjs'
+// import FileRoutes from './file/routes.mjs'
import PageRoutes from './page/routes.mjs'
import ArticleRoutes from './article/routes.mjs'
import StaffRoutes from './staff/routes.mjs'
-import { restrict } from './access/middleware.mjs'
const router = new Router()
@@ -17,18 +18,19 @@ const authentication = new AuthRoutes()
router.post('/api/login/user', authentication.loginUser.bind(authentication))
// API Media
-const media = new MediaRoutes()
-router.get('/api/media', restrict(access.Manager), media.getAllMedia.bind(media))
-router.post('/api/media', restrict(access.Manager), media.upload.bind(media))
-router.del('/api/media/:id', restrict(access.Manager), media.removeMedia.bind(media))
+// const media = new MediaRoutes()
+// router.get('/api/media', restrict(access.Manager), media.getAllMedia.bind(media))
+// router.post('/api/media', restrict(access.Manager), media.upload.bind(media))
+// router.del('/api/media/:id', restrict(access.Manager), media.removeMedia.bind(media))
// API File
-const file = new FileRoutes()
-router.get('/api/file', restrict(access.Manager), file.getAllFiles.bind(file))
-router.post('/api/articles/:articleId/file', restrict(access.Manager), file.upload.bind(file))
-router.del('/api/file/:id', restrict(access.Manager), file.removeFile.bind(file))
+// const file = new FileRoutes()
+// router.get('/api/file', restrict(access.Manager), file.getAllFiles.bind(file))
+// router.post('/api/articles/:articleId/file', restrict(access.Manager), file.upload.bind(file))
+// router.del('/api/file/:id', restrict(access.Manager), file.removeFile.bind(file))
const page = new PageRoutes()
+router.get('/api/pagetree', page.getPageTree.bind(page))
router.get('/api/pages', page.getAllPages.bind(page))
router.get('/api/pages/:id', page.getSinglePage.bind(page))
router.post('/api/pages', restrict(access.Manager), page.createPage.bind(page))
@@ -40,14 +42,13 @@ router.get('/api/articles', restrict(access.Manager), article.getAllArticles.bin
router.get('/api/articles/public', article.getPublicAllArticles.bind(article))
router.get('/api/articles/public/:id', article.getPublicSingleArticle.bind(article))
router.get('/api/pages/:pageId/articles/public', article.getPublicAllPageArticles.bind(article))
-router.get('/api/pages/:pageId/articles', restrict(access.Manager), article.getAllPageArticles.bind(article))
router.get('/api/articles/:id', restrict(access.Manager), article.getSingleArticle.bind(article))
router.post('/api/articles', restrict(access.Manager), article.createArticle.bind(article))
router.put('/api/articles/:id', restrict(access.Manager), article.updateArticle.bind(article))
router.del('/api/articles/:id', restrict(access.Manager), article.removeArticle.bind(article))
const staff = new StaffRoutes()
-router.get('/api/staff', restrict(access.Admin), staff.getAllStaff.bind(staff))
+router.get('/api/staff', restrict(access.Manager), staff.getAllStaff.bind(staff))
router.get('/api/staff/:id', restrict(access.Admin), staff.getSingleStaff.bind(staff))
router.post('/api/staff', restrict(access.Admin), staff.createStaff.bind(staff))
router.put('/api/staff/:id', restrict(access.Admin), staff.updateStaff.bind(staff))

View file

@@ -4,6 +4,7 @@ import striptags from 'striptags'
import config from './config.mjs'
import Page from './page/model.mjs'
+// import Article from '../app/article/model.mjs'
import Article from './article/model.mjs'
const body = readFileSync('./public/index.html').toString()
@@ -103,6 +104,7 @@ function mapPage(x) {
export async function serveIndex(ctx, path) {
let tree = null
let data = null
+let subdata = null
let links = null
let featured = null
let url = frontend + ctx.request.url
@@ -111,29 +113,26 @@ export async function serveIndex(ctx, path) {
let title = 'NFP Moe - Anime/Manga translation group'
let description = 'Small fansubbing and scanlation group translating and encoding our favourite shows from Japan.'
try {
-tree = (await Page.getTree()).toJSON()
-tree.forEach(item => (
-item.children = item.children.map(x => (
-{ id: x.id, name: x.name, path: x.path }
-))
-))
-featured = await Article.getFeatured(['media', 'banner'])
-if (featured) {
-featured = mapArticle(true, featured.toJSON(), true, false)
-}
+tree = await Page.getTree()
+let currPage = Number(ctx.query.page || '1')
if (path === '/') {
-let currPage = Number(ctx.query.page || '1')
-data = await Article.getFrontpageArticles(currPage)
-if (data.pagination.rowCount > 10) {
+let frontpage = await Article.getFrontpageArticles(currPage)
+featured = frontpage.featured
+data = frontpage.items.map(mapArticle.bind(null, true))
+if (frontpage.total > currPage * 10) {
links = {
+first: currPage > 1 ? { page: 1, title: 'First' } : null,
+previous: currPage > 1 ? { page: currPage - 1, title: 'Previous' } : null,
current: { title: 'Page ' + currPage },
-next: { page: 2, title: 'Next' },
-last: { page: Math.ceil(data.pagination.rowCount / 10), title: 'Last' },
+next: { page: currPage + 1, title: 'Next' },
+last: { page: Math.ceil(frontpage.total / 10), title: 'Last' },
}
} else {
links = {
+first: currPage > 1 ? { page: 1, title: 'First' } : null,
+previous: currPage > 1 ? { page: currPage - 1, title: 'Previous' } : null,
current: { title: 'Page 1' },
}
}
@@ -141,40 +140,63 @@ export async function serveIndex(ctx, path) {
links.previous = { page: currPage - 1, title: 'Previous' }
links.first = { page: 1, title: 'First' }
}
-data = data.toJSON().map(mapArticle.bind(null, true))
} else if (path.startsWith('/article/') || path.startsWith('/page/')) {
let id = path.split('/')[2]
if (id) {
-let found
if (path.startsWith('/article/')) {
-found = await Article.getSingle(id, ['media', 'parent', 'banner', 'files', 'staff'], false, null, true)
-if (found) {
-found = mapArticle(false, found.toJSON())
+data = await Article.getSingle(id, ['media', 'parent', 'banner', 'files', 'staff'], false, null, true)
+if (data) {
+data = mapArticle(false, data)
}
-data = found
} else {
-found = await Page.getSingle(id, ['media', 'banner', 'children', 'parent'])
-found = mapPage(found.toJSON())
-data = found
-}
-if (found) {
-if (found.media) {
-image = found.media.large_url
-image_avif = found.media.large_url_avifl
-} else if (found.banner) {
-image = found.banner.large_url
-image_avif = found.banner.large_url_avifl
-}
-if (found.description) {
-description = striptags(found.description)
-}
-if (found.parent) {
-title = found.name + ' - ' + found.parent.name + ' - NFP Moe'
-} else {
-title = found.name + ' - NFP Moe'
+data = await Page.getSingle(id, ['media', 'banner', 'children', 'parent'])
+data = mapPage(data)
+ctx.state.pagination = {
+perPage: 10,
+page: currPage,
+}
+subdata = await Article.getAllFromPage(ctx, data.id, ['files', 'media'], '-published_at', true)
+subdata = subdata.map(mapArticle.bind(null, true))
+if (ctx.state.pagination.total > currPage * 10) {
+links = {
+first: currPage > 1 ? { page: 1, title: 'First' } : null,
+previous: currPage > 1 ? { page: currPage - 1, title: 'Previous' } : null,
+current: { title: 'Page ' + currPage },
+next: { page: currPage + 1, title: 'Next' },
+last: { page: Math.ceil(ctx.state.pagination.total / 10), title: 'Last' },
+}
+} else {
+links = {
+first: currPage > 1 ? { page: 1, title: 'First' } : null,
+previous: currPage > 1 ? { page: currPage - 1, title: 'Previous' } : null,
+current: { title: 'Page 1' },
+}
+}
+}
+if (data) {
+if (data.media) {
+image = data.media.large_url
+image_avif = data.media.large_url_avifl
+} else if (data.banner) {
+image = data.banner.large_url
+image_avif = data.banner.large_url_avifl
+}
+if (data.description) {
+description = striptags(data.description)
+}
+if (data.parent) {
+title = data.name + ' - ' + data.parent.name + ' - NFP Moe'
+} else {
+title = data.name + ' - NFP Moe'
}
}
}
}
+if (!featured) {
+featured = await Article.getFeaturedArticle(['media', 'banner'])
+}
+if (featured) {
+featured = mapArticle(true, featured, true, false)
+}
} catch (e) {
ctx.log.error(e)
@@ -185,6 +207,7 @@ export async function serveIndex(ctx, path) {
v: config.get('CIRCLECI_VERSION'),
tree: JSON.stringify(tree),
data: JSON.stringify(data),
+subdata: JSON.stringify(subdata),
links: JSON.stringify(links),
featured: JSON.stringify(featured),
url: url,

View file

@@ -2,15 +2,15 @@ import Koa from 'koa-lite'
import bodyParser from 'koa-bodyparser'
import cors from '@koa/cors'
-import config from './api/config.mjs'
-import router from './api/router.mjs'
-import Jwt from './api/jwt.mjs'
-import log from './api/log.mjs'
-import { serve } from './api/serve.mjs'
-import { mask } from './api/middlewares/mask.mjs'
-import { errorHandler } from './api/error/middleware.mjs'
-import { accessChecks } from './api/access/middleware.mjs'
-import ParserMiddleware from './api/parser/middleware.mjs'
+import config from './config.mjs'
+import router from './router.mjs'
+import Jwt from './jwt.mjs'
+import log from './log.mjs'
+import { serve } from './serve.mjs'
+import { mask } from './middlewares/mask.mjs'
+import { errorHandler } from './error/middleware.mjs'
+import { accessChecks } from './access/middleware.mjs'
+import ParserMiddleware from './parser/middleware.mjs'
const app = new Koa()
const parser = new ParserMiddleware()


@@ -1,6 +1,6 @@
-import bookshelf from '../bookshelf.mjs'
+import { createPrototype, safeColumns } from '../knex.mjs'
import bcrypt from 'bcrypt'
-import config from '../config.mjs'
+/*import config from '../config.mjs'*/

/* Staff model:
  {
@@ -16,18 +16,19 @@ import config from '../config.mjs'
  */

-const Staff = bookshelf.createModel({
-  tableName: 'staff',
-
-  privateFields: bookshelf.safeColumns([
-    'fullname',
-    'email',
-    'level',
-  ]),
-}, {
-  // Hide password from any relations and include requests.
-  publicFields: ['id', 'fullname'],
+function StaffItem(data) {
+  Object.assign(this, data)
+}
+
+function Staff() {
+  this.tableName = 'staff'
+  this.Model = StaffItem
+  this.privateFields = safeColumns(['fullname','email','level',])
+  this.publicFields = ['id', 'fullname']
+  this.init()
+}
+
+Staff.prototype = createPrototype({
  hash(password) {
    return new Promise((resolve, reject) =>
      bcrypt.hash(password, config.get('bcrypt'), (err, hashed) => {
@@ -47,7 +48,16 @@ const Staff = bookshelf.createModel({
    )
  },

-  getAll(ctx, where = {}, withRelated = [], orderBy = 'id') {
+  _getSingle(subq, includes = [], require = true, ctx = null) {
+    return this.getSingleQuery(this.query(qb => {
+      return qb
+        .where(qb => {
+          if (subq) subq(qb)
+        })
+    }, includes, this.privateFields), require)
+  },
+
+  /* getAll(ctx, where = {}, withRelated = [], orderBy = 'id') {
    return this.query(qb => {
      this.baseQueryAll(ctx, qb, where, orderBy)
      qb.select(bookshelf.safeColumns([
@@ -66,7 +76,7 @@ const Staff = bookshelf.createModel({
      ctx.state.pagination.total = result.pagination.rowCount
      return result
    })
-  },
+  }, */
})

-export default Staff
+export default new Staff()


@@ -11,7 +11,7 @@ export default class StaffRoutes {

  /** GET: /api/staff */
  async getAllStaff(ctx) {
-   ctx.body = await this.Staff.getAll(ctx, { }, [])
+   ctx.body = await this.Staff.getAll(ctx, null, [])
  }

  /** GET: /api/staff/:id */
@@ -30,22 +30,14 @@ export default class StaffRoutes {
  async updateStaff(ctx) {
    await this.security.validUpdate(ctx)

-   let page = await this.Staff.getSingle(ctx.params.id)
-   page.set(ctx.request.body)
-   await page.save()
-   ctx.body = page
+   let staff = await this.Staff.updateSingle(ctx, ctx.params.id, ctx.request.body)
+   ctx.body = staff
  }

  /** DELETE: /api/staff/:id */
  async removeStaff(ctx) {
-   let page = await this.Staff.getSingle(ctx.params.id)
-   page.set({ is_deleted: true })
-   await page.save()
+   await this.Staff.updateSingle(ctx, ctx.params.id, { is_deleted: true })

    ctx.status = 204
  }


@@ -110,6 +110,8 @@ const EditPage = {
      this.error = 'Name is missing'
    } else if (!this.page.path) {
      this.error = 'Path is missing'
+   } else {
+     this.error = ''
    }

    if (this.error) return
@@ -147,6 +149,7 @@ const EditPage = {
        res.media = vnode.state.page.media
        res.banner = vnode.state.page.banner
        vnode.state.page = res
+       console.log(res)
      } else {
        m.route.set('/admin/pages/' + res.id)
      }


@@ -49,25 +49,6 @@ exports.getAllPageArticles = function(pageId, includes) {
  })
}

-exports.getAllPageArticlesPagination = function(pageId, options) {
-  let extra = ''
-  if (options.sort) {
-    extra += '&sort=' + options.sort
-  }
-  if (options.per_page) {
-    extra += '&perPage=' + options.per_page
-  }
-  if (options.page) {
-    extra += '&page=' + options.page
-  }
-  if (options.includes) {
-    extra += '&includes=' + options.includes.join(',')
-  }
-  return '/api/pages/' + pageId + '/articles?' + extra
-}
-
exports.getArticle = function(id) {
  return common.sendRequest({
    method: 'GET',


@@ -10,6 +10,9 @@ exports.sendRequest = function(options, isPagination) {
  }

  options.extract = function(xhr) {
+   if (xhr.responseText && xhr.responseText.slice(0, 9) === '<!doctype') {
+     throw new Error('Expected JSON but got HTML (' + xhr.status + ': ' + this.url.split('?')[0] + ')')
+   }
    let out = null
    if (pagination && xhr.status < 300) {
      let headers = {}


@@ -71,7 +71,7 @@ exports.getAllPages = function() {
exports.getPage = function(id) {
  return common.sendRequest({
    method: 'GET',
-   url: '/api/pages/' + id + '?includes=media,banner,children,news,news.media',
+   url: '/api/pages/' + id + '?includes=media,banner',
  })
}


@@ -7,7 +7,7 @@ exports.Tree = Tree
exports.getTree = function() {
  return common.sendRequest({
    method: 'GET',
-   url: '/api/pages?tree=true&includes=children&fields=id,name,path,children(id,name,path)',
+   url: '/api/pagetree',
  })
}


@@ -82,6 +82,22 @@ main {
  padding-bottom: 20px;
}

+.error-wrapper {
+  flex-grow: 2;
+  display: flex;
+  flex-direction: column;
+  align-items: center;
+  justify-content: center;
+  background: $border;
+  padding: 40px 0;
+
+  .error {
+    border: 2px dashed $secondary-dark-bg;
+    padding: 10px 20px;
+    font-size: 1em;
+  }
+}
+
[hidden] { display: none !important; }

article {


@@ -19,6 +19,7 @@ const Article = {
  },

  fetchArticle: function(vnode) {
+   this.error = ''
    this.path = m.route.param('id')
    this.showcomments = false
    this.article = {
@@ -73,58 +74,65 @@ const Article = {
    return (
      this.loading ?
        m('article.article', m('div.loading-spinner'))
-     : m('article.article', [
+     : this.error
+       ? m('div.error-wrapper', m('div.error', {
+         onclick: function() {
+           vnode.state.error = ''
+           vnode.state.fetchArticle(vnode)
+         },
+       }, 'Article error: ' + this.error))
+       : m('article.article', [
          this.article.parent ? m('div.goback', ['« ', m(m.route.Link, { href: '/page/' + this.article.parent.path }, this.article.parent.name)]) : null,
          m('header', m('h1', this.article.name)),
          m('.fr-view', [
            this.article.media
              ? m('a.cover', {
                rel: 'noopener',
                href: this.article.media.link,
              }, m('img', { src: imagePath, alt: 'Cover image for ' + this.article.name }))
              : null,
            this.article.description ? m.trust(this.article.description) : null,
            (this.article.files && this.article.files.length
              ? this.article.files.map(function(file) {
                return m(Fileinfo, { file: file })
              })
              : null),
            m('div.entrymeta', [
              'Posted ',
              (this.article.parent ? 'in' : ''),
              (this.article.parent ? m(m.route.Link, { href: '/page/' + this.article.parent.path }, this.article.parent.name) : null),
              'at ' + (this.article.published_at.replace('T', ' ').split('.')[0]).substr(0, 16),
              ' by ' + (this.article.staff && this.article.staff.fullname || 'Admin'),
            ]),
          ]),
          Authentication.currentUser
            ? m('div.admin-actions', [
              m('span', 'Admin controls:'),
              m(m.route.Link, { href: '/admin/articles/' + this.article.id }, 'Edit article'),
            ])
            : null,
          this.showcomments
            ? m('div.commentcontainer', [
              m('div#disqus_thread', { oncreate: function() {
                let fullhost = window.location.protocol + '//' + window.location.host
                /*eslint-disable */
                window.disqus_config = function () {
                  this.page.url = fullhost + '/article/' + vnode.state.article.path
                  this.page.identifier = 'article-' + vnode.state.article.id
                };
                (function() { // DON'T EDIT BELOW THIS LINE
                  var d = document, s = d.createElement('script');
                  s.src = 'https://nfp-moe.disqus.com/embed.js';
                  s.setAttribute('data-timestamp', +new Date());
                  (d.head || d.body).appendChild(s);
                })()
                /*eslint-enable */
              }}, m('div.loading-spinner')),
            ])
            : m('button.opencomments', {
              onclick: function() { vnode.state.showcomments = true },
            }, 'Open comment discussion'),
        ])
    )
  },
}


@@ -9,6 +9,12 @@ m.route.set = function(path, data, options){
  window.scrollTo(0, 0)
}

+/*console.log('tree', window.__nfptree)
+console.log('featured', window.__nfpfeatured)
+console.log('data', window.__nfpdata)
+console.log('subdata', window.__nfpsubdata)
+console.log('links', window.__nfplinks)*/
+
m.route.linkOrig = m.route.link
m.route.link = function(vnode){
  m.route.linkOrig(vnode)


@@ -12,13 +12,15 @@ const Page = {
    this.lastpage = m.route.param('page') || '1'
    this.loadingnews = false

+   console.log(window.__nfpdata)
    if (window.__nfpdata) {
      this.path = m.route.param('id')
      this.page = window.__nfpdata
-     this.news = []
-     this.newslinks = null
+     this.news = window.__nfpsubdata
+     this.newslinks = window.__nfplinks
      window.__nfpdata = null
-     vnode.state.fetchArticles(vnode)
+     window.__nfpsubdata = null
    } else {
      this.fetchPage(vnode)
    }
@@ -42,12 +44,12 @@ const Page = {
      .then(function(result) {
        vnode.state.page = result
        document.title = result.name + ' - NFP Moe'
+       return vnode.state.fetchArticles(vnode)
      })
      .catch(function(err) {
        vnode.state.error = err.message
-     })
-     .then(function() {
-       return vnode.state.fetchArticles(vnode)
+       vnode.state.loading = vnode.state.loadingnews = false
+       m.redraw()
      })
  },
@@ -111,7 +113,14 @@ const Page = {
    return (
      this.loading ?
        m('article.page', m('div.loading-spinner'))
-     : m('article.page', [
+     : this.error
+       ? m('div.error-wrapper', m('div.error', {
+         onclick: function() {
+           vnode.state.error = ''
+           vnode.state.fetchPage(vnode)
+         },
+       }, 'Article error: ' + this.error))
+       : m('article.page', [
          bannerPath ? m('.div.page-banner', { style: { 'background-image': 'url("' + bannerPath + '")' } } ) : null,
          this.page.parent
            ? m('div.goback', ['« ', m(m.route.Link, { href: '/page/' + this.page.parent.path }, this.page.parent.name)])


@@ -2,17 +2,19 @@ const Fileinfo = require('./fileinfo')

const Newsitem = {
  oninit: function(vnode) {
-   this.srcsetJpeg = vnode.attrs.media.small_url + ' 500w, '
-     + vnode.attrs.media.medium_url + ' 800w '
-   if (vnode.attrs.media.small_url_avif) {
-     this.srcsetAvif = vnode.attrs.media.small_url_avif + ' 500w, '
-       + vnode.attrs.media.medium_url_avif + ' 800w '
-   } else {
-     this.srcsetAvif = null
+   if (vnode.attrs.media) {
+     this.srcsetJpeg = vnode.attrs.media.small_url + ' 500w, '
+       + vnode.attrs.media.medium_url + ' 800w '
+     if (vnode.attrs.media.small_url_avif) {
+       this.srcsetAvif = vnode.attrs.media.small_url_avif + ' 500w, '
+         + vnode.attrs.media.medium_url_avif + ' 800w '
+     } else {
+       this.srcsetAvif = null
+     }
+     this.coverSizes = '(max-width: 639px) calc(100vw - 40px), '
+       + '(max-width: 1000px) 300px, '
+       + '400px'
    }
-   this.coverSizes = '(max-width: 639px) calc(100vw - 40px), '
-     + '(max-width: 1000px) 300px, '
-     + '400px'
  },

  view: function(vnode) {


@@ -13,7 +13,7 @@ setup().catch(async (error) => {
  // process.exit(1)
  // })
}).then(() =>
-  import('./server.mjs')
+  import('./api/server.mjs')
).catch(error => {
  log.error(error, 'Unknown error starting server')
})


@@ -0,0 +1,16 @@
/* eslint-disable */
exports.up = function(knex) {
return Promise.all([
knex.schema.raw('create index pages_gettree_index on pages (name asc) where not is_deleted'),
knex.schema.raw('create index pages_featuredpublish_index on articles (published_at desc) where is_featured = true and not is_deleted'),
knex.schema.raw('create index pages_publish_index on articles (published_at desc) where is_deleted = false'),
])
};
exports.down = function(knex) {
  return Promise.all([
    // Drop the same three partial indexes created in `up` (raw statements,
    // since table.dropIndex would derive a different index name).
    knex.schema.raw('drop index if exists pages_gettree_index'),
    knex.schema.raw('drop index if exists pages_featuredpublish_index'),
    knex.schema.raw('drop index if exists pages_publish_index'),
  ])
};
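
To make the intent of the partial indexes above concrete, here is a minimal sketch of the lookups they are shaped to serve. It is illustrative only and not part of the commit, and it assumes a configured knex instance:

// Illustrative only: queries whose filter and ordering match the partial indexes.
async function indexExamples(knex) {
  // pages_featuredpublish_index: newest featured, non-deleted article first.
  const featured = await knex('articles')
    .where({ is_featured: true, is_deleted: false })
    .orderBy('published_at', 'desc')
    .first()

  // pages_gettree_index: non-deleted pages in name order.
  const pages = await knex('pages')
    .where({ is_deleted: false })
    .orderBy('name', 'asc')

  return { featured, pages }
}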


@@ -17,6 +17,7 @@
    }
  },
  "scripts": {
+   "knex:rollback": "node scripts/rollback.mjs | bunyan",
    "lint": "eslint .",
    "start": "node --experimental-modules index.mjs",
    "build": "sass -s compressed app/app.scss public/assets/app.css && sass -s compressed app/admin.scss public/assets/admin.css && browserify -p tinyify --no-commondir -o public/assets/app.js app/index.js && browserify -p tinyify --no-commondir -o public/assets/admin.js app/admin.js",

[Binary image file(s) changed; new image size: 285 KiB]


@@ -29,6 +29,7 @@
      window.__nfptree = {{=it.tree}};
      window.__nfpfeatured = {{=it.featured}};
      window.__nfpdata = {{=it.data}};
+     window.__nfpsubdata = {{=it.subdata}};
      window.__nfplinks = {{=it.links}};
    </script>
    <div class="maincontainer">

scripts/rollback.mjs (new file)

@@ -0,0 +1,37 @@
import _ from 'lodash'
import config from '../api/config.mjs'
import log from '../api/log.mjs'
import knex from 'knex-core'
// This is important for setup to run cleanly.
let knexConfig = _.cloneDeep(config.get('knex'))
knexConfig.pool = { min: 1, max: 1 }
let knexSetup = knex(knexConfig)
export default function rollback() {
log.info(knexConfig, 'Running database rollback.')
return knexSetup.migrate.rollback({
directory: './migrations',
})
.then((result) => {
if (result[1].length === 0) {
return log.info('Database has been rolled back')
}
for (let i = 0; i < result[1].length; i++) {
log.info('Rolled back migration', result[1][i].substr(result[1][i].lastIndexOf('\\') + 1))
}
return knexSetup.destroy()
})
}
rollback()
.catch(async (error) => {
log.error({ code: error.code, message: error.message }, 'Error while rolling back database')
log.error('Unable to verify database integrity.')
process.exit(1)
}).then(() =>
process.exit(0)
)