Huge refactor, much improvements, wow
This commit is contained in:
parent
b63d0ca06d
commit
e4c4e3b7ed
34 changed files with 1012 additions and 879 deletions
|
@ -1,4 +1,4 @@
|
|||
import bookshelf from '../bookshelf.mjs'
|
||||
import { createPrototype, safeColumns } from '../knex.mjs'
|
||||
import Media from '../media/model.mjs'
|
||||
import File from '../file/model.mjs'
|
||||
import Staff from '../staff/model.mjs'
|
||||
|
@ -21,10 +21,136 @@ Article model:
|
|||
|
||||
*/
|
||||
|
||||
const Article = bookshelf.createModel({
|
||||
tableName: 'articles',
|
||||
function ArticleItem(data) {
|
||||
Object.assign(this, data)
|
||||
}
|
||||
|
||||
parent() {
|
||||
function Article() {
|
||||
this.tableName = 'articles'
|
||||
this.Model = ArticleItem
|
||||
this.includes = {
|
||||
staff: Staff.includeHasOne('articles.staff_id', 'id'),
|
||||
media: Media.includeHasOne('articles.media_id', 'id'),
|
||||
banner: Media.includeHasOne('articles.banner_id', 'id'),
|
||||
parent: Page.includeHasOne('articles.parent_id', 'id'),
|
||||
files: File.includeHasMany('article_id', 'articles.id'),
|
||||
}
|
||||
this.publicFields = this.privateFields = safeColumns([
|
||||
'staff_id',
|
||||
'parent_id',
|
||||
'name',
|
||||
'path',
|
||||
'description',
|
||||
'banner_id',
|
||||
'media_id',
|
||||
'published_at',
|
||||
'is_featured',
|
||||
])
|
||||
this.init()
|
||||
}
|
||||
|
||||
Article.prototype = createPrototype({
|
||||
getAll(ctx, where = null, includes = [], orderBy = 'id', limitToday = false) {
|
||||
return this._getAll(ctx, (qb) => {
|
||||
if (where) qb.where(where)
|
||||
if (limitToday) {
|
||||
qb.where(this.tableName + '.published_at', '<=', (new Date()).toISOString())
|
||||
}
|
||||
}, includes, orderBy, [])
|
||||
},
|
||||
|
||||
getAllFromPage(ctx, pageId, includes = [], orderBy = 'id', limitToday = false) {
|
||||
return this._getAll(ctx, (qb) => {
|
||||
qb = qb.innerJoin('pages', 'articles.parent_id', 'pages.id')
|
||||
qb.where(subq => {
|
||||
subq.where('pages.id', pageId)
|
||||
.orWhere('pages.parent_id', pageId)
|
||||
})
|
||||
if (limitToday) {
|
||||
qb.where(this.tableName + '.published_at', '<=', (new Date()).toISOString())
|
||||
}
|
||||
return qb
|
||||
}, includes, orderBy, [])
|
||||
},
|
||||
|
||||
getSingle(id, includes = [], require = true, ctx = null, limitToday = false) {
|
||||
return this._getSingle(qb => {
|
||||
qb.where(subq => {
|
||||
subq.where(this.tableName + '.id', '=', Number(id) || 0)
|
||||
.orWhere(this.tableName + '.path', '=', id)
|
||||
})
|
||||
if (limitToday && (!ctx || !ctx.state.user || ctx.state.user.level < 10)) {
|
||||
qb.where(this.tableName + '.published_at', '<=', (new Date()).toISOString())
|
||||
}
|
||||
}, includes, require, ctx)
|
||||
},
|
||||
|
||||
getFeaturedArticle(includes = [], ctx = null) {
|
||||
return this._getSingle(qb => {
|
||||
qb.where({ is_featured: true })
|
||||
.where(this.tableName + '.published_at', '<=', (new Date()).toISOString())
|
||||
.orderBy(this.tableName + '.published_at', 'DESC')
|
||||
.select(this.knex.raw('1 as __group'))
|
||||
.limit(1)
|
||||
}, includes, false, ctx)
|
||||
},
|
||||
|
||||
async getFrontpageArticles(orgPage = 1) {
|
||||
let page = Math.max(orgPage, 1)
|
||||
let out = {
|
||||
featured: null,
|
||||
items: [],
|
||||
total: 0,
|
||||
}
|
||||
|
||||
let qFeatured = this.query(qb => {
|
||||
return qb.where({ is_featured: true })
|
||||
.where(this.tableName + '.published_at', '<=', (new Date()).toISOString())
|
||||
.orderBy(this.tableName + '.published_at', 'DESC')
|
||||
.select(this.knex.raw('1 as __group'))
|
||||
.limit(1)
|
||||
}, ['staff', 'media', 'banner'])
|
||||
let qArticles = this.query(qb => {
|
||||
return qb
|
||||
.where(this.tableName + '.published_at', '<=', (new Date()).toISOString())
|
||||
.select(this.knex.raw('2 as __group'))
|
||||
.orderBy(this.tableName + '.published_at', 'DESC')
|
||||
.limit(10)
|
||||
.offset((page - 1) * 10)
|
||||
}, ['staff', 'media', 'banner'], null, qFeatured)
|
||||
|
||||
let [articles, total] = await Promise.all([
|
||||
this.getAllQuery(
|
||||
this.knex
|
||||
.unionAll(qFeatured, true)
|
||||
.unionAll(qArticles, true),
|
||||
qFeatured
|
||||
),
|
||||
this.knex('articles')
|
||||
.where(this.tableName + '.published_at', '<=', (new Date()).toISOString())
|
||||
.where({ is_deleted: false })
|
||||
.count('* as count'),
|
||||
])
|
||||
|
||||
out.total = total[0].count
|
||||
if (articles.length > 0 && articles[0].is_featured) {
|
||||
out.featured = articles[0]
|
||||
out.items = articles.slice(1)
|
||||
} else {
|
||||
out.items = articles
|
||||
}
|
||||
return out
|
||||
},
|
||||
|
||||
setAllUnfeatured() {
|
||||
return knex('articles')
|
||||
.where({ is_featured: true })
|
||||
.update({
|
||||
is_featured: false,
|
||||
})
|
||||
},
|
||||
|
||||
/*parent() {
|
||||
return this.belongsTo(Page, 'parent_id')
|
||||
},
|
||||
|
||||
|
@ -45,9 +171,9 @@ const Article = bookshelf.createModel({
|
|||
.query(qb => {
|
||||
qb.orderBy('id', 'asc')
|
||||
})
|
||||
},
|
||||
}, {
|
||||
getAll(ctx, where = {}, withRelated = [], orderBy = 'id', limitToday = false) {
|
||||
},*/
|
||||
|
||||
/*getAll(ctx, where = {}, withRelated = [], orderBy = 'id', limitToday = false) {
|
||||
return this.query(qb => {
|
||||
this.baseQueryAll(ctx, qb, where, orderBy)
|
||||
if (limitToday) {
|
||||
|
@ -138,7 +264,12 @@ const Article = bookshelf.createModel({
|
|||
page: page,
|
||||
withRelated: ['files', 'media', 'banner', 'parent', 'staff'],
|
||||
})
|
||||
},
|
||||
},*/
|
||||
})
|
||||
|
||||
export default Article
|
||||
const articleInstance = new Article()
|
||||
|
||||
// Hook into includes for Page
|
||||
// Page.addInclude('news', articleInstance.includeHasMany('parent_id', 'pages.id'))
|
||||
|
||||
export default articleInstance
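// Usage sketch (illustrative): how a route handler might call the new prototype-based
// model; ctx.state.pagination and ctx.state.filter are assumed to be populated by the
// existing middleware, and the handler names are hypothetical.
//
//   import Article from './article/model.mjs'
//
//   async function listPublishedArticles(ctx) {
//     // published articles with staff and media joined in, newest first
//     ctx.body = await Article.getAll(ctx, null, ['staff', 'media'], '-published_at', true)
//   }
//
//   async function showArticle(ctx) {
//     // looks the article up by numeric id or by its path slug
//     ctx.body = await Article.getSingle(ctx.params.id, ['media', 'banner'], true, ctx, true)
//   }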
|
||||
|
|
|
@ -16,13 +16,6 @@ export default class ArticleRoutes {
|
|||
ctx.body = await this.Article.getAll(ctx, { }, ctx.state.filter.includes, ctx.query.sort || '-published_at')
|
||||
}
|
||||
|
||||
/** GET: /api/pages/:pageId/articles */
|
||||
async getAllPageArticles(ctx) {
|
||||
await this.security.ensureIncludes(ctx)
|
||||
|
||||
ctx.body = await this.Article.getAllFromPage(ctx, ctx.params.pageId, ctx.state.filter.includes, ctx.query.sort || '-published_at')
|
||||
}
|
||||
|
||||
/** GET: /api/articles/:id */
|
||||
async getSingleArticle(ctx) {
|
||||
await this.security.ensureIncludes(ctx)
|
||||
|
@ -70,22 +63,14 @@ export default class ArticleRoutes {
|
|||
await Article.setAllUnfeatured()
|
||||
}
|
||||
|
||||
let page = await this.Article.getSingle(ctx.params.id)
|
||||
let article = await this.Article.updateSingle(ctx, ctx.params.id, ctx.request.body)
|
||||
|
||||
page.set(ctx.request.body)
|
||||
|
||||
await page.save()
|
||||
|
||||
ctx.body = page
|
||||
ctx.body = article
|
||||
}
|
||||
|
||||
/** DELETE: /api/articles/:id */
|
||||
async removeArticle(ctx) {
|
||||
let page = await this.Article.getSingle(ctx.params.id)
|
||||
|
||||
page.set({ is_deleted: true })
|
||||
|
||||
await page.save()
|
||||
await this.Article.updateSingle(ctx, ctx.params.id, { is_deleted: true })
|
||||
|
||||
ctx.status = 204
|
||||
}
|
||||
|
|
|
@ -14,13 +14,12 @@ export default class AuthHelper {
|
|||
|
||||
try {
|
||||
staff = await this.Staff
|
||||
.query(qb => {
|
||||
qb.where({ email: ctx.request.body.username })
|
||||
qb.select('*')
|
||||
})
|
||||
.fetch({ require: true })
|
||||
.getSingleQuery(
|
||||
this.Staff.query(qb => qb.where({ email: ctx.request.body.username }), [], ['*']),
|
||||
true
|
||||
)
|
||||
|
||||
await this.Staff.compare(ctx.request.body.password, staff.get('password'))
|
||||
await this.Staff.compare(ctx.request.body.password, staff.password)
|
||||
} catch (err) {
|
||||
if (err.message === 'EmptyResponse' || err.message === 'PasswordMismatch') {
|
||||
ctx.throw(422, 'The email or password did not match')
|
||||
|
@ -28,6 +27,6 @@ export default class AuthHelper {
|
|||
throw err
|
||||
}
|
||||
|
||||
return this.jwt.createToken(staff.id, staff.get('email'), staff.get('level'))
|
||||
return this.jwt.createToken(staff.id, staff.email, staff.level)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,590 +0,0 @@
|
|||
import _ from 'lodash'
|
||||
import knex from 'knex-core'
|
||||
import bookshelf from 'bookshelf'
|
||||
|
||||
import config from './config.mjs'
|
||||
import defaults from './defaults.mjs'
|
||||
import log from './log.mjs'
|
||||
|
||||
let connections = [config.get('knex:connection')]
|
||||
|
||||
if (config.get('knex:connectionslave')) {
|
||||
connections.push(config.get('knex:connectionslave'))
|
||||
}
|
||||
|
||||
let isRecovering = false
|
||||
let isUrgent = false
|
||||
let currentIndex = 0
|
||||
let nextIndex = currentIndex + 1
|
||||
let client
|
||||
let secondaryClient
|
||||
|
||||
/**
|
||||
* Semi-gracefully shift the current active client connection from the
|
||||
* current connected client and switch to the selected index server.
|
||||
*/
|
||||
async function shiftConnection(index) {
|
||||
// Update our variables
|
||||
isUrgent = false
|
||||
currentIndex = index
|
||||
|
||||
log.warn('DB: Destroying current pool')
|
||||
await client.destroy()
|
||||
|
||||
// Update connection settings to the new server and re-initialize the pool.
|
||||
log.warn(connections[currentIndex], 'DB: Connecting to next server')
|
||||
client.client.connectionSettings = connections[currentIndex]
|
||||
client.initialize()
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a graceful server migration. Creates a secondary database connection
|
||||
* and checks other available servers we have if they're up and can be used.
|
||||
*/
|
||||
async function gracefulServerMigrate() {
|
||||
// Check if we're already recovering and exit then.
|
||||
if (isRecovering) return
|
||||
|
||||
// Urgent means we don't have ANY active database connection and need one quickly.
|
||||
if (isUrgent) {
|
||||
log.error(connections[currentIndex], `DB: The server we are connected to is offline.`)
|
||||
} else {
|
||||
log.warn(connections[currentIndex], `DB: Successfully connected to a server but its status was recovering (slave).`)
|
||||
}
|
||||
log.warn('DB: Attempting to gracefully connect to a different server')
|
||||
isRecovering = true
|
||||
|
||||
// Load up next server into a new knex connection and start connecting.
|
||||
if (nextIndex === connections.length) {
|
||||
nextIndex = 0
|
||||
}
|
||||
secondaryClient = knex(getConfig(nextIndex, false))
|
||||
|
||||
// Keep on trying :)
|
||||
while (true) {
|
||||
// Make multiple attempts when we're connecting to downed or timed out databases.
|
||||
let attempts = 0
|
||||
|
||||
while (attempts++ < 5) {
|
||||
try {
|
||||
log.warn(connections[nextIndex], `DB: Gracefully attempting to connect to server (attempt ${attempts}/5).`)
|
||||
|
||||
// Connect to the database (this creates a new pool connection) and check if it's in recovery mode
|
||||
let data = await secondaryClient.raw('select pg_is_in_recovery()')
|
||||
|
||||
// If we reach here, we got data which means the database is up and running.
|
||||
// As such, there's no need to make more attempts to the same server
|
||||
attempts = 6
|
||||
|
||||
// Check if it's master or if we are desperate
|
||||
if (!data.rows[0].pg_is_in_recovery || isUrgent) {
|
||||
// Found a viable server to connect to. Shift our active client to it.
|
||||
log.info(connections[nextIndex], 'DB: Found available server, connecting to it')
|
||||
await shiftConnection(nextIndex)
|
||||
|
||||
// Check if we're connected to master or just a slave.
|
||||
if (!data.rows[0].pg_is_in_recovery) {
|
||||
// We found a master, stop recovering
|
||||
log.info(connections[nextIndex], 'DB: Connection established with master.')
|
||||
isRecovering = false
|
||||
break
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
// We only care to log weird errors like postgresql errors or such.
|
||||
if (err.code !== 'ECONNREFUSED' && err.code !== 'ETIMEDOUT') {
|
||||
log.error({ code: err.code, message: err.message }, `DB: Unknown error while gracefully connecting to ${connections[nextIndex].host}`)
|
||||
}
|
||||
|
||||
// Make a next attempt after 10 seconds
|
||||
await new Promise(res => setTimeout(res, 10000))
|
||||
}
|
||||
}
|
||||
|
||||
// Check if we found a master and break if we did.
|
||||
if (isRecovering === false) break
|
||||
|
||||
// Didn't find a master :( wait 60 seconds before running another attempt
|
||||
log.warn(connections[nextIndex], 'DB: Connected server was deemed unable to fill the master role')
|
||||
log.warn('DB: waiting 60 seconds before attempting next server')
|
||||
|
||||
await new Promise(res => setTimeout(res, 60000))
|
||||
|
||||
// Move to next server
|
||||
nextIndex++
|
||||
if (nextIndex === connections.length) {
|
||||
nextIndex = 0
|
||||
}
|
||||
|
||||
// Time to destroy our active pool on our current server and update
|
||||
// the connection settings to the next server and re-initialise.
|
||||
await secondaryClient.destroy()
|
||||
secondaryClient.client.connectionSettings = connections[nextIndex]
|
||||
secondaryClient.initialize()
|
||||
}
|
||||
|
||||
// We got here means we have stopped recovery process.
|
||||
// Shut down the secondary knex client and destroy it and
|
||||
// remove reference to it so GC can collect it eventually, hopefully.
|
||||
await secondaryClient.destroy()
|
||||
nextIndex = currentIndex + 1
|
||||
secondaryClient = null
|
||||
}
|
||||
|
||||
/**
|
||||
* Event handler after our pool is created and we are creating a connection.
|
||||
* Here we check if the database is in recovery mode (a.k.a. slave) and if so
|
||||
* start the graceful migration to migrate back to master once it's up and running.
|
||||
*/
|
||||
function afterCreate(conn, done) {
|
||||
conn.query('select pg_is_in_recovery()', (e, res) => {
|
||||
if (e) return done(e, conn)
|
||||
if (res.rows[0].pg_is_in_recovery) gracefulServerMigrate().then()
|
||||
done(null, conn)
|
||||
})
|
||||
}
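// Minimal sketch of the same recovery check done ad hoc through knex, outside the
// pool's afterCreate hook; assumes an already-configured knex client instance.
//
//   async function isReplica(knexClient) {
//     const result = await knexClient.raw('select pg_is_in_recovery()')
//     // pg_is_in_recovery() returns true on a streaming-replication standby (slave)
//     return result.rows[0].pg_is_in_recovery === true
//   }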
|
||||
|
||||
/**
|
||||
* Event handler for when the pool gets destroyed. Here we check
|
||||
* if the connection has been marked with _ending = true.
|
||||
* There are some checks available we can use to check if current
|
||||
* connection was abruptly disconnected. Among those from my testing
|
||||
* are as follows:
|
||||
*
|
||||
* conn.__knex__disposed = 'Connection ended unexpectedly'
|
||||
* conn.connection._ending = true
|
||||
*
|
||||
* I went with connection._ending one as I feel that one's the safest.
|
||||
*
|
||||
*/
|
||||
function beforeDestroy(conn) {
|
||||
if (conn.connection._ending) {
|
||||
checkActiveConnection()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a valid config for knex based on a specific connection index.
|
||||
* Note that we don't wanna hook into afterCreate or beforeDestroy
|
||||
* in our secondary knex connection doing the recovery checking.
|
||||
*/
|
||||
function getConfig(index = 0, addEvents = true) {
|
||||
return {
|
||||
'client': 'pg',
|
||||
'connection': connections[index],
|
||||
'migrations': {
|
||||
},
|
||||
pool: {
|
||||
afterCreate: addEvents && afterCreate || null,
|
||||
min: 2,
|
||||
max: 10,
|
||||
// beforeDestroy: addEvents && beforeDestroy || null,
|
||||
},
|
||||
acquireConnectionTimeout: 10000,
|
||||
}
|
||||
}
|
||||
|
||||
client = knex(getConfig(currentIndex))
|
||||
|
||||
/**
|
||||
* Make sure no update or delete queries are run while we're recovering.
|
||||
* This allows knex to connect to a slave and only process select queries.
|
||||
*
|
||||
* Note: Probably does not support complicated select queries that cause
|
||||
* updates on trigger or other such things.
|
||||
*/
|
||||
client.on('query', data => {
|
||||
if (isRecovering && data.method !== 'select') {
|
||||
throw new Error('Database is in read-only mode')
|
||||
}
|
||||
})
|
||||
|
||||
function checkActiveConnection(attempt = 1) {
|
||||
if (attempt > 5) {
|
||||
isUrgent = true
|
||||
return gracefulServerMigrate().then()
|
||||
}
|
||||
// log.info(`DB: (Attempt ${attempt}/5) Checking connection is active.`)
|
||||
client.raw('select 1').catch(err => {
|
||||
if (err.code === 'ECONNREFUSED') { // err.code === 'ETIMEDOUT'
|
||||
isUrgent = true
|
||||
return gracefulServerMigrate().then()
|
||||
}
|
||||
if (err) {
|
||||
let wait = 3000 // err.code like '57P03' and such.
|
||||
if (err.code === 'ETIMEDOUT') {
|
||||
wait = 10000
|
||||
}
|
||||
|
||||
log.error({ code: err.code, message: err.message }, `DB: (Attempt ${attempt}/5) Error while checking connection status`)
|
||||
if (attempt < 5) {
|
||||
log.warn(`DB: (Attempt ${attempt}/5) Attempting again in ${wait / 1000} seconds.`)
|
||||
setTimeout(() => checkActiveConnection(attempt + 1), wait)
|
||||
} else {
|
||||
checkActiveConnection(attempt + 1)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Only way to check startup connection errors
|
||||
log.info(getConfig(currentIndex).connection, 'DB: Connecting to server')
|
||||
setTimeout(() => checkActiveConnection(), 100)
|
||||
|
||||
// Check if we're running tests while connected to
|
||||
// potential production environment.
|
||||
/* istanbul ignore if */
|
||||
if (config.get('NODE_ENV') === 'test' &&
|
||||
(config.get('knex:connection:database') !== 'kisildalur_test' ||
|
||||
config.get('knex:connection:connection'))) {
|
||||
// There is an offchance that we're running tests on
|
||||
// production database. Exit NOW!
|
||||
log.error('Critical: potentially running tests on production environment. Shutting down.')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
const shelf = bookshelf(client)
|
||||
|
||||
shelf.plugin('virtuals')
|
||||
shelf.plugin('pagination')
|
||||
|
||||
// Helper method to create models
|
||||
shelf.createModel = (attr, opts) => {
|
||||
// Create default attributes to all models
|
||||
let attributes = defaults(attr, {
|
||||
/**
|
||||
* Always include created_at and updated_at for all models default.
|
||||
*/
|
||||
hasTimestamps: true,
|
||||
|
||||
/**
|
||||
* Columns selected in get single queries.
|
||||
*/
|
||||
privateFields: ['*'],
|
||||
|
||||
/**
|
||||
* Event handler when fetch() is called. This gets called for both
|
||||
* when getSingle() or just manual fetch() is called as well as
|
||||
* when relation models through belongsTo() resources get fetched.
|
||||
*
|
||||
* @param {Model} model - The model instance if fetch() was used. For
|
||||
* belongsTo this is the relation model thingy.
|
||||
* @param {Array} columns - Array of columns to select if fetch() was used.
|
||||
* Otherwise this is null.
|
||||
* @param {Object} options - Options for the fetch. Includes the query
|
||||
* builder object.
|
||||
*/
|
||||
checkFetching(model, columns, options) {
|
||||
// First override that is_deleted always gets filtered out.
|
||||
options.query.where({ is_deleted: false })
|
||||
|
||||
// If we have columns, fetch() or getSingle is the caller and no
|
||||
// custom select() was called on the query.
|
||||
if (columns) {
|
||||
// We override columns default value of 'table_name.*' select and
|
||||
// replace it with actual fields. This allows us to hide columns in
|
||||
// public results.
|
||||
columns.splice(...[0, columns.length].concat(
|
||||
model.privateFields.map(item => `${model.tableName}.${item}`)
|
||||
))
|
||||
// If we have relatedData in the model object, then we're dealing with a
|
||||
// belongsTo relation query. If not, then we're dealing with a custom
|
||||
// fetch() with select() query.
|
||||
} else if (model.relatedData) {
|
||||
// We are dealing with belongsTo relation query. Override the default
|
||||
// 'relation_table.*' with public select columns.
|
||||
|
||||
// We override the actual value in the query because doing select()
|
||||
// does not override or replace the previous value during testing.
|
||||
let relatedColums = options.query._statements[0].value
|
||||
|
||||
// During some Model.relatedData() queries, the select statement
|
||||
// is actually hidden in the third statement so we grab that instead
|
||||
if (options.query._statements[0].grouping === 'where') {
|
||||
relatedColums = options.query._statements[2].value
|
||||
}
|
||||
|
||||
relatedColums.splice(...[0, relatedColums.length].concat(
|
||||
model.relatedData.target.publicFields.map(item => `${model.relatedData.targetTableName}.${item}`)
|
||||
))
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Event handler after a fetch() operation and finished.
|
||||
*
|
||||
* @param {Model} model - The model instance.
|
||||
* @param {Object} response - Knex query response.
|
||||
* @param {Object} options - Options for the fetched.
|
||||
*/
|
||||
checkFetched(model, response, options) {
|
||||
model._ctx = options.ctx
|
||||
},
|
||||
|
||||
/**
|
||||
* Event handler when fetchALL() is called. This gets called for both
|
||||
* when getAll() or just manual fetchAll().
|
||||
*
|
||||
* @param {CollectionBase} collection - The collection base for the model.
|
||||
* This does not contain a model
|
||||
* instance so privateFields is not
|
||||
* accessible here.
|
||||
* @param {Array} columns - Array of columns to select if fetchAll() was
|
||||
* used. Otherwise this is null.
|
||||
* @param {Object} options - Options for the fetch. Includes the query
|
||||
* builder object.
|
||||
*/
|
||||
checkFetchingCollection(collection, columns, options) {
|
||||
// I really really apologise for this.
|
||||
if (!options.query._statements[0] ||
|
||||
!options.query._statements[0].column ||
|
||||
!options.query._statements[0].column.indexOf ||
|
||||
options.query._statements[0].column.indexOf('is_deleted') === -1) {
|
||||
// First override that is_deleted always gets filtered out.
|
||||
|
||||
options.query.where(`${collection.tableName()}.is_deleted`, false)
|
||||
}
|
||||
|
||||
// If we have columns, we're dealing with a normal basic fetchAll() or
|
||||
// a getAll() caller.
|
||||
if (columns) {
|
||||
columns.splice(...[0, columns.length].concat(collection.model.publicFields))
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Event handler when fetchAll() has been called and fetched.
|
||||
*
|
||||
* @param {CollectionBase} collection - The collection that has been fetched.
|
||||
* @param {Array} columns - Array of columns to select if fetchAll() was
|
||||
* used. Otherwise this is null.
|
||||
* @param {Object} options - Options for the fetch.
|
||||
*/
|
||||
checkFetchedCollection(collection, columns, options) {
|
||||
collection.forEach(item => (item._ctx = options.ctx))
|
||||
},
|
||||
|
||||
/**
|
||||
* Event handler for hasMany relation fetching. This gets called whenever
|
||||
* hasMany related is being fetched.
|
||||
*
|
||||
* @param {CollectionBase} collection - The collection base for the model.
|
||||
* This does not contain a model
|
||||
* instance so privateFields is not
|
||||
* accessible here.
|
||||
* @param {Array} columns - Array of columns to select. This is
|
||||
* always null.
|
||||
* @param {Object} options - Options for the fetch. Includes the query
|
||||
* builder object.
|
||||
*/
|
||||
checkFetchingHasMany(collection, columns, options) {
|
||||
// First override that is_deleted always gets filtered out.
|
||||
options.query.where({ is_deleted: false })
|
||||
|
||||
// Then we override the actual value in the query because doing select()
|
||||
// does not override or replace the previous value during testing.
|
||||
let relatedColums
|
||||
if (options.query._statements[0].grouping === 'columns') {
|
||||
relatedColums = options.query._statements[0].value
|
||||
} else {
|
||||
relatedColums = options.query._statements[1].value
|
||||
}
|
||||
|
||||
relatedColums.splice(...[0, relatedColums.length]
|
||||
.concat(collection.model.publicFields.map(
|
||||
item => `${collection.relatedData.targetTableName}.${item}`
|
||||
))
|
||||
)
|
||||
|
||||
// check if pagination is being requested and we support it
|
||||
if (collection.relatedName
|
||||
&& options.ctx
|
||||
&& options.ctx.state.pagination
|
||||
&& options.ctx.state.pagination[collection.relatedName]) {
|
||||
let pagination = options.ctx.state.pagination[collection.relatedName]
|
||||
|
||||
options.query.limit(pagination.perPage).offset((pagination.page - 1) * pagination.perPage)
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Event handler for belongsTo relation fetching. This gets called whenever
|
||||
* belongsTo related is being fetched.
|
||||
*
|
||||
* @param {CollectionBase} collection - The collection base for the model.
|
||||
* This does not contain a model
|
||||
* instance so privateFields is not
|
||||
* accessible here.
|
||||
* @param {Array} columns - Array of columns to select. This is
|
||||
* always null.
|
||||
* @param {Object} options - Options for the fetch. Includes the query
|
||||
* builder object.
|
||||
*/
|
||||
checkFetchingBelongs(model, columns, options) {
|
||||
// First override that is_deleted always gets filtered out.
|
||||
options.query.where({ is_deleted: false })
|
||||
|
||||
// Then we override the actual value in the query because doing select()
|
||||
// does not override or replace the previous value during testing.
|
||||
|
||||
// The difference between belongsTo and hasMany is in belongsTo, the
|
||||
// actual 'table_name.*' value is in the second item in _statements as
|
||||
// opposed to the first.
|
||||
let relatedColums = options.query._statements[1].value
|
||||
|
||||
relatedColums.splice(...[0, relatedColums.length].concat(
|
||||
model.model.publicFields.map(item => `${model.relatedData.targetTableName}.${item}`)
|
||||
))
|
||||
|
||||
// check if pagination is being requested and we support it
|
||||
if (model.relatedName
|
||||
&& options.ctx
|
||||
&& options.ctx.state.pagination
|
||||
&& options.ctx.state.pagination[model.relatedName]) {
|
||||
let pagination = options.ctx.state.pagination[model.relatedName]
|
||||
|
||||
options.query.limit(pagination.perPage).offset((pagination.page - 1) * pagination.perPage)
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Initialize a new instance of model. This does not get called when
|
||||
* relations to this model is being fetched though.
|
||||
*/
|
||||
initialize() {
|
||||
this.on('fetching', this.checkFetching)
|
||||
this.on('fetched', this.checkFetched)
|
||||
this.on('fetching:collection', this.checkFetchingCollection)
|
||||
this.on('fetched:collection', this.checkFetchedCollection)
|
||||
},
|
||||
|
||||
/**
|
||||
* Define a hasMany relations with the model. This version as opposed to
|
||||
* the default hasMany has filtering enabled to filter is_deleted items
|
||||
* out among other things.
|
||||
*/
|
||||
hasManyFiltered(model, relatedName, foreignKey) {
|
||||
let out = this.hasMany(model, foreignKey)
|
||||
|
||||
// Hook to the fetching event on the relation
|
||||
out.on('fetching', this.checkFetchingHasMany)
|
||||
out.on('fetched', this.checkFetched)
|
||||
|
||||
// Add related name if specified to add pagination support
|
||||
out.relatedName = relatedName
|
||||
|
||||
return out
|
||||
},
|
||||
|
||||
/**
|
||||
* Define belongsToMany relations with the model. This version as opposed
|
||||
* to the default belongsToMany has filtering enabled to filter is_deleted items
|
||||
* out among other things.
|
||||
*/
|
||||
belongsToManyFiltered(model, table, foreignKey, otherKey, relatedName) {
|
||||
let out = this.belongsToMany(model, table, foreignKey, otherKey)
|
||||
|
||||
// Hook to the fetching event on the relation
|
||||
out.on('fetching', this.checkFetchingBelongs)
|
||||
out.on('fetched', this.checkFetched)
|
||||
|
||||
// Add related name if specified to add pagination support
|
||||
out.relatedName = relatedName
|
||||
|
||||
return out
|
||||
},
|
||||
})
|
||||
|
||||
// Create default options for all models
|
||||
let options = defaults(opts, {
|
||||
/**
|
||||
* Columns selected in get many queries and relation queries.
|
||||
*/
|
||||
publicFields: ['*'],
|
||||
|
||||
/**
|
||||
* Create new model object in database.
|
||||
*
|
||||
* @param {Object} data - The values the new model should have
|
||||
* @return {Model} The resulted model
|
||||
*/
|
||||
create(data) {
|
||||
return this.forge(data).save()
|
||||
},
|
||||
|
||||
/**
|
||||
* Apply basic filtering to query builder object. Basic filtering
|
||||
* applies stuff like custom filtering in the query and ordering and other stuff
|
||||
*
|
||||
* @param {Request} ctx - API Request object
|
||||
* @param {QueryBuilder} qb - knex query builder object to apply filtering on
|
||||
* @param {Object} [where={}] - Any additional filtering
|
||||
* @param {string} [orderBy=id] - property to order result by
|
||||
* @param {Object[]} [properties=[]] - Properties allowed to filter by from query
|
||||
*/
|
||||
_baseQueryAll(ctx, qb, where = {}, orderBy = 'id', properties = []) {
|
||||
let orderProperty = orderBy
|
||||
let sort = 'ASC'
|
||||
|
||||
if (orderProperty[0] === '-') {
|
||||
orderProperty = orderProperty.slice(1)
|
||||
sort = 'DESC'
|
||||
}
|
||||
|
||||
qb.where(where)
|
||||
_.forOwn(ctx.state.filter.where(properties), (value, key) => {
|
||||
if (key.startsWith('is_')) {
|
||||
qb.where(key, value === '0' ? false : true)
|
||||
} else {
|
||||
qb.where(key, 'LIKE', `%${value}%`)
|
||||
}
|
||||
})
|
||||
_.forOwn(ctx.state.filter.whereNot(properties), (value, key) => {
|
||||
if (key.startsWith('is_')) {
|
||||
qb.whereNot(key, value === '0' ? false : true)
|
||||
} else {
|
||||
qb.where(key, 'NOT LIKE', `%${value}%`)
|
||||
}
|
||||
})
|
||||
qb.orderBy(orderProperty, sort)
|
||||
},
|
||||
|
||||
/**
|
||||
* Wrapper for _baseQueryAll that can be overridden.
|
||||
*/
|
||||
baseQueryAll(ctx, qb, where, orderBy, properties) {
|
||||
return this._baseQueryAll(ctx, qb, where, orderBy, properties)
|
||||
},
|
||||
|
||||
getSingle(id, withRelated = [], require = true, ctx = null) {
|
||||
let where = { id: Number(id) || 0 }
|
||||
|
||||
return this.query({ where })
|
||||
.fetch({ require, withRelated, ctx })
|
||||
},
|
||||
|
||||
getAll(ctx, where = {}, withRelated = [], orderBy = 'id') {
|
||||
return this.query(qb => {
|
||||
this.baseQueryAll(ctx, qb, where, orderBy)
|
||||
})
|
||||
.fetchPage({
|
||||
pageSize: ctx.state.pagination.perPage,
|
||||
page: ctx.state.pagination.page,
|
||||
withRelated,
|
||||
ctx: ctx,
|
||||
})
|
||||
.then(result => {
|
||||
ctx.state.pagination.total = result.pagination.rowCount
|
||||
return result
|
||||
})
|
||||
},
|
||||
})
|
||||
|
||||
return shelf.Model.extend(attributes, options)
|
||||
}
|
||||
|
||||
shelf.safeColumns = (extra) =>
|
||||
['id', 'is_deleted', 'created_at', 'updated_at'].concat(extra || [])
|
||||
|
||||
|
||||
export default shelf
|
|
@ -1,4 +1,4 @@
|
|||
import bookshelf from '../bookshelf.mjs'
|
||||
import { createPrototype, safeColumns } from '../knex.mjs'
|
||||
import config from '../config.mjs'
|
||||
|
||||
/*
|
||||
|
@ -20,26 +20,40 @@ File model:
|
|||
|
||||
*/
|
||||
|
||||
const File = bookshelf.createModel({
|
||||
tableName: 'files',
|
||||
const baseUrl = config.get('upload:baseurl')
|
||||
|
||||
virtuals: {
|
||||
url() {
|
||||
return `${File.baseUrl}${this.get('path')}`
|
||||
},
|
||||
function FileItem(data) {
|
||||
Object.assign(this, data)
|
||||
this.url = `${baseUrl}${this.path}`
|
||||
|
||||
magnet() {
|
||||
let meta = this.get('meta')
|
||||
if (!meta.torrent) return ''
|
||||
return 'magnet:?'
|
||||
+ 'xl=' + this.get('size')
|
||||
+ '&dn=' + encodeURIComponent(meta.torrent.name)
|
||||
+ '&xt=urn:btih:' + meta.torrent.hash
|
||||
+ meta.torrent.announce.map(item => ('&tr=' + encodeURIComponent(item))).join('')
|
||||
},
|
||||
},
|
||||
}, {
|
||||
baseUrl: config.get('upload:baseurl'),
|
||||
let meta = this.meta
|
||||
if (!meta.torrent) {
|
||||
this.magnet = ''
|
||||
} else {
|
||||
this.magnet = 'magnet:?'
|
||||
+ 'xl=' + this.size
|
||||
+ '&dn=' + encodeURIComponent(meta.torrent.name)
|
||||
+ '&xt=urn:btih:' + meta.torrent.hash
|
||||
+ meta.torrent.announce.map(item => ('&tr=' + encodeURIComponent(item))).join('')
|
||||
}
|
||||
}
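// Illustrative example of the magnet link the constructor above builds; the file name,
// hash and tracker are made up.
//
//   const item = new FileItem({
//     path: '/files/episode01.mkv',
//     size: 367001600,
//     meta: {
//       torrent: {
//         name: 'episode01.mkv',
//         hash: 'c12fe1c06bba254a9dc9f519b335aa7c1367a88a',
//         announce: ['udp://tracker.example.org:1337'],
//       },
//     },
//   })
//   // item.magnet => 'magnet:?xl=367001600&dn=episode01.mkv'
//   //   + '&xt=urn:btih:c12fe1c06bba254a9dc9f519b335aa7c1367a88a'
//   //   + '&tr=udp%3A%2F%2Ftracker.example.org%3A1337'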
|
||||
|
||||
function File() {
|
||||
this.tableName = 'files'
|
||||
this.Model = FileItem
|
||||
this.publicFields = this.privateFields = safeColumns([
|
||||
'article_id',
|
||||
'filename',
|
||||
'filetype',
|
||||
'path',
|
||||
'size',
|
||||
'staff_id',
|
||||
'meta',
|
||||
])
|
||||
this.init()
|
||||
}
|
||||
|
||||
File.prototype = createPrototype({
|
||||
})
|
||||
|
||||
export default File
|
||||
export default new File()
|
||||
|
|
|
@ -53,18 +53,8 @@ export default class FileRoutes {
|
|||
})
|
||||
}
|
||||
|
||||
async getAllFiles(ctx) {
|
||||
ctx.body = await this.File.getAll(ctx)
|
||||
}
|
||||
|
||||
async removeFile(ctx) {
|
||||
let file = await this.File.getSingle(ctx.params.id)
|
||||
|
||||
file.set({
|
||||
is_deleted: true,
|
||||
})
|
||||
|
||||
await file.save()
|
||||
await this.File.updateSingle(ctx, ctx.params.id, { is_deleted: true })
|
||||
|
||||
ctx.status = 200
|
||||
}
|
||||
|
|
|
@ -1,13 +1,11 @@
|
|||
import _ from 'lodash'
|
||||
import jwt from 'jsonwebtoken'
|
||||
import koaJwt from 'koa-jwt'
|
||||
import Staff from './staff/model.mjs'
|
||||
import config from './config.mjs'
|
||||
|
||||
export default class Jwt {
|
||||
constructor(opts = {}) {
|
||||
Object.assign(this, {
|
||||
Staff: opts.Staff || Staff,
|
||||
jwt: opts.jwt || jwt,
|
||||
})
|
||||
}
|
||||
|
|
414
api/knex.mjs
Normal file
|
@ -0,0 +1,414 @@
|
|||
import _ from 'lodash'
|
||||
import knexCore from 'knex-core'
|
||||
|
||||
import config from './config.mjs'
|
||||
import defaults from './defaults.mjs'
|
||||
import log from './log.mjs'
|
||||
|
||||
const knex = knexCore(config.get('knex'))
|
||||
|
||||
const functionMap = new Map()
|
||||
let joinPostFix = 1
|
||||
|
||||
// Helper method to create models
|
||||
export function createPrototype(opts) {
|
||||
return defaults(opts, {
|
||||
knex: knex,
|
||||
|
||||
init() {
|
||||
if (!this.tableName) throw new Error('createModel was called with missing tableName')
|
||||
if (!this.Model) throw new Error('createModel was called with missing Model')
|
||||
|
||||
if (!this.includes) this.includes = {}
|
||||
if (!this.publicFields) throw new Error(this.tableName + ' was missing publicFields')
|
||||
if (!this.privateFields) throw new Error(this.tableName + ' was missing privateFields')
|
||||
|
||||
this.__includeFields = this.publicFields.map(x => x)
|
||||
|
||||
this.publicFields = this.publicFields.map(x => `${this.tableName}.${x} as ${this.tableName}.${x}`)
|
||||
if (this.publicFields !== this.privateFields) {
|
||||
this.privateFields = this.privateFields.map(x => `${this.tableName}.${x} as ${this.tableName}.${x}`)
|
||||
}
|
||||
},
|
||||
|
||||
addInclude(name, include) {
|
||||
this.includes[name] = include
|
||||
},
|
||||
|
||||
_includeBase(type, subq) {
|
||||
let self = this
|
||||
let postfix = '_' + joinPostFix++
|
||||
let table = this.tableName + postfix
|
||||
return {
|
||||
type: type,
|
||||
postfix: postfix,
|
||||
table: table,
|
||||
fields: this.__includeFields.map(x => `${table}.${x} as ${table}.${x}`),
|
||||
model: self,
|
||||
qb: function(qb) {
|
||||
return subq(self, table, qb)
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
includeHasOne(source_id, target_id) {
|
||||
return this._includeBase(1, function(self, table, qb) {
|
||||
return qb.leftOuterJoin(`${self.tableName} as ${table}`, function() {
|
||||
this.on(source_id, '=', table + '.' + target_id)
|
||||
.andOn(table + '.is_deleted', '=', knex.raw('false'))
|
||||
})
|
||||
})
|
||||
},
|
||||
|
||||
includeHasMany(source_id, target_id, subq = null) {
|
||||
return this._includeBase(2, function(self, table, qb) {
|
||||
return qb.leftOuterJoin(`${self.tableName} as ${table}`, function() {
|
||||
this.on(table + '.' + source_id, '=', target_id)
|
||||
.andOn(table + '.is_deleted', '=', knex.raw('false'))
|
||||
if (subq) {
|
||||
subq(this, self)
|
||||
}
|
||||
})
|
||||
})
|
||||
},
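// Roughly what an include expands to once query() applies it (illustrative, column
// list shortened); the joined table gets a numeric postfix alias so the same model
// can be included more than once:
//
//   // Media.includeHasOne('articles.media_id', 'id') on an articles query
//   knex('articles')
//     .where('articles.is_deleted', '=', 'false')
//     .leftOuterJoin('media as media_1', function () {
//       this.on('articles.media_id', '=', 'media_1.id')
//         .andOn('media_1.is_deleted', '=', knex.raw('false'))
//     })
//     .select(['articles.id as articles.id', 'media_1.id as media_1.id' /* , ... */])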
|
||||
|
||||
async getAllQuery(query, queryContext = null) {
|
||||
let context = (queryContext || query).queryContext()
|
||||
if (!context.tables) throw new Error('getAll was called before query')
|
||||
let tables = context.tables
|
||||
let tableMap = new Map(tables)
|
||||
|
||||
let data = await query
|
||||
|
||||
if (data.length === 0) {
|
||||
return data
|
||||
}
|
||||
|
||||
let keys = Object.keys(data[0])
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
let parts = keys[i].split('.')
|
||||
if (parts.length === 1) {
|
||||
if (parts[0] !== '__group') {
|
||||
tables[0][1].builder += `'${parts[0]}': data.${keys[i]},`
|
||||
}
|
||||
} else {
|
||||
let builder = tableMap.get(parts[0])
|
||||
if (builder) {
|
||||
builder.builder += `'${parts[1]}': data['${keys[i]}'],`
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tableMap.forEach(table => {
|
||||
table.builder += '}'
|
||||
table.fn = functionMap.get(table.builder)
|
||||
if (!table.fn) {
|
||||
table.fn = new Function('data', table.builder)
|
||||
functionMap.set(table.builder, table.fn)
|
||||
}
|
||||
})
|
||||
|
||||
let out = []
|
||||
let includesTwoSet = new Set()
|
||||
|
||||
for (let i = 0; i < data.length; i++) {
|
||||
let baseItem = null
|
||||
for (var t = 0; t < tables.length; t++) {
|
||||
let table = tables[t][1]
|
||||
let propertyName = table.include
|
||||
let formattedData = table.fn(data[i])
|
||||
|
||||
if (!formattedData) {
|
||||
if (propertyName && baseItem[propertyName] === undefined) {
|
||||
console.log('emptying')
|
||||
baseItem[propertyName] = (table.includeType.type === 1 ? null : [])
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
let row = new table.Model(table.fn(data[i]))
|
||||
let rowId = row.id
|
||||
if (table.isRoot && data[i].__group) {
|
||||
rowId = data[i].__group + '_' + row.id
|
||||
}
|
||||
|
||||
let foundItem = table.map.get(rowId)
|
||||
|
||||
// If we didn't find this item, the current table model or joined table model
|
||||
// is new, therefore we need to create it
|
||||
if (!foundItem) {
|
||||
// Create a reference to it if we're dealing with the root object
|
||||
if (table.isRoot) {
|
||||
baseItem = row
|
||||
}
|
||||
table.map.set(rowId, row)
|
||||
|
||||
if (table.isRoot) {
|
||||
// Add item to root array since this is a root array
|
||||
out.push(baseItem)
|
||||
} else if (table.includeType.type === 1) {
|
||||
// This is a single instance join for the root mode,
|
||||
// set it directly to the root
|
||||
baseItem[propertyName] = row
|
||||
} else if (table.includeType.type === 2) {
|
||||
// This is an array instance for the root model. Time to dig in.
|
||||
/* if (!baseItem[propertyName]) {
|
||||
baseItem[propertyName] = []
|
||||
} */
|
||||
if (!includesTwoSet.has(baseItem.id + '_' + propertyName + '_' + row.id)) {
|
||||
baseItem[propertyName].push(row)
|
||||
includesTwoSet.add(baseItem.id + '_' + propertyName + '_' + row.id)
|
||||
}
|
||||
}
|
||||
} else if (table.isRoot) {
|
||||
baseItem = foundItem
|
||||
} else if (propertyName) {
|
||||
if (table.includeType.type === 1 && !baseItem[propertyName]) {
|
||||
baseItem[propertyName] = foundItem
|
||||
} else if (table.includeType.type === 2 && !includesTwoSet.has(baseItem.id + '_' + propertyName + '_' + row.id)) {
|
||||
/* if (!baseItem[propertyName]) {
|
||||
baseItem[propertyName] = []
|
||||
} */
|
||||
baseItem[propertyName].push(foundItem)
|
||||
includesTwoSet.add(baseItem.id + '_' + propertyName + '_' + row.id)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return out
|
||||
},
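// Worked example (illustrative) of the regrouping getAllQuery does: a flat joined
// row such as
//   { 'articles.id': 7, 'articles.name': 'News', 'media_1.id': 3, 'media_1.filename': 'banner.png' }
// is split per table by the generated builder functions and comes out as
//   ArticleItem { id: 7, name: 'News', media: MediaItem { id: 3, filename: 'banner.png' } }
// with repeated root ids merged and hasMany includes de-duplicated into arrays.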
|
||||
|
||||
async getSingleQuery(query, require = true) {
|
||||
let data = await this.getAllQuery(query)
|
||||
if (data.length) return data[0]
|
||||
if (require) throw new Error('EmptyResponse')
|
||||
return null
|
||||
},
|
||||
|
||||
query(qb, includes = [], customFields = null, parent = null, pagination = null, paginationOrderBy = null) {
|
||||
let query
|
||||
let fields
|
||||
if (customFields === true) {
|
||||
fields = this.publicFields
|
||||
} else {
|
||||
fields = customFields ? customFields : this.publicFields
|
||||
}
|
||||
if (pagination) {
|
||||
query = knex.with(this.tableName, subq => {
|
||||
subq.select(this.tableName + '.*')
|
||||
.from(this.tableName)
|
||||
.where(this.tableName + '.is_deleted', '=', 'false')
|
||||
|
||||
qb(subq)
|
||||
subq.orderBy(pagination.orderProperty, pagination.sort)
|
||||
.limit(pagination.perPage)
|
||||
.offset((pagination.page - 1) * pagination.perPage)
|
||||
}).from(this.tableName)
|
||||
} else {
|
||||
query = knex(this.tableName).where(this.tableName + '.is_deleted', '=', 'false')
|
||||
qb(query)
|
||||
}
|
||||
let tables = parent && parent.queryContext().tables || []
|
||||
let tableMap = new Map(tables)
|
||||
if (!tables.length) {
|
||||
tables.push([this.tableName, {
|
||||
builder: 'return {',
|
||||
fn: null,
|
||||
map: new Map(),
|
||||
Model: this.Model,
|
||||
isRoot: true,
|
||||
include: null,
|
||||
includeType: {},
|
||||
}])
|
||||
}
|
||||
|
||||
query.select(fields)
|
||||
|
||||
for (let i = 0; i < includes.length; i++) {
|
||||
let includeType = this.includes[includes[i]]
|
||||
if (!includeType) {
|
||||
throw new Error(`Model ${this.tableName} was missing includes ${includes[i]}`)
|
||||
}
|
||||
includeType.qb(query).select(includeType.fields)
|
||||
|
||||
if (tableMap.has(includeType.table)) {
|
||||
continue
|
||||
}
|
||||
|
||||
if (includeType.type === 1) {
|
||||
tables[0][1].builder += `${includes[i]}: null,`
|
||||
} else {
|
||||
tables[0][1].builder += `${includes[i]}: [],`
|
||||
}
|
||||
let newTable = [
|
||||
includeType.table,
|
||||
{
|
||||
builder: `if (!data.id && !data['${includeType.table}.id']) {/*console.log('${includeType.table}', data.id, data['${includeType.table}.id']);*/return null;} return {`,
|
||||
fn: null,
|
||||
map: new Map(),
|
||||
isRoot: false,
|
||||
Model: includeType.model.Model,
|
||||
include: includes[i],
|
||||
includeType: includeType,
|
||||
}
|
||||
]
|
||||
tables.push(newTable)
|
||||
tableMap.set(newTable[0], newTable[1])
|
||||
}
|
||||
|
||||
if (pagination) {
|
||||
query.orderBy(pagination.orderProperty, pagination.sort)
|
||||
}
|
||||
|
||||
query.queryContext({ tables: tables })
|
||||
|
||||
return query
|
||||
},
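// When pagination is passed, the root table is first wrapped in a CTE so the limit
// and offset apply to the root rows before the joins multiply them; roughly
// (illustrative SQL shape):
//
//   with "articles" as (
//     select "articles".* from "articles"
//     where "articles"."is_deleted" = 'false'
//     order by "published_at" desc limit 10 offset 0
//   )
//   select ... from "articles" left outer join "media" as "media_1" on ...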
|
||||
|
||||
async _getAll(ctx, subq, includes = [], orderBy = 'id') {
|
||||
let orderProperty = orderBy
|
||||
let sort = 'ASC'
|
||||
|
||||
if (orderProperty[0] === '-') {
|
||||
orderProperty = orderProperty.slice(1)
|
||||
sort = 'DESC'
|
||||
}
|
||||
|
||||
ctx.state.pagination.sort = sort
|
||||
ctx.state.pagination.orderProperty = orderProperty
|
||||
|
||||
let [data, total] = await Promise.all([
|
||||
this.getAllQuery(this.query(qb => {
|
||||
let qbnow = qb
|
||||
if (subq) {
|
||||
qbnow = subq(qb) || qb
|
||||
}
|
||||
return qbnow
|
||||
}, includes, null, null, ctx.state.pagination)),
|
||||
(() => {
|
||||
let qb = this.knex(this.tableName)
|
||||
if (subq) {
|
||||
qb = subq(qb) || qb
|
||||
}
|
||||
qb.where(this.tableName + '.is_deleted', '=', false)
|
||||
return qb.count('* as count')
|
||||
})(),
|
||||
])
|
||||
ctx.state.pagination.total = total[0].count
|
||||
return data
|
||||
},
|
||||
|
||||
getAll(ctx, subq, includes = [], orderBy = 'id') {
|
||||
return this._getAll(ctx, subq, includes, orderBy)
|
||||
},
|
||||
|
||||
_getSingle(subq, includes = [], require = true, ctx = null) {
|
||||
return this.getSingleQuery(this.query(qb => {
|
||||
return qb
|
||||
.where(qb => {
|
||||
if (subq) subq(qb)
|
||||
})
|
||||
}, includes), require)
|
||||
},
|
||||
|
||||
getSingle(id, includes = [], require = true, ctx = null) {
|
||||
return this._getSingle(qb => qb.where(this.tableName + '.id', '=', Number(id) || 0 ), includes, require, ctx)
|
||||
},
|
||||
|
||||
async updateSingle(ctx, id, body) {
|
||||
// Fetch the item in question, making sure it exists
|
||||
let item = await this.getSingle(id, [], true, ctx)
|
||||
|
||||
// Paranoia checking
|
||||
if (typeof(item.id) !== 'number') throw new Error('Item was missing id')
|
||||
|
||||
body.updated_at = new Date()
|
||||
|
||||
// Update our item in the database
|
||||
let out = await knex(this.tableName)
|
||||
.where({ id: item.id })
|
||||
// Map out the 'as' from the private fields so it returns a clean
|
||||
// response in the body
|
||||
.update(body, this.privateFields.map(x => x.split('as')[0]))
|
||||
|
||||
// More paranoia checking
|
||||
if (out.length < 1) throw new Error('Updated item returned empty result')
|
||||
|
||||
return out[0]
|
||||
},
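// Example call (illustrative, using the Article model from article/model.mjs and a
// hypothetical id): getSingle() guards that the row exists and is not soft-deleted,
// then the update returns the fresh row via the un-aliased private fields as
// returning columns.
//
//   let article = await Article.updateSingle(ctx, 42, { name: 'Renamed article' })
//   // article => { id: 42, name: 'Renamed article', updated_at: <new Date>, ... }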
|
||||
|
||||
/**
|
||||
* Create new entry in the database.
|
||||
*
|
||||
* @param {Object} data - The values the new item should have
|
||||
* @return {Object} The resulting object
|
||||
*/
|
||||
async create(body) {
|
||||
body.created_at = new Date()
|
||||
body.updated_at = new Date()
|
||||
let out = await knex(this.tableName)
|
||||
// Map out the 'as' from the private fields so it returns a clean
|
||||
// response in the body
|
||||
.insert(body, this.privateFields.map(x => x.split('as')[0]))
|
||||
|
||||
// More paranoia checking
|
||||
if (out.length < 1) throw new Error('Inserted item returned empty result')
|
||||
|
||||
return out[0]
|
||||
},
|
||||
|
||||
/**
|
||||
* Apply basic filtering to query builder object. Basic filtering
|
||||
* applies stuff like custom filtering in the query and ordering and other stuff
|
||||
*
|
||||
* @param {Request} ctx - API Request object
|
||||
* @param {QueryBuilder} qb - knex query builder object to apply filtering on
|
||||
* @param {Object} [where={}] - Any additional filtering
|
||||
* @param {string} [orderBy=id] - property to order result by
|
||||
* @param {Object[]} [properties=[]] - Properties allowed to filter by from query
|
||||
*/
|
||||
_baseQueryAll(ctx, qb, where = {}, orderBy = 'id', properties = []) {
|
||||
let orderProperty = orderBy
|
||||
let sort = 'ASC'
|
||||
|
||||
if (orderProperty[0] === '-') {
|
||||
orderProperty = orderProperty.slice(1)
|
||||
sort = 'DESC'
|
||||
}
|
||||
|
||||
qb.where(where)
|
||||
_.forOwn(ctx.state.filter.where(properties), (value, key) => {
|
||||
if (key.startsWith('is_')) {
|
||||
qb.where(key, value === '0' ? false : true)
|
||||
} else {
|
||||
qb.where(key, 'LIKE', `%${value}%`)
|
||||
}
|
||||
})
|
||||
_.forOwn(ctx.state.filter.whereNot(properties), (value, key) => {
|
||||
if (key.startsWith('is_')) {
|
||||
qb.whereNot(key, value === '0' ? false : true)
|
||||
} else {
|
||||
qb.where(key, 'NOT LIKE', `%${value}%`)
|
||||
}
|
||||
})
|
||||
qb.orderBy(orderProperty, sort)
|
||||
},
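// Example (illustrative) of how query-string filtering lands here, assuming the
// upstream middleware has parsed the query into ctx.state.filter:
//
//   // GET /api/articles?is_featured=1&name=anime with orderBy '-published_at'
//   qb.where('is_featured', true)
//   qb.where('name', 'LIKE', '%anime%')
//   qb.orderBy('published_at', 'DESC')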
|
||||
|
||||
/*async getSingle(id, require = true, ctx = null) {
|
||||
let where = { id: Number(id) || 0 }
|
||||
|
||||
let data = await knex(this.tableName).where(where).first(this.publicFields)
|
||||
|
||||
if (!data && require) throw new Error('EmptyResponse')
|
||||
|
||||
return data
|
||||
},*/
|
||||
})
|
||||
}
|
||||
|
||||
export function safeColumns(extra) {
|
||||
return ['id', /*'is_deleted',*/ 'created_at', 'updated_at'].concat(extra || [])
|
||||
}
|
||||
/*shelf.safeColumns = (extra) =>
|
||||
['id', 'is_deleted', 'created_at', 'updated_at'].concat(extra || [])*/
|
|
@ -1,5 +1,5 @@
|
|||
import path from 'path'
|
||||
import bookshelf from '../bookshelf.mjs'
|
||||
import { createPrototype, safeColumns } from '../knex.mjs'
|
||||
import config from '../config.mjs'
|
||||
|
||||
/*
|
||||
|
@ -23,6 +23,55 @@ Media model:
|
|||
|
||||
*/
|
||||
|
||||
const baseUrl = config.get('upload:baseurl')
|
||||
|
||||
function MediaItem(data) {
|
||||
Object.assign(this, data)
|
||||
|
||||
this.small_url = `${baseUrl}${this.small_image}`
|
||||
this.medium_url = `${baseUrl}${this.medium_image}`
|
||||
this.large_url = `${baseUrl}${this.large_image}`
|
||||
this.small_url_avif = this.small_image_avif ? `${baseUrl}${this.small_image_avif}` : null
|
||||
this.medium_url_avif = this.small_image_avif ? `${baseUrl}${this.medium_image_avif}` : null
|
||||
this.large_url_avif = this.small_image_avif ? `${baseUrl}${this.large_image_avif}` : null
|
||||
this.link = `${baseUrl}${this.org_image}`
|
||||
}
|
||||
|
||||
function Media() {
|
||||
this.tableName = 'media'
|
||||
this.Model = MediaItem
|
||||
this.publicFields = this.privateFields = safeColumns([
|
||||
'filename',
|
||||
'filetype',
|
||||
'small_image',
|
||||
'medium_image',
|
||||
'large_image',
|
||||
'org_image',
|
||||
'size',
|
||||
'staff_id',
|
||||
'small_image_avif',
|
||||
'medium_image_avif',
|
||||
'large_image_avif',
|
||||
])
|
||||
this.init()
|
||||
}
|
||||
|
||||
Media.prototype = createPrototype({
|
||||
baseUrl: baseUrl,
|
||||
|
||||
getSubUrl(input, size, type = 'jpg') {
|
||||
if (!input) return input
|
||||
|
||||
let output = input
|
||||
if (path.extname(input)) {
|
||||
let ext = path.extname(input).toLowerCase()
|
||||
output = input.slice(0, -ext.length)
|
||||
}
|
||||
return `${output}.${size}.${type}`
|
||||
},
|
||||
})
|
||||
|
||||
/*
|
||||
const Media = bookshelf.createModel({
|
||||
tableName: 'media',
|
||||
|
||||
|
@ -79,6 +128,6 @@ const Media = bookshelf.createModel({
|
|||
}
|
||||
return `${output}.${size}.${type}`
|
||||
},
|
||||
})
|
||||
})*/
|
||||
|
||||
export default Media
|
||||
export default new Media()
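// Usage sketch for the getSubUrl helper above (file names are made up):
//
//   Media.getSubUrl('2021/04/cover.png', 'medium')         // => '2021/04/cover.medium.jpg'
//   Media.getSubUrl('2021/04/cover.png', 'small', 'avif')  // => '2021/04/cover.small.avif'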
|
||||
|
|
|
@ -63,13 +63,7 @@ export default class MediaRoutes {
|
|||
}
|
||||
|
||||
async removeMedia(ctx) {
|
||||
let media = await this.Media.getSingle(ctx.params.id)
|
||||
|
||||
media.set({
|
||||
is_deleted: true,
|
||||
})
|
||||
|
||||
await media.save()
|
||||
await this.Media.updateSingle(ctx, ctx.params.id, { is_deleted: true })
|
||||
|
||||
ctx.status = 200
|
||||
}
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
|
||||
import bookshelf from '../bookshelf.mjs'
|
||||
import { createPrototype, safeColumns } from '../knex.mjs'
|
||||
import Media from '../media/model.mjs'
|
||||
import Staff from '../staff/model.mjs'
|
||||
import Article from '../article/model.mjs'
|
||||
// import Staff from '../staff/model.mjs'
|
||||
// import Article from '../article/model.mjs'
|
||||
|
||||
/*
|
||||
|
||||
|
@ -25,10 +25,36 @@ Page model:
|
|||
|
||||
*/
|
||||
|
||||
const Page = bookshelf.createModel({
|
||||
tableName: 'pages',
|
||||
function PageItem(data) {
|
||||
Object.assign(this, data)
|
||||
this.children = []
|
||||
}
|
||||
|
||||
banner() {
|
||||
function Page() {
|
||||
this.tableName = 'pages'
|
||||
this.Model = PageItem
|
||||
this.includes = {
|
||||
media: Media.includeHasOne('pages.media_id', 'id'),
|
||||
banner: Media.includeHasOne('pages.banner_id', 'id'),
|
||||
}
|
||||
this.publicFields = this.privateFields = safeColumns([
|
||||
'staff_id',
|
||||
'parent_id',
|
||||
'name',
|
||||
'path',
|
||||
'description',
|
||||
'banner_id',
|
||||
'media_id',
|
||||
])
|
||||
this.init()
|
||||
}
|
||||
|
||||
Page.prototype = createPrototype({
|
||||
/* includes: {
|
||||
staff: Staff.includeHasOne('staff_id', 'id'),
|
||||
}, */
|
||||
|
||||
/*banner() {
|
||||
return this.belongsTo(Media, 'banner_id')
|
||||
},
|
||||
|
||||
|
@ -56,22 +82,44 @@ const Page = bookshelf.createModel({
|
|||
|
||||
staff() {
|
||||
return this.belongsTo(Staff, 'staff_id')
|
||||
},
|
||||
}, {
|
||||
getSingle(id, withRelated = [], require = true, ctx = null) {
|
||||
return this.query(qb => {
|
||||
qb.where({ id: Number(id) || 0 })
|
||||
.orWhere({ path: id })
|
||||
},*/
|
||||
|
||||
getSingle(id, includes = [], require = true, ctx = null) {
|
||||
return this._getSingle(qb => {
|
||||
qb.where(subq => {
|
||||
subq.where(this.tableName + '.id', '=', Number(id) || 0)
|
||||
.orWhere(this.tableName + '.path', '=', id)
|
||||
})
|
||||
.fetch({ require, withRelated, ctx })
|
||||
}, includes, require, ctx)
|
||||
},
|
||||
getTree() {
|
||||
return this.query(qb => {
|
||||
qb.where({ parent_id: null })
|
||||
qb.select(['id', 'name', 'path'])
|
||||
qb.orderBy('name', 'ASC')
|
||||
}).fetchAll({ withRelated: ['children'] })
|
||||
|
||||
async getTree() {
|
||||
let items = await this.getAllQuery(this.query(
|
||||
qb => qb.orderBy('name', 'ASC'),
|
||||
[],
|
||||
['parent_id', 'id', 'name', 'path']
|
||||
))
|
||||
|
||||
let out = []
|
||||
let map = new Map()
|
||||
for (let i = 0; i < items.length; i++) {
|
||||
if (!items[i].parent_id) {
|
||||
out.push(items[i])
|
||||
}
|
||||
map.set(items[i].id, items[i])
|
||||
}
|
||||
for (let i = 0; i < items.length; i++) {
|
||||
if (items[i].parent_id && map.has(items[i].parent_id)) {
|
||||
map.get(items[i].parent_id).children.push(items[i])
|
||||
}
|
||||
}
|
||||
return out
|
||||
},
|
||||
})
|
||||
|
||||
export default Page
|
||||
const pageInstance = new Page()
|
||||
|
||||
pageInstance.addInclude('children', pageInstance.includeHasMany('parent_id', 'pages.id'))
|
||||
pageInstance.addInclude('parent', pageInstance.includeHasOne('pages.parent_id', 'id'))
|
||||
|
||||
export default pageInstance
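// Shape sketch (illustrative page names) of what getTree() now resolves to: the flat
// page list is folded into a tree on parent_id, so rows like
//   { id: 1, parent_id: null, name: 'Anime', path: 'anime' }
//   { id: 2, parent_id: 1, name: 'Movies', path: 'anime-movies' }
// come back as
//   [ PageItem { id: 1, parent_id: null, name: 'Anime', path: 'anime',
//       children: [ PageItem { id: 2, parent_id: 1, name: 'Movies', path: 'anime-movies', children: [] } ] } ]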
|
||||
|
|
|
@ -9,16 +9,16 @@ export default class PageRoutes {
|
|||
})
|
||||
}
|
||||
|
||||
/** GET: /api/pagetree */
|
||||
async getPageTree(ctx) {
|
||||
ctx.body = await this.Page.getTree()
|
||||
}
|
||||
|
||||
/** GET: /api/pages */
|
||||
async getAllPages(ctx) {
|
||||
await this.security.ensureIncludes(ctx)
|
||||
|
||||
let filter = {}
|
||||
if (ctx.query.tree && ctx.query.tree === 'true') {
|
||||
filter.parent_id = null
|
||||
}
|
||||
|
||||
ctx.body = await this.Page.getAll(ctx, filter, ctx.state.filter.includes, 'name')
|
||||
ctx.body = await this.Page.getAll(ctx, null, ctx.state.filter.includes, 'name')
|
||||
}
|
||||
|
||||
/** GET: /api/pages/:id */
|
||||
|
@ -39,22 +39,14 @@ export default class PageRoutes {
|
|||
async updatePage(ctx) {
|
||||
await this.security.validUpdate(ctx)
|
||||
|
||||
let page = await this.Page.getSingle(ctx.params.id)
|
||||
|
||||
page.set(ctx.request.body)
|
||||
|
||||
await page.save()
|
||||
let page = await this.Page.updateSingle(ctx, ctx.params.id, ctx.request.body)
|
||||
|
||||
ctx.body = page
|
||||
}
|
||||
|
||||
/** DELETE: /api/pages/:id */
|
||||
async removePage(ctx) {
|
||||
let page = await this.Page.getSingle(ctx.params.id)
|
||||
|
||||
page.set({ is_deleted: true })
|
||||
|
||||
await page.save()
|
||||
await this.Page.updateSingle(ctx, ctx.params.id, { is_deleted: true })
|
||||
|
||||
ctx.status = 204
|
||||
}
|
||||
|
|
|
@ -2,13 +2,14 @@
|
|||
import Router from 'koa-router'
|
||||
|
||||
import access from './access/index.mjs'
|
||||
import { restrict } from './access/middleware.mjs'
|
||||
|
||||
import AuthRoutes from './authentication/routes.mjs'
|
||||
import MediaRoutes from './media/routes.mjs'
|
||||
import FileRoutes from './file/routes.mjs'
|
||||
// import MediaRoutes from './media/routes.mjs'
|
||||
// import FileRoutes from './file/routes.mjs'
|
||||
import PageRoutes from './page/routes.mjs'
|
||||
import ArticleRoutes from './article/routes.mjs'
|
||||
import StaffRoutes from './staff/routes.mjs'
|
||||
import { restrict } from './access/middleware.mjs'
|
||||
|
||||
const router = new Router()
|
||||
|
||||
|
@ -17,18 +18,19 @@ const authentication = new AuthRoutes()
router.post('/api/login/user', authentication.loginUser.bind(authentication))

// API Media
const media = new MediaRoutes()
router.get('/api/media', restrict(access.Manager), media.getAllMedia.bind(media))
router.post('/api/media', restrict(access.Manager), media.upload.bind(media))
router.del('/api/media/:id', restrict(access.Manager), media.removeMedia.bind(media))
// const media = new MediaRoutes()
// router.get('/api/media', restrict(access.Manager), media.getAllMedia.bind(media))
// router.post('/api/media', restrict(access.Manager), media.upload.bind(media))
// router.del('/api/media/:id', restrict(access.Manager), media.removeMedia.bind(media))

// API File
const file = new FileRoutes()
router.get('/api/file', restrict(access.Manager), file.getAllFiles.bind(file))
router.post('/api/articles/:articleId/file', restrict(access.Manager), file.upload.bind(file))
router.del('/api/file/:id', restrict(access.Manager), file.removeFile.bind(file))
// const file = new FileRoutes()
// router.get('/api/file', restrict(access.Manager), file.getAllFiles.bind(file))
// router.post('/api/articles/:articleId/file', restrict(access.Manager), file.upload.bind(file))
// router.del('/api/file/:id', restrict(access.Manager), file.removeFile.bind(file))

const page = new PageRoutes()
router.get('/api/pagetree', page.getPageTree.bind(page))
router.get('/api/pages', page.getAllPages.bind(page))
router.get('/api/pages/:id', page.getSinglePage.bind(page))
router.post('/api/pages', restrict(access.Manager), page.createPage.bind(page))
@ -40,14 +42,13 @@ router.get('/api/articles', restrict(access.Manager), article.getAllArticles.bin
router.get('/api/articles/public', article.getPublicAllArticles.bind(article))
router.get('/api/articles/public/:id', article.getPublicSingleArticle.bind(article))
router.get('/api/pages/:pageId/articles/public', article.getPublicAllPageArticles.bind(article))
router.get('/api/pages/:pageId/articles', restrict(access.Manager), article.getAllPageArticles.bind(article))
router.get('/api/articles/:id', restrict(access.Manager), article.getSingleArticle.bind(article))
router.post('/api/articles', restrict(access.Manager), article.createArticle.bind(article))
router.put('/api/articles/:id', restrict(access.Manager), article.updateArticle.bind(article))
router.del('/api/articles/:id', restrict(access.Manager), article.removeArticle.bind(article))

const staff = new StaffRoutes()
router.get('/api/staff', restrict(access.Admin), staff.getAllStaff.bind(staff))
router.get('/api/staff', restrict(access.Manager), staff.getAllStaff.bind(staff))
router.get('/api/staff/:id', restrict(access.Admin), staff.getSingleStaff.bind(staff))
router.post('/api/staff', restrict(access.Admin), staff.createStaff.bind(staff))
router.put('/api/staff/:id', restrict(access.Admin), staff.updateStaff.bind(staff))
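Note: every protected route above wraps its handler with restrict(access.Level). The real middleware lives in access/middleware.mjs and is not part of this hunk; below is only a hedged sketch of what a level-based Koa guard of this shape could look like. The 401/403 split and the level property on ctx.state.user are assumptions, though the serve.mjs hunk further down also checks ctx.state.user.level.

// Hypothetical sketch; the actual restrict() is defined in access/middleware.mjs.
export function restrict(minimumLevel) {
  return async function restrictMiddleware(ctx, next) {
    const user = ctx.state.user
    // No authenticated user -> 401, authenticated but too low a level -> 403.
    if (!user) ctx.throw(401, 'Authentication required')
    if (user.level < minimumLevel) ctx.throw(403, 'Insufficient access')
    await next()
  }
}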
@ -4,6 +4,7 @@ import striptags from 'striptags'

import config from './config.mjs'
import Page from './page/model.mjs'
// import Article from '../app/article/model.mjs'
import Article from './article/model.mjs'

const body = readFileSync('./public/index.html').toString()
@ -103,6 +104,7 @@ function mapPage(x) {
export async function serveIndex(ctx, path) {
  let tree = null
  let data = null
  let subdata = null
  let links = null
  let featured = null
  let url = frontend + ctx.request.url
@ -111,29 +113,26 @@ export async function serveIndex(ctx, path) {
  let title = 'NFP Moe - Anime/Manga translation group'
  let description = 'Small fansubbing and scanlation group translating and encoding our favourite shows from Japan.'
  try {
    tree = (await Page.getTree()).toJSON()
    tree.forEach(item => (
      item.children = item.children.map(x => (
        { id: x.id, name: x.name, path: x.path }
      ))
    ))
    featured = await Article.getFeatured(['media', 'banner'])
    if (featured) {
      featured = mapArticle(true, featured.toJSON(), true, false)
    }
    tree = await Page.getTree()
    let currPage = Number(ctx.query.page || '1')

    if (path === '/') {
      let currPage = Number(ctx.query.page || '1')
      data = await Article.getFrontpageArticles(currPage)
      let frontpage = await Article.getFrontpageArticles(currPage)
      featured = frontpage.featured
      data = frontpage.items.map(mapArticle.bind(null, true))

      if (data.pagination.rowCount > 10) {
      if (frontpage.total > currPage * 10) {
        links = {
          first: currPage > 1 ? { page: 1, title: 'First' } : null,
          previous: currPage > 1 ? { page: currPage - 1, title: 'Previous' } : null,
          current: { title: 'Page ' + currPage },
          next: { page: 2, title: 'Next' },
          last: { page: Math.ceil(data.pagination.rowCount / 10), title: 'Last' },
          next: { page: currPage + 1, title: 'Next' },
          last: { page: Math.ceil(frontpage.total / 10), title: 'Last' },
        }
      } else {
        links = {
          first: currPage > 1 ? { page: 1, title: 'First' } : null,
          previous: currPage > 1 ? { page: currPage - 1, title: 'Previous' } : null,
          current: { title: 'Page 1' },
        }
      }
@ -141,41 +140,64 @@ export async function serveIndex(ctx, path) {
        links.previous = { page: currPage - 1, title: 'Previous' }
        links.first = { page: 1, title: 'First' }
      }
      data = data.toJSON().map(mapArticle.bind(null, true))
    } else if (path.startsWith('/article/') || path.startsWith('/page/')) {
      let id = path.split('/')[2]
      if (id) {
        let found
        if (path.startsWith('/article/')) {
          found = await Article.getSingle(id, ['media', 'parent', 'banner', 'files', 'staff'], false, null, true)
          if (found) {
            found = mapArticle(false, found.toJSON())
          data = await Article.getSingle(id, ['media', 'parent', 'banner', 'files', 'staff'], false, null, true)
          if (data) {
            data = mapArticle(false, data)
          }
          data = found
        } else {
          found = await Page.getSingle(id, ['media', 'banner', 'children', 'parent'])
          found = mapPage(found.toJSON())
          data = found
        }
        if (found) {
          if (found.media) {
            image = found.media.large_url
            image_avif = found.media.large_url_avifl
          } else if (found.banner) {
            image = found.banner.large_url
            image_avif = found.banner.large_url_avifl
          data = await Page.getSingle(id, ['media', 'banner', 'children', 'parent'])
          data = mapPage(data)
          ctx.state.pagination = {
            perPage: 10,
            page: currPage,
          }
          if (found.description) {
            description = striptags(found.description)
          }
          if (found.parent) {
            title = found.name + ' - ' + found.parent.name + ' - NFP Moe'
          subdata = await Article.getAllFromPage(ctx, data.id, ['files', 'media'], '-published_at', true)
          subdata = subdata.map(mapArticle.bind(null, true))
          if (ctx.state.pagination.total > currPage * 10) {
            links = {
              first: currPage > 1 ? { page: 1, title: 'First' } : null,
              previous: currPage > 1 ? { page: currPage - 1, title: 'Previous' } : null,
              current: { title: 'Page ' + currPage },
              next: { page: currPage + 1, title: 'Next' },
              last: { page: Math.ceil(ctx.state.pagination.total / 10), title: 'Last' },
            }
          } else {
            title = found.name + ' - NFP Moe'
            links = {
              first: currPage > 1 ? { page: 1, title: 'First' } : null,
              previous: currPage > 1 ? { page: currPage - 1, title: 'Previous' } : null,
              current: { title: 'Page 1' },
            }
          }
        }
        if (data) {
          if (data.media) {
            image = data.media.large_url
            image_avif = data.media.large_url_avifl
          } else if (data.banner) {
            image = data.banner.large_url
            image_avif = data.banner.large_url_avifl
          }
          if (data.description) {
            description = striptags(data.description)
          }
          if (data.parent) {
            title = data.name + ' - ' + data.parent.name + ' - NFP Moe'
          } else {
            title = data.name + ' - NFP Moe'
          }
        }
      }
    }
    if (!featured) {
      featured = await Article.getFeaturedArticle(['media', 'banner'])
    }
    if (featured) {
      featured = mapArticle(true, featured, true, false)
    }
  } catch (e) {
    ctx.log.error(e)
    data = null
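Note: both the frontpage branch and the page branch assemble the same first/previous/current/next/last object by hand. A hedged sketch of a helper that could fold those two copies together; buildPaginationLinks is a name introduced here for illustration only and does not exist in this commit.

// Hypothetical helper, not part of this commit. It mirrors the link objects
// serveIndex builds inline from a row count and the current page.
function buildPaginationLinks(total, currPage, perPage = 10) {
  const links = {
    first: currPage > 1 ? { page: 1, title: 'First' } : null,
    previous: currPage > 1 ? { page: currPage - 1, title: 'Previous' } : null,
    current: { title: 'Page ' + currPage },
  }
  if (total > currPage * perPage) {
    links.next = { page: currPage + 1, title: 'Next' }
    links.last = { page: Math.ceil(total / perPage), title: 'Last' }
  }
  return links
}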
@ -185,6 +207,7 @@ export async function serveIndex(ctx, path) {
    v: config.get('CIRCLECI_VERSION'),
    tree: JSON.stringify(tree),
    data: JSON.stringify(data),
    subdata: JSON.stringify(subdata),
    links: JSON.stringify(links),
    featured: JSON.stringify(featured),
    url: url,
@ -2,15 +2,15 @@ import Koa from 'koa-lite'
import bodyParser from 'koa-bodyparser'
import cors from '@koa/cors'

import config from './api/config.mjs'
import router from './api/router.mjs'
import Jwt from './api/jwt.mjs'
import log from './api/log.mjs'
import { serve } from './api/serve.mjs'
import { mask } from './api/middlewares/mask.mjs'
import { errorHandler } from './api/error/middleware.mjs'
import { accessChecks } from './api/access/middleware.mjs'
import ParserMiddleware from './api/parser/middleware.mjs'
import config from './config.mjs'
import router from './router.mjs'
import Jwt from './jwt.mjs'
import log from './log.mjs'
import { serve } from './serve.mjs'
import { mask } from './middlewares/mask.mjs'
import { errorHandler } from './error/middleware.mjs'
import { accessChecks } from './access/middleware.mjs'
import ParserMiddleware from './parser/middleware.mjs'

const app = new Koa()
const parser = new ParserMiddleware()
@ -1,6 +1,6 @@
import bookshelf from '../bookshelf.mjs'
import { createPrototype, safeColumns } from '../knex.mjs'
import bcrypt from 'bcrypt'
import config from '../config.mjs'
/*import config from '../config.mjs'*/

/* Staff model:
{
@ -16,18 +16,19 @@ import config from '../config.mjs'

*/

const Staff = bookshelf.createModel({
  tableName: 'staff',
function StaffItem(data) {
  Object.assign(this, data)
}

  privateFields: bookshelf.safeColumns([
    'fullname',
    'email',
    'level',
  ]),
}, {
  // Hide password from any relations and include requests.
  publicFields: ['id', 'fullname'],
function Staff() {
  this.tableName = 'staff'
  this.Model = StaffItem
  this.privateFields = safeColumns(['fullname', 'email', 'level'])
  this.publicFields = ['id', 'fullname']
  this.init()
}

Staff.prototype = createPrototype({
  hash(password) {
    return new Promise((resolve, reject) =>
      bcrypt.hash(password, config.get('bcrypt'), (err, hashed) => {
@ -47,7 +48,16 @@ const Staff = bookshelf.createModel({
    )
  },

  getAll(ctx, where = {}, withRelated = [], orderBy = 'id') {
  _getSingle(subq, includes = [], require = true, ctx = null) {
    return this.getSingleQuery(this.query(qb => {
      return qb
        .where(qb => {
          if (subq) subq(qb)
        })
    }, includes, this.privateFields), require)
  },

  /* getAll(ctx, where = {}, withRelated = [], orderBy = 'id') {
    return this.query(qb => {
      this.baseQueryAll(ctx, qb, where, orderBy)
      qb.select(bookshelf.safeColumns([
@ -66,7 +76,7 @@ const Staff = bookshelf.createModel({
      ctx.state.pagination.total = result.pagination.rowCount
      return result
    })
  },
  }, */
})

export default Staff
export default new Staff()
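Note: the bookshelf.createModel call is replaced by a plain constructor whose prototype comes from createPrototype in knex.mjs, and the module now exports a singleton instance. Neither createPrototype nor its shared query helpers are shown in this diff, so the following is only a usage sketch built from the method names visible above (getAll, hash); the route shape is illustrative.

// Hypothetical usage sketch; Staff is the singleton instance exported above.
import Staff from './staff/model.mjs'

async function example(ctx) {
  // List staff members: null means no where-filter, [] means no includes.
  ctx.body = await Staff.getAll(ctx, null, [])

  // hash() is still available on the prototype for password updates.
  const hashed = await Staff.hash('correct horse battery staple')
  ctx.log.info({ hashed: Boolean(hashed) }, 'password hashed')
}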
@ -11,7 +11,7 @@ export default class StaffRoutes {

  /** GET: /api/staff */
  async getAllStaff(ctx) {
    ctx.body = await this.Staff.getAll(ctx, { }, [])
    ctx.body = await this.Staff.getAll(ctx, null, [])
  }

  /** GET: /api/staff/:id */
@ -30,22 +30,14 @@ export default class StaffRoutes {
  async updateStaff(ctx) {
    await this.security.validUpdate(ctx)

    let page = await this.Staff.getSingle(ctx.params.id)
    let staff = await this.Staff.updateSingle(ctx, ctx.params.id, ctx.request.body)

    page.set(ctx.request.body)

    await page.save()

    ctx.body = page
    ctx.body = staff
  }

  /** DELETE: /api/staff/:id */
  async removeStaff(ctx) {
    let page = await this.Staff.getSingle(ctx.params.id)

    page.set({ is_deleted: true })

    await page.save()
    await this.Staff.updateSingle(ctx, ctx.params.id, { is_deleted: true })

    ctx.status = 204
  }
@ -110,6 +110,8 @@ const EditPage = {
      this.error = 'Name is missing'
    } else if (!this.page.path) {
      this.error = 'Path is missing'
    } else {
      this.error = ''
    }
    if (this.error) return
@ -147,6 +149,7 @@ const EditPage = {
          res.media = vnode.state.page.media
          res.banner = vnode.state.page.banner
          vnode.state.page = res
          console.log(res)
        } else {
          m.route.set('/admin/pages/' + res.id)
        }
@ -49,25 +49,6 @@ exports.getAllPageArticles = function(pageId, includes) {
  })
}

exports.getAllPageArticlesPagination = function(pageId, options) {
  let extra = ''

  if (options.sort) {
    extra += '&sort=' + options.sort
  }
  if (options.per_page) {
    extra += '&perPage=' + options.per_page
  }
  if (options.page) {
    extra += '&page=' + options.page
  }
  if (options.includes) {
    extra += '&includes=' + options.includes.join(',')
  }

  return '/api/pages/' + pageId + '/articles?' + extra
}

exports.getArticle = function(id) {
  return common.sendRequest({
    method: 'GET',
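Note: getAllPageArticlesPagination, the hand-rolled query-string builder, is removed in this hunk (it also had the quirk of starting the query string with '&'). Should something equivalent be needed again, here is a hedged sketch of the same idea using URLSearchParams; the function name is invented and only the option names mirror the removed helper.

// Hypothetical replacement sketch, not part of this commit.
function pageArticlesUrl(pageId, options) {
  var params = new URLSearchParams()
  if (options.sort) params.set('sort', options.sort)
  if (options.per_page) params.set('perPage', options.per_page)
  if (options.page) params.set('page', options.page)
  if (options.includes) params.set('includes', options.includes.join(','))
  return '/api/pages/' + pageId + '/articles?' + params.toString()
}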
@ -10,6 +10,9 @@ exports.sendRequest = function(options, isPagination) {
  }

  options.extract = function(xhr) {
    if (xhr.responseText && xhr.responseText.slice(0, 9) === '<!doctype') {
      throw new Error('Expected JSON but got HTML (' + xhr.status + ': ' + this.url.split('?')[0] + ')')
    }
    let out = null
    if (pagination && xhr.status < 300) {
      let headers = {}
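Note: the new extract hook turns the cryptic JSON parse failure you get when the API answers with the index.html shell into an explicit error. A small hedged sketch of how that surfaces to a caller, assuming this module is required as common; the route used here is one of the real routes from the router hunk above, the rest is illustrative.

// Hypothetical usage sketch: the thrown error becomes the rejection reason,
// so views can show 'Expected JSON but got HTML (...)' instead of a parse error.
common.sendRequest({ method: 'GET', url: '/api/articles/public' })
  .then(function(articles) { console.log('got', articles.length, 'articles') })
  .catch(function(err) { console.error(err.message) })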
@ -71,7 +71,7 @@ exports.getAllPages = function() {
exports.getPage = function(id) {
  return common.sendRequest({
    method: 'GET',
    url: '/api/pages/' + id + '?includes=media,banner,children,news,news.media',
    url: '/api/pages/' + id + '?includes=media,banner',
  })
}

@ -7,7 +7,7 @@ exports.Tree = Tree
exports.getTree = function() {
  return common.sendRequest({
    method: 'GET',
    url: '/api/pages?tree=true&includes=children&fields=id,name,path,children(id,name,path)',
    url: '/api/pagetree',
  })
}
16
app/app.scss
@ -82,6 +82,22 @@ main {
  padding-bottom: 20px;
}

.error-wrapper {
  flex-grow: 2;
  display: flex;
  flex-direction: column;
  align-items: center;
  justify-content: center;
  background: $border;
  padding: 40px 0;

  .error {
    border: 2px dashed $secondary-dark-bg;
    padding: 10px 20px;
    font-size: 1em;
  }
}

[hidden] { display: none !important; }

article {

@ -19,6 +19,7 @@ const Article = {
  },

  fetchArticle: function(vnode) {
    this.error = ''
    this.path = m.route.param('id')
    this.showcomments = false
    this.article = {
@ -73,58 +74,65 @@ const Article = {
    return (
      this.loading ?
        m('article.article', m('div.loading-spinner'))
      : m('article.article', [
        this.article.parent ? m('div.goback', ['« ', m(m.route.Link, { href: '/page/' + this.article.parent.path }, this.article.parent.name)]) : null,
        m('header', m('h1', this.article.name)),
        m('.fr-view', [
          this.article.media
            ? m('a.cover', {
              rel: 'noopener',
              href: this.article.media.link,
            }, m('img', { src: imagePath, alt: 'Cover image for ' + this.article.name }))
            : null,
          this.article.description ? m.trust(this.article.description) : null,
          (this.article.files && this.article.files.length
            ? this.article.files.map(function(file) {
              return m(Fileinfo, { file: file })
            })
            : null),
          m('div.entrymeta', [
            'Posted ',
            (this.article.parent ? 'in' : ''),
            (this.article.parent ? m(m.route.Link, { href: '/page/' + this.article.parent.path }, this.article.parent.name) : null),
            'at ' + (this.article.published_at.replace('T', ' ').split('.')[0]).substr(0, 16),
            ' by ' + (this.article.staff && this.article.staff.fullname || 'Admin'),
      : this.error
        ? m('div.error-wrapper', m('div.error', {
          onclick: function() {
            vnode.state.error = ''
            vnode.state.fetchArticle(vnode)
          },
        }, 'Article error: ' + this.error))
        : m('article.article', [
          this.article.parent ? m('div.goback', ['« ', m(m.route.Link, { href: '/page/' + this.article.parent.path }, this.article.parent.name)]) : null,
          m('header', m('h1', this.article.name)),
          m('.fr-view', [
            this.article.media
              ? m('a.cover', {
                rel: 'noopener',
                href: this.article.media.link,
              }, m('img', { src: imagePath, alt: 'Cover image for ' + this.article.name }))
              : null,
            this.article.description ? m.trust(this.article.description) : null,
            (this.article.files && this.article.files.length
              ? this.article.files.map(function(file) {
                return m(Fileinfo, { file: file })
              })
              : null),
            m('div.entrymeta', [
              'Posted ',
              (this.article.parent ? 'in' : ''),
              (this.article.parent ? m(m.route.Link, { href: '/page/' + this.article.parent.path }, this.article.parent.name) : null),
              'at ' + (this.article.published_at.replace('T', ' ').split('.')[0]).substr(0, 16),
              ' by ' + (this.article.staff && this.article.staff.fullname || 'Admin'),
            ]),
          ]),
        ]),
        Authentication.currentUser
          ? m('div.admin-actions', [
            m('span', 'Admin controls:'),
            m(m.route.Link, { href: '/admin/articles/' + this.article.id }, 'Edit article'),
          ])
          : null,
        this.showcomments
          ? m('div.commentcontainer', [
            m('div#disqus_thread', { oncreate: function() {
              let fullhost = window.location.protocol + '//' + window.location.host
              /*eslint-disable */
              window.disqus_config = function () {
                this.page.url = fullhost + '/article/' + vnode.state.article.path
                this.page.identifier = 'article-' + vnode.state.article.id
              };
              (function() { // DON'T EDIT BELOW THIS LINE
                var d = document, s = d.createElement('script');
                s.src = 'https://nfp-moe.disqus.com/embed.js';
                s.setAttribute('data-timestamp', +new Date());
                (d.head || d.body).appendChild(s);
              })()
              /*eslint-enable */
            }}, m('div.loading-spinner')),
          Authentication.currentUser
            ? m('div.admin-actions', [
              m('span', 'Admin controls:'),
              m(m.route.Link, { href: '/admin/articles/' + this.article.id }, 'Edit article'),
            ])
            : m('button.opencomments', {
              onclick: function() { vnode.state.showcomments = true },
            }, 'Open comment discussion'),
          ])
          : null,
          this.showcomments
            ? m('div.commentcontainer', [
              m('div#disqus_thread', { oncreate: function() {
                let fullhost = window.location.protocol + '//' + window.location.host
                /*eslint-disable */
                window.disqus_config = function () {
                  this.page.url = fullhost + '/article/' + vnode.state.article.path
                  this.page.identifier = 'article-' + vnode.state.article.id
                };
                (function() { // DON'T EDIT BELOW THIS LINE
                  var d = document, s = d.createElement('script');
                  s.src = 'https://nfp-moe.disqus.com/embed.js';
                  s.setAttribute('data-timestamp', +new Date());
                  (d.head || d.body).appendChild(s);
                })()
                /*eslint-enable */
              }}, m('div.loading-spinner')),
            ])
            : m('button.opencomments', {
              onclick: function() { vnode.state.showcomments = true },
            }, 'Open comment discussion'),
      ])
    )
  },
}
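Note: the Article view gains a clickable error state: when fetchArticle fails it renders the .error-wrapper block introduced in app.scss, and a click re-runs the fetch. As a hedged aside, the same three-way branch (loading / error / content) could be factored into a tiny shared view helper; the name and signature below are invented for illustration and are not part of this commit.

// Hypothetical sketch, not in this commit: a shared loading/error/content branch.
function remoteView(state, retry, content) {
  if (state.loading) return m('article', m('div.loading-spinner'))
  if (state.error) {
    return m('div.error-wrapper', m('div.error', {
      onclick: retry,
    }, 'Error: ' + state.error))
  }
  return content()
}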
@ -9,6 +9,12 @@ m.route.set = function(path, data, options){
  window.scrollTo(0, 0)
}

/*console.log('tree', window.__nfptree)
console.log('featured', window.__nfpfeatured)
console.log('data', window.__nfpdata)
console.log('subdata', window.__nfpsubdata)
console.log('links', window.__nfplinks)*/

m.route.linkOrig = m.route.link
m.route.link = function(vnode){
  m.route.linkOrig(vnode)
@ -12,13 +12,15 @@ const Page = {
    this.lastpage = m.route.param('page') || '1'
    this.loadingnews = false

    console.log(window.__nfpdata)
    if (window.__nfpdata) {
      this.path = m.route.param('id')
      this.page = window.__nfpdata
      this.news = []
      this.newslinks = null
      this.news = window.__nfpsubdata
      this.newslinks = window.__nfplinks

      window.__nfpdata = null
      vnode.state.fetchArticles(vnode)
      window.__nfpsubdata = null
    } else {
      this.fetchPage(vnode)
    }
@ -42,12 +44,12 @@ const Page = {
      .then(function(result) {
        vnode.state.page = result
        document.title = result.name + ' - NFP Moe'
        return vnode.state.fetchArticles(vnode)
      })
      .catch(function(err) {
        vnode.state.error = err.message
      })
      .then(function() {
        return vnode.state.fetchArticles(vnode)
        vnode.state.loading = vnode.state.loadingnews = false
        m.redraw()
      })
  },
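Note: fetchPage now chains the article fetch inside the first .then and uses the trailing .then as a finally-style step, so the loading flags are cleared and a redraw is scheduled whether the request succeeded or the .catch above swallowed the error. A hedged illustration of that ordering with plain promises; the messages are placeholders, not code from this commit.

// Hypothetical illustration: a .then placed after a .catch always runs, like finally.
Promise.reject(new Error('network down'))
  .catch(function(err) { console.log('handled:', err.message) })
  .then(function() { console.log('cleanup runs either way') })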
@ -111,7 +113,14 @@ const Page = {
    return (
      this.loading ?
        m('article.page', m('div.loading-spinner'))
      : m('article.page', [
      : this.error
        ? m('div.error-wrapper', m('div.error', {
          onclick: function() {
            vnode.state.error = ''
            vnode.state.fetchPage(vnode)
          },
        }, 'Article error: ' + this.error))
        : m('article.page', [
          bannerPath ? m('.div.page-banner', { style: { 'background-image': 'url("' + bannerPath + '")' } } ) : null,
          this.page.parent
            ? m('div.goback', ['« ', m(m.route.Link, { href: '/page/' + this.page.parent.path }, this.page.parent.name)])
@ -2,17 +2,19 @@ const Fileinfo = require('./fileinfo')

const Newsitem = {
  oninit: function(vnode) {
    this.srcsetJpeg = vnode.attrs.media.small_url + ' 500w, '
      + vnode.attrs.media.medium_url + ' 800w '
    if (vnode.attrs.media.small_url_avif) {
      this.srcsetAvif = vnode.attrs.media.small_url_avif + ' 500w, '
        + vnode.attrs.media.medium_url_avif + ' 800w '
    } else {
      this.srcsetAvif = null
    if (vnode.attrs.media) {
      this.srcsetJpeg = vnode.attrs.media.small_url + ' 500w, '
        + vnode.attrs.media.medium_url + ' 800w '
      if (vnode.attrs.media.small_url_avif) {
        this.srcsetAvif = vnode.attrs.media.small_url_avif + ' 500w, '
          + vnode.attrs.media.medium_url_avif + ' 800w '
      } else {
        this.srcsetAvif = null
      }
      this.coverSizes = '(max-width: 639px) calc(100vw - 40px), '
        + '(max-width: 1000px) 300px, '
        + '400px'
    }
    this.coverSizes = '(max-width: 639px) calc(100vw - 40px), '
      + '(max-width: 1000px) 300px, '
      + '400px'
  },

  view: function(vnode) {
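Note: Newsitem now guards against articles that have no media before building the srcset strings, so oninit no longer throws when attrs.media is undefined. For context, a hedged sketch of how such srcset/sizes strings are typically consumed in a Mithril view; the picture element, class name and alt text are illustrative and not taken from this file.

// Hypothetical view sketch: how srcsetJpeg/srcsetAvif/coverSizes would be consumed.
m('picture.cover', [
  this.srcsetAvif ? m('source', { type: 'image/avif', srcset: this.srcsetAvif, sizes: this.coverSizes }) : null,
  m('img', {
    src: vnode.attrs.media && vnode.attrs.media.medium_url,
    srcset: this.srcsetJpeg,
    sizes: this.coverSizes,
    alt: 'News item cover',
  }),
])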
@ -13,7 +13,7 @@ setup().catch(async (error) => {
  // process.exit(1)
  // })
}).then(() =>
  import('./server.mjs')
  import('./api/server.mjs')
).catch(error => {
  log.error(error, 'Unknown error starting server')
})
16
migrations/20210107081810_indexes.js
Normal file
@ -0,0 +1,16 @@
/* eslint-disable */
exports.up = function(knex) {
  return Promise.all([
    knex.schema.raw('create index pages_gettree_index on pages (name asc) where not is_deleted'),
    knex.schema.raw('create index pages_featuredpublish_index on articles (published_at desc) where is_featured = true and not is_deleted'),
    knex.schema.raw('create index pages_publish_index on articles (published_at desc) where is_deleted = false'),
  ])
};

exports.down = function(knex) {
  // Drop every index created in up(), by name, so the rollback is symmetric.
  return Promise.all([
    knex.schema.raw('drop index if exists pages_gettree_index'),
    knex.schema.raw('drop index if exists pages_featuredpublish_index'),
    knex.schema.raw('drop index if exists pages_publish_index'),
  ])
};
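Note: the three partial indexes line up with the hot queries in this commit: getTree orders pages by name and skips deleted rows, while getFeaturedArticle and getFrontpageArticles order articles by published_at descending. A hedged sketch of the kind of knex query the pages_featuredpublish_index is meant to serve; the builder calls mirror the model code earlier in this diff, but the snippet itself is illustrative and assumes an initialised knex instance.

// Illustrative only: the shape of the featured-article query the partial index targets.
knex('articles')
  .where({ is_featured: true, is_deleted: false })
  .where('published_at', '<=', new Date().toISOString())
  .orderBy('published_at', 'desc')
  .first()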
@ -17,6 +17,7 @@
    }
  },
  "scripts": {
    "knex:rollback": "node scripts/rollback.mjs | bunyan",
    "lint": "eslint .",
    "start": "node --experimental-modules index.mjs",
    "build": "sass -s compressed app/app.scss public/assets/app.css && sass -s compressed app/admin.scss public/assets/admin.css && browserify -p tinyify --no-commondir -o public/assets/app.js app/index.js && browserify -p tinyify --no-commondir -o public/assets/admin.js app/admin.js",
BIN
public/assets/img/asuna_frontpage_test.avif
Normal file
BIN
public/assets/img/asuna_frontpage_test.png
Normal file
@ -29,6 +29,7 @@
      window.__nfptree = {{=it.tree}};
      window.__nfpfeatured = {{=it.featured}};
      window.__nfpdata = {{=it.data}};
      window.__nfpsubdata = {{=it.subdata}};
      window.__nfplinks = {{=it.links}};
    </script>
    <div class="maincontainer">
37
scripts/rollback.mjs
Normal file
@ -0,0 +1,37 @@
import _ from 'lodash'

import config from '../api/config.mjs'
import log from '../api/log.mjs'
import knex from 'knex-core'

// This is important for setup to run cleanly.
let knexConfig = _.cloneDeep(config.get('knex'))
knexConfig.pool = { min: 1, max: 1 }

let knexSetup = knex(knexConfig)

export default function rollback() {
  log.info(knexConfig, 'Running database rollback.')

  return knexSetup.migrate.rollback({
    directory: './migrations',
  })
  .then((result) => {
    if (result[1].length === 0) {
      return log.info('Database has been rolled back')
    }
    for (let i = 0; i < result[1].length; i++) {
      log.info('Rolled back migration', result[1][i].substr(result[1][i].lastIndexOf('\\') + 1))
    }
    return knexSetup.destroy()
  })
}

rollback()
  .catch(async (error) => {
    log.error({ code: error.code, message: error.message }, 'Error while rolling back database')
    log.error('Unable to verify database integrity.')
    process.exit(1)
  }).then(() =>
    process.exit(0)
  )