feat: tree model improvements + site uploads config

parent 714aa1eb
@@ -43,6 +43,7 @@ router.get('/_site/:siteId?/:resource', async (req, res, next) => {
  switch (req.params.resource) {
    case 'logo': {
      if (site.config.assets.logo) {
+       // TODO: Fetch from db if not in disk cache
        res.sendFile(path.join(siteAssetsPath, `logo-${site.id}.${site.config.assets.logoExt}`))
      } else {
        res.sendFile(path.join(WIKI.ROOTPATH, 'assets/_assets/logo-wikijs.svg'))
@@ -51,6 +52,7 @@ router.get('/_site/:siteId?/:resource', async (req, res, next) => {
    }
    case 'favicon': {
      if (site.config.assets.favicon) {
+       // TODO: Fetch from db if not in disk cache
        res.sendFile(path.join(siteAssetsPath, `favicon-${site.id}.${site.config.assets.faviconExt}`))
      } else {
        res.sendFile(path.join(WIKI.ROOTPATH, 'assets/_assets/logo-wikijs.svg'))
@@ -59,6 +61,7 @@ router.get('/_site/:siteId?/:resource', async (req, res, next) => {
    }
    case 'loginbg': {
      if (site.config.assets.loginBg) {
+       // TODO: Fetch from db if not in disk cache
        res.sendFile(path.join(siteAssetsPath, `loginbg-${site.id}.jpg`))
      } else {
        res.sendFile(path.join(WIKI.ROOTPATH, 'assets/_assets/bg/login.jpg'))
......
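The three new `TODO: Fetch from db if not in disk cache` comments all point at the same gap: when the generated file is missing from the assets directory on disk, the route should fall back to the consolidated `assets` table (which, per the migration below, now stores the binary itself in its `data` column) and rehydrate the cache. A minimal sketch of that fallback, assuming an Express response, a knex instance and the `assets.data` column from this commit; the helper name and cache layout are illustrative, not part of the commit:

```js
const path = require('node:path')
const fs = require('fs-extra')

// Hypothetical fallback for the TODOs above: serve from the disk cache,
// otherwise pull the binary out of the assets table and repopulate the cache.
async function serveSiteAsset (res, { knex, siteAssetsPath, assetId, fileName }) {
  const cachePath = path.join(siteAssetsPath, fileName)
  if (!(await fs.pathExists(cachePath))) {
    const asset = await knex('assets').where('id', assetId).first('data')
    if (!asset) {
      return res.sendStatus(404)
    }
    await fs.outputFile(cachePath, asset.data)
  }
  res.sendFile(cachePath)
}
```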
@@ -45,20 +45,17 @@ exports.up = async knex => {
    .createTable('assets', table => {
      table.uuid('id').notNullable().primary().defaultTo(knex.raw('gen_random_uuid()'))
      table.string('filename').notNullable()
+     table.string('hash').notNullable().index()
      table.string('ext').notNullable()
-     table.enum('kind', ['binary', 'image']).notNullable().defaultTo('binary')
+     table.boolean('isSystem').notNullable().defaultTo(false)
+     table.enum('kind', ['document', 'image', 'other']).notNullable().defaultTo('other')
      table.string('mime').notNullable().defaultTo('application/octet-stream')
      table.integer('fileSize').unsigned().comment('In kilobytes')
-     table.jsonb('metadata')
+     table.jsonb('metadata').notNullable().defaultTo('{}')
      table.timestamp('createdAt').notNullable().defaultTo(knex.fn.now())
      table.timestamp('updatedAt').notNullable().defaultTo(knex.fn.now())
-   })
-   // ASSET DATA --------------------------
-   .createTable('assetData', table => {
-     table.uuid('id').notNullable().primary()
      table.binary('data').notNullable()
      table.binary('preview')
-     table.enum('previewState', ['none', 'pending', 'ready', 'failed']).notNullable().defaultTo('none')
    })
    // AUTHENTICATION ----------------------
    .createTable('authentication', table => {
@@ -284,6 +281,7 @@ exports.up = async knex => {
      table.uuid('id').notNullable().primary().defaultTo(knex.raw('gen_random_uuid()'))
      table.specificType('folderPath', 'ltree').index().index('tree_folderpath_gist_index', { indexType: 'GIST' })
      table.string('fileName').notNullable().index()
+     table.string('hash').notNullable().index()
      table.enu('type', ['folder', 'page', 'asset']).notNullable().index()
      table.string('localeCode', 5).notNullable().defaultTo('en').index()
      table.string('title').notNullable()
@@ -588,6 +586,10 @@ exports.up = async knex => {
          showPrintBtn: true,
          baseFont: 'roboto',
          contentFont: 'roboto'
+       },
+       uploads: {
+         conflictBehavior: 'overwrite',
+         normalizeFilename: true
        }
      }
    })
......
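The new `uploads` block seeded by this migration gives each site two knobs: `conflictBehavior` (overwrite, reject, or append a timestamp) and `normalizeFilename`. The resolvers below do not consume it yet, so the following is only a sketch of how an upload path might apply these settings; the function name and the `ERR_ASSET_ALREADY_EXISTS` error code are assumptions:

```js
// Sketch only: applying site.config.uploads to an incoming filename.
function resolveUploadFilename ({ site, requestedName, alreadyExists }) {
  let name = requestedName
  if (site.config.uploads.normalizeFilename) {
    // Roughly what "Normalize Filenames" implies: lowercase, URL-friendly characters only
    name = name.toLowerCase().replace(/[^a-z0-9._-]+/g, '-')
  }
  if (!alreadyExists) {
    return name
  }
  switch (site.config.uploads.conflictBehavior) {
    case 'overwrite':
      return name
    case 'reject':
      throw new Error('ERR_ASSET_ALREADY_EXISTS') // assumed error code
    case 'new': {
      // "Append Time to Filename", per the new admin UI label
      const dot = name.lastIndexOf('.')
      const stamp = `-${Date.now()}`
      return dot > 0 ? `${name.slice(0, dot)}${stamp}${name.slice(dot)}` : `${name}${stamp}`
    }
  }
}
```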
const _ = require('lodash')
const sanitize = require('sanitize-filename')
const graphHelper = require('../../helpers/graph')
-const assetHelper = require('../../helpers/asset')
const path = require('node:path')
const fs = require('fs-extra')
const { v4: uuid } = require('uuid')

module.exports = {
  Query: {
-   async assets(obj, args, context) {
-     let cond = {
-       folderId: args.folderId === 0 ? null : args.folderId
-     }
-     if (args.kind !== 'ALL') {
-       cond.kind = args.kind.toLowerCase()
-     }
-     const folderHierarchy = await WIKI.db.assetFolders.getHierarchy(args.folderId)
-     const folderPath = folderHierarchy.map(h => h.slug).join('/')
-     const results = await WIKI.db.assets.query().where(cond)
-     return _.filter(results, r => {
-       const path = folderPath ? `${folderPath}/${r.filename}` : r.filename
-       return WIKI.auth.checkAccess(context.req.user, ['read:assets'], { path })
-     }).map(a => ({
-       ...a,
-       kind: a.kind.toUpperCase()
-     }))
-   },
-   async assetsFolders(obj, args, context) {
-     const results = await WIKI.db.assetFolders.query().where({
-       parentId: args.parentFolderId === 0 ? null : args.parentFolderId
-     })
-     const parentHierarchy = await WIKI.db.assetFolders.getHierarchy(args.parentFolderId)
-     const parentPath = parentHierarchy.map(h => h.slug).join('/')
-     return _.filter(results, r => {
-       const path = parentPath ? `${parentPath}/${r.slug}` : r.slug
-       return WIKI.auth.checkAccess(context.req.user, ['read:assets'], { path })
-     })
+   async assetById(obj, args, context) {
+     return null
    }
  },
  Mutation: {
    /**
-    * Create New Asset Folder
-    */
-   async createAssetsFolder(obj, args, context) {
-     try {
-       const folderSlug = sanitize(args.slug).toLowerCase()
-       const parentFolderId = args.parentFolderId === 0 ? null : args.parentFolderId
-       const result = await WIKI.db.assetFolders.query().where({
-         parentId: parentFolderId,
-         slug: folderSlug
-       }).first()
-       if (!result) {
-         await WIKI.db.assetFolders.query().insert({
-           slug: folderSlug,
-           name: folderSlug,
-           parentId: parentFolderId
-         })
-         return {
-           responseResult: graphHelper.generateSuccess('Asset Folder has been created successfully.')
-         }
-       } else {
-         throw new WIKI.Error.AssetFolderExists()
-       }
-     } catch (err) {
-       return graphHelper.generateError(err)
-     }
-   },
-   /**
     * Rename an Asset
     */
    async renameAsset(obj, args, context) {
@@ -113,7 +59,7 @@ module.exports = {
      }

      // Update filename + hash
-     const fileHash = assetHelper.generateHash(assetTargetPath)
+     const fileHash = '' // assetHelper.generateHash(assetTargetPath)
      await WIKI.db.assets.query().patch({
        filename: filename,
        hash: fileHash
@@ -189,41 +135,62 @@ module.exports = {
     */
    async uploadAssets(obj, args, context) {
      try {
+       // -> Get Folder
+       const folder = await WIKI.db.tree.query().findById(args.folderId)
+       if (!folder) {
+         throw new Error('ERR_INVALID_FOLDER_ID')
+       }
+       // -> Get Site
+       const site = await WIKI.db.sites.query().findById(folder.siteId)
+       if (!site) {
+         throw new Error('ERR_INVALID_SITE_ID')
+       }
        const results = await Promise.allSettled(args.files.map(async fl => {
          const { filename, mimetype, createReadStream } = await fl
          WIKI.logger.debug(`Processing asset upload ${filename} of type ${mimetype}...`)
-         if (!WIKI.extensions.ext.sharp.isInstalled) {
-           throw new Error('This feature requires the Sharp extension but it is not installed.')
-         }
-         if (!['.png', '.jpg', 'webp', '.gif'].some(s => filename.endsWith(s))) {
-           throw new Error('Invalid File Extension. Must be svg, png, jpg, webp or gif.')
-         }
-         const destFormat = mimetype.startsWith('image/svg') ? 'svg' : 'png'
-         const destFolder = path.resolve(
-           process.cwd(),
-           WIKI.config.dataPath,
-           `assets`
-         )
-         const destPath = path.join(destFolder, `logo-${args.id}.${destFormat}`)
-         await fs.ensureDir(destFolder)
-         // -> Resize
-         await WIKI.extensions.ext.sharp.resize({
-           format: destFormat,
-           inputStream: createReadStream(),
-           outputPath: destPath,
-           height: 72
+         // Format filename
+         const formattedFilename = ''
+
+         // Save asset to DB
+         const asset = await WIKI.db.knex('assets').insert({
+
+         }).returning('id')
+
+         // Add to tree
+         await WIKI.db.knex('tree').insert({
+           id: asset.id,
+           folderPath: folder.folderPath ? `${folder.folderPath}.${folder.fileName}` : folder.fileName,
+           fileName: formattedFilename,
+           type: 'asset',
+           localeCode: ''
          })
-         // -> Save logo meta to DB
-         const site = await WIKI.db.sites.query().findById(args.id)
-         if (!site.config.assets.logo) {
-           site.config.assets.logo = uuid()
+
+         // Create thumbnail
+         if (!['.png', '.jpg', 'webp', '.gif'].some(s => filename.endsWith(s))) {
+           if (!WIKI.extensions.ext.sharp.isInstalled) {
+             WIKI.logger.warn('Cannot generate asset thumbnail because the Sharp extension is not installed.')
+           } else {
+             const destFormat = mimetype.startsWith('image/svg') ? 'svg' : 'png'
+             const destFolder = path.resolve(
+               process.cwd(),
+               WIKI.config.dataPath,
+               `assets`
+             )
+             const destPath = path.join(destFolder, `asset-${site.id}-${hash}.${destFormat}`)
+             await fs.ensureDir(destFolder)
+             // -> Resize
+             await WIKI.extensions.ext.sharp.resize({
+               format: destFormat,
+               inputStream: createReadStream(),
+               outputPath: destPath,
+               height: 72
+             })
+           }
          }
-         site.config.assets.logoExt = destFormat
-         await WIKI.db.sites.query().findById(args.id).patch({ config: site.config })
-         await WIKI.db.sites.reloadCache()
          // -> Save image data to DB
          const imgBuffer = await fs.readFile(destPath)
          await WIKI.db.knex('assetData').insert({
@@ -254,9 +221,4 @@ module.exports = {
      }
    }
  }
- // File: {
- //   folder(fl) {
- //     return fl.getFolder()
- //   }
- // }
}
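Several pieces of the rewritten `uploadAssets` are still stubbed out above (`formattedFilename` is empty, the `assets` insert has no columns, and `hash` / `destPath` are not defined in the thumbnail and read-back steps). A hedged sketch of what those pieces could look like, built only from columns and helpers introduced elsewhere in this commit; the require paths and wiring are assumptions, not the author's final implementation:

```js
const path = require('node:path')
const sanitize = require('sanitize-filename')
const commonHelper = require('../../helpers/common') // path assumed relative to the resolver

// Assemble the row for the consolidated assets table plus its tree hash.
function buildAssetRow ({ site, folder, filename, mimetype, fileBuffer, userId }) {
  const formattedFilename = site.config.uploads.normalizeFilename
    ? sanitize(filename).toLowerCase()
    : sanitize(filename)
  const folderPath = folder.folderPath ? `${folder.folderPath}.${folder.fileName}` : folder.fileName
  const hash = commonHelper.generateHash(`${commonHelper.decodeTreePath(folderPath)}/${formattedFilename}`)
  return {
    formattedFilename,
    hash,
    assetRow: {
      filename: formattedFilename,
      hash,
      ext: path.extname(formattedFilename),
      kind: mimetype.startsWith('image/') ? 'image' : 'other',
      mime: mimetype,
      fileSize: Math.ceil(fileBuffer.byteLength / 1024),
      data: fileBuffer,
      authorId: userId,
      siteId: site.id
    }
  }
}
```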
@@ -147,7 +147,7 @@ module.exports = {
  /**
   * UPLOAD LOGO
   */
- async uploadSiteLogo (obj, args) {
+ async uploadSiteLogo (obj, args, context) {
    try {
      const { filename, mimetype, createReadStream } = await args.image
      WIKI.logger.info(`Processing site logo ${filename} of type ${mimetype}...`)
@@ -182,9 +182,18 @@ module.exports = {
      await WIKI.db.sites.reloadCache()
      // -> Save image data to DB
      const imgBuffer = await fs.readFile(destPath)
-     await WIKI.db.knex('assetData').insert({
+     await WIKI.db.knex('assets').insert({
        id: site.config.assets.logo,
-       data: imgBuffer
+       filename: `_logo.${destFormat}`,
+       hash: '_logo',
+       ext: `.${destFormat}`,
+       isSystem: true,
+       kind: 'image',
+       mime: (destFormat === 'svg') ? 'image/svg' : 'image/png',
+       fileSize: Math.ceil(imgBuffer.byteLength / 1024),
+       data: imgBuffer,
+       authorId: context.req.user.id,
+       siteId: site.id
      }).onConflict('id').merge()
      WIKI.logger.info('New site logo processed successfully.')
      return {
@@ -198,7 +207,7 @@ module.exports = {
  /**
   * UPLOAD FAVICON
   */
- async uploadSiteFavicon (obj, args) {
+ async uploadSiteFavicon (obj, args, context) {
    try {
      const { filename, mimetype, createReadStream } = await args.image
      WIKI.logger.info(`Processing site favicon ${filename} of type ${mimetype}...`)
@@ -234,9 +243,18 @@ module.exports = {
      await WIKI.db.sites.reloadCache()
      // -> Save image data to DB
      const imgBuffer = await fs.readFile(destPath)
-     await WIKI.db.knex('assetData').insert({
+     await WIKI.db.knex('assets').insert({
        id: site.config.assets.favicon,
-       data: imgBuffer
+       filename: `_favicon.${destFormat}`,
+       hash: '_favicon',
+       ext: `.${destFormat}`,
+       isSystem: true,
+       kind: 'image',
+       mime: (destFormat === 'svg') ? 'image/svg' : 'image/png',
+       fileSize: Math.ceil(imgBuffer.byteLength / 1024),
+       data: imgBuffer,
+       authorId: context.req.user.id,
+       siteId: site.id
      }).onConflict('id').merge()
      WIKI.logger.info('New site favicon processed successfully.')
      return {
@@ -250,7 +268,7 @@ module.exports = {
  /**
   * UPLOAD LOGIN BG
   */
- async uploadSiteLoginBg (obj, args) {
+ async uploadSiteLoginBg (obj, args, context) {
    try {
      const { filename, mimetype, createReadStream } = await args.image
      WIKI.logger.info(`Processing site login bg ${filename} of type ${mimetype}...`)
@@ -283,9 +301,18 @@ module.exports = {
      }
      // -> Save image data to DB
      const imgBuffer = await fs.readFile(destPath)
-     await WIKI.db.knex('assetData').insert({
+     await WIKI.db.knex('assets').insert({
        id: site.config.assets.loginBg,
-       data: imgBuffer
+       filename: '_loginbg.jpg',
+       hash: '_loginbg',
+       ext: '.jpg',
+       isSystem: true,
+       kind: 'image',
+       mime: 'image/jpg',
+       fileSize: Math.ceil(imgBuffer.byteLength / 1024),
+       data: imgBuffer,
+       authorId: context.req.user.id,
+       siteId: site.id
      }).onConflict('id').merge()
      WIKI.logger.info('New site login bg processed successfully.')
      return {
......
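With `assetData` gone, the three system images (logo, favicon, login background) are upserted straight into `assets`, keyed by the stable UUID kept in `site.config.assets.*`; knex's `onConflict('id').merge()` turns the insert into `INSERT ... ON CONFLICT (id) DO UPDATE` on PostgreSQL. The shared pattern, condensed into one illustrative helper (the helper name is not part of the commit):

```js
// Illustrative only: the upsert pattern now shared by uploadSiteLogo,
// uploadSiteFavicon and uploadSiteLoginBg.
async function upsertSystemImage (knex, { id, siteId, authorId, baseName, ext, mime, buffer }) {
  await knex('assets').insert({
    id,                                            // UUID from site.config.assets.*
    filename: `${baseName}${ext}`,                 // e.g. _logo.svg, _favicon.png, _loginbg.jpg
    hash: baseName,                                // system assets reuse the base name as their hash
    ext,
    isSystem: true,
    kind: 'image',
    mime,
    fileSize: Math.ceil(buffer.byteLength / 1024), // stored in kilobytes, per the migration comment
    data: buffer,
    authorId,
    siteId
  }).onConflict('id').merge()
}
```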
@@ -68,7 +68,7 @@ module.exports = {
        }
      }
      // -> Include root items
-     if (args.includeRootItems) {
+     if (args.includeRootFolders) {
        builder.orWhere({
          folderPath: '',
          type: 'folder'
@@ -98,7 +98,8 @@ module.exports = {
      createdAt: item.createdAt,
      updatedAt: item.updatedAt,
      ...(item.type === 'folder') && {
-       childrenCount: item.meta?.children || 0
+       childrenCount: item.meta?.children || 0,
+       isAncestor: item.folderPath.length < parentPath.length
      }
    }))
  },
......
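`isAncestor` is computed with a plain string-length comparison on the ltree paths: ancestors pulled in by `includeAncestors` always have a shorter `folderPath` than the folder being browsed, while its direct children share the same `folderPath`. Worked through by hand with hypothetical paths:

```js
// Hypothetical rows returned while browsing the folder whose ltree path is 'docs.install'
const parentPath = 'docs.install'
const items = [
  { fileName: 'docs', folderPath: '' },              // ancestor pulled in by includeAncestors
  { fileName: 'install', folderPath: 'docs' },       // ancestor
  { fileName: 'linux', folderPath: 'docs.install' }  // direct child
]
for (const item of items) {
  console.log(item.fileName, item.folderPath.length < parentPath.length)
}
// docs true, install true, linux false -> the client can keep ancestors out of the file list
```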
@@ -3,34 +3,30 @@
# ===============================================

extend type Query {
- assets(
-   folderId: Int!
-   kind: AssetKind!
+ assetById(
+   id: UUID!
  ): [AssetItem]
- assetsFolders(
-   parentFolderId: Int!
- ): [AssetFolder]
}

extend type Mutation {
- createAssetsFolder(
-   parentFolderId: Int!
-   slug: String!
-   name: String
- ): DefaultResponse
  renameAsset(
-   id: Int!
+   id: UUID!
    filename: String!
  ): DefaultResponse
  deleteAsset(
-   id: Int!
+   id: UUID!
  ): DefaultResponse
+ """
+ Upload one or more assets.
+ Must provide either `folderId` or a combination of `folderPath`, `locale` and `siteId`.
+ """
  uploadAssets(
-   siteId: UUID!
+   folderId: UUID
+   folderPath: String
+   locale: String
+   siteId: UUID
    files: [Upload!]!
  ): DefaultResponse
@@ -42,27 +38,20 @@ extend type Mutation {
# -----------------------------------------------

type AssetItem {
- id: Int!
- filename: String!
- ext: String!
- kind: AssetKind!
- mime: String!
- fileSize: Int!
- metadata: String
- createdAt: Date!
- updatedAt: Date!
- folder: AssetFolder
+ id: UUID
+ filename: String
+ ext: String
+ kind: AssetKind
+ mime: String
+ fileSize: Int
+ metadata: JSON
+ createdAt: Date
+ updatedAt: Date
  author: User
}

-type AssetFolder {
- id: Int!
- slug: String!
- name: String
-}

enum AssetKind {
- IMAGE
- BINARY
- ALL
+ document
+ image
+ other
}
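For reference, a query shaped against the reworked schema above: assets are now addressed by UUID, `metadata` is typed as JSON, and `AssetKind` values are lowercase nouns. The selection set is taken verbatim from `AssetItem` (note that the `assetById` resolver is still a stub returning null at this point in the commit):

```js
import gql from 'graphql-tag'

// Illustrative query document; variables are supplied by the caller.
export const ASSET_BY_ID = gql`
  query assetById ($id: UUID!) {
    assetById (id: $id) {
      id
      filename
      ext
      kind
      mime
      fileSize
      metadata
      createdAt
      updatedAt
    }
  }
`
```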
@@ -65,6 +65,7 @@ type Site {
  robots: SiteRobots
  features: SiteFeatures
  defaults: SiteDefaults
+ uploads: SiteUploads
  locale: String
  localeNamespaces: [String]
  localeNamespacing: Boolean
@@ -93,6 +94,11 @@ type SiteDefaults {
  tocDepth: PageTocDepth
}

+type SiteUploads {
+ conflictBehavior: SiteUploadConflictBehavior
+ normalizeFilename: Boolean
+}
+
type SiteLocale {
  locale: String
  autoUpdate: Boolean
@@ -142,6 +148,12 @@ enum SiteReasonForChangeMode {
  required
}

+enum SiteUploadConflictBehavior {
+ overwrite
+ reject
+ new
+}
+
type SiteCreateResponse {
  operation: Operation
  site: Site
@@ -161,6 +173,7 @@ input SiteUpdateInput {
  robots: SiteRobotsInput
  features: SiteFeaturesInput
  defaults: SiteDefaultsInput
+ uploads: SiteUploadsInput
  theme: SiteThemeInput
}
@@ -204,3 +217,8 @@ input SiteThemeInput {
  baseFont: String
  contentFont: String
}
+
+input SiteUploadsInput {
+ conflictBehavior: SiteUploadConflictBehavior
+ normalizeFilename: Boolean
+}
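The same block is mirrored on the input side so the admin UI can round-trip it. A minimal sketch of the variables a client would send; the surrounding mutation that accepts `SiteUpdateInput` is not part of this diff, so only the `uploads` shape below is taken from the schema and the ids are placeholders:

```js
// Only the SiteUploadsInput shape is grounded in the schema above.
const variables = {
  id: '00000000-0000-0000-0000-000000000000', // site UUID (illustrative)
  patch: {
    uploads: {
      conflictBehavior: 'reject',  // SiteUploadConflictBehavior: overwrite | reject | new
      normalizeFilename: true
    }
  }
}
```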
@@ -3,18 +3,32 @@
# ===============================================

extend type Query {
+ """
+ Browse the tree.
+ Must provide either `parentId` or a combination of `parentPath` and `locale`.
+ """
  tree(
    siteId: UUID!
    parentId: UUID
    parentPath: String
+   locale: String
    types: [TreeItemType]
    limit: Int
    offset: Int
    orderBy: TreeOrderBy
    orderByDirection: OrderByDirection
+   """
+   How many levels of children to include. Defaults to 1.
+   """
    depth: Int
+   """
+   Include all parent folders up to root
+   """
    includeAncestors: Boolean
-   includeRootItems: Boolean
+   """
+   Include all folders at root level
+   """
+   includeRootFolders: Boolean
  ): [TreeItem]
  folderById(
    id: UUID!
@@ -72,16 +86,24 @@ enum TreeOrderBy {
  updatedAt
}

-type TreeItemFolder {
+interface TreeItem {
+ id: UUID
+ folderPath: String
+ fileName: String
+ title: String
+}
+
+type TreeItemFolder implements TreeItem {
  id: UUID
  childrenCount: Int
  depth: Int
  fileName: String
  folderPath: String
  title: String
+ isAncestor: Boolean
}

-type TreeItemPage {
+type TreeItemPage implements TreeItem {
  id: UUID
  createdAt: Date
  depth: Int
@@ -93,7 +115,7 @@ type TreeItemPage {
  updatedAt: Date
}

-type TreeItemAsset {
+type TreeItemAsset implements TreeItem {
  id: UUID
  createdAt: Date
  depth: Int
@@ -105,5 +127,3 @@ type TreeItemAsset {
  title: String
  updatedAt: Date
}
-
-union TreeItem = TreeItemFolder | TreeItemPage | TreeItemAsset
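Turning `TreeItem` from a union into an interface lets clients select the shared fields (`id`, `folderPath`, `fileName`, `title`) once instead of repeating them in every inline fragment, which is exactly what the file manager query later in this commit does. A compact example of the new shape, with illustrative variables:

```js
import gql from 'graphql-tag'

// Shared fields come from the TreeItem interface; per-type fields stay in inline fragments.
export const BROWSE_TREE = gql`
  query browseTree ($siteId: UUID!, $parentPath: String, $locale: String) {
    tree (
      siteId: $siteId
      parentPath: $parentPath
      locale: $locale
      includeAncestors: true
      includeRootFolders: true
    ) {
      __typename
      id
      folderPath
      fileName
      title
      ... on TreeItemFolder {
        childrenCount
        isAncestor
      }
      ... on TreeItemPage {
        updatedAt
      }
    }
  }
`
```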
const crypto = require('crypto')
const path = require('path')
module.exports = {
/**
* Generate unique hash from page
*/
generateHash(assetPath) {
return crypto.createHash('sha1').update(assetPath).digest('hex')
},
getPathInfo(assetPath) {
return path.parse(assetPath.toLowerCase())
}
}
const _ = require('lodash')
+const crypto = require('node:crypto')

module.exports = {
  /* eslint-disable promise/param-names */
@@ -30,6 +31,33 @@ module.exports = {
    }
  },
+ /**
+  * Decode a tree path
+  *
+  * @param {string} str String to decode
+  * @returns Decoded tree path
+  */
+ decodeTreePath (str) {
+   return str.replaceAll('_', '-').replaceAll('.', '/')
+ },
+ /**
+  * Encode a tree path
+  *
+  * @param {string} str String to encode
+  * @returns Encoded tree path
+  */
+ encodeTreePath (str) {
+   return str?.toLowerCase()?.replaceAll('-', '_')?.replaceAll('/', '.') || ''
+ },
+ /**
+  * Generate SHA-1 Hash of a string
+  *
+  * @param {string} str String to hash
+  * @returns Hashed string
+  */
+ generateHash (str) {
+   return crypto.createHash('sha1').update(str).digest('hex')
+ },
  /**
   * Get default value of type
   *
   * @param {any} type primitive type name
......
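The encode/decode pair maps URL-style paths onto PostgreSQL ltree labels (dots as separators, dashes swapped for underscores), and `generateHash` gives every tree row a stable SHA-1 of its full decoded path, which the tree model below stores in the new `hash` column. A quick round-trip, assuming the helpers are exported from the common helper module as shown:

```js
const commonHelper = require('./server/helpers/common') // path is illustrative

const encoded = commonHelper.encodeTreePath('Docs/Getting-Started')
// -> 'docs.getting_started' (lowercased, '/' becomes '.', '-' becomes '_')

const decoded = commonHelper.decodeTreePath(encoded)
// -> 'docs/getting-started' (original casing is not restored)

const hash = commonHelper.generateHash(`${decoded}/intro`)
// -> 40-character SHA-1 hex digest of 'docs/getting-started/intro'
```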
const Model = require('objection').Model
const _ = require('lodash')
/**
* Users model
*/
module.exports = class AssetFolder extends Model {
static get tableName() { return 'assetFolders' }
static get jsonSchema () {
return {
type: 'object',
properties: {
id: {type: 'integer'},
name: {type: 'string'},
slug: {type: 'string'}
}
}
}
static get relationMappings() {
return {
parent: {
relation: Model.BelongsToOneRelation,
modelClass: AssetFolder,
join: {
from: 'assetFolders.folderId',
to: 'assetFolders.id'
}
}
}
}
/**
* Get full folder hierarchy starting from specified folder to root
*
* @param {Number} folderId Id of the folder
*/
static async getHierarchy (folderId) {
let hier
if (WIKI.config.db.type === 'mssql') {
hier = await WIKI.db.knex.with('ancestors', qb => {
qb.select('id', 'name', 'slug', 'parentId').from('assetFolders').where('id', folderId).unionAll(sqb => {
sqb.select('a.id', 'a.name', 'a.slug', 'a.parentId').from('assetFolders AS a').join('ancestors', 'ancestors.parentId', 'a.id')
})
}).select('*').from('ancestors')
} else {
hier = await WIKI.db.knex.withRecursive('ancestors', qb => {
qb.select('id', 'name', 'slug', 'parentId').from('assetFolders').where('id', folderId).union(sqb => {
sqb.select('a.id', 'a.name', 'a.slug', 'a.parentId').from('assetFolders AS a').join('ancestors', 'ancestors.parentId', 'a.id')
})
}).select('*').from('ancestors')
}
// The ancestors are from children to grandparents, must reverse for correct path order.
return _.reverse(hier)
}
/**
* Get full folder paths
*/
static async getAllPaths () {
const all = await WIKI.db.assetFolders.query()
let folders = {}
all.forEach(fld => {
_.set(folders, fld.id, fld.slug)
let parentId = fld.parentId
while (parentId !== null || parentId > 0) {
const parent = _.find(all, ['id', parentId])
_.set(folders, fld.id, `${parent.slug}/${_.get(folders, fld.id)}`)
parentId = parent.parentId
}
})
return folders
}
}
@@ -3,7 +3,6 @@ const moment = require('moment')
const path = require('path')
const fs = require('fs-extra')
const _ = require('lodash')
-const assetHelper = require('../helpers/asset')

/**
 * Users model
@@ -16,7 +15,7 @@ module.exports = class Asset extends Model {
      type: 'object',
      properties: {
-       id: {type: 'integer'},
+       id: {type: 'string'},
        filename: {type: 'string'},
        hash: {type: 'string'},
        ext: {type: 'string'},
@@ -77,18 +76,16 @@ module.exports = class Asset extends Model {
  static async upload(opts) {
    const fileInfo = path.parse(opts.originalname)
-   const fileHash = assetHelper.generateHash(opts.assetPath)

    // Check for existing asset
    let asset = await WIKI.db.assets.query().where({
-     hash: fileHash,
+     // hash: fileHash,
      folderId: opts.folderId
    }).first()

    // Build Object
    let assetRow = {
      filename: opts.originalname,
-     hash: fileHash,
      ext: fileInfo.ext,
      kind: _.startsWith(opts.mimetype, 'image/') ? 'image' : 'binary',
      mime: opts.mimetype,
@@ -138,11 +135,11 @@ module.exports = class Asset extends Model {
    }

    // Move temp upload to cache
-   if (opts.mode === 'upload') {
-     await fs.move(opts.path, path.resolve(WIKI.ROOTPATH, WIKI.config.dataPath, `cache/${fileHash}.dat`), { overwrite: true })
-   } else {
-     await fs.copy(opts.path, path.resolve(WIKI.ROOTPATH, WIKI.config.dataPath, `cache/${fileHash}.dat`), { overwrite: true })
-   }
+   // if (opts.mode === 'upload') {
+   //   await fs.move(opts.path, path.resolve(WIKI.ROOTPATH, WIKI.config.dataPath, `cache/${fileHash}.dat`), { overwrite: true })
+   // } else {
+   //   await fs.copy(opts.path, path.resolve(WIKI.ROOTPATH, WIKI.config.dataPath, `cache/${fileHash}.dat`), { overwrite: true })
+   // }

    // Add to Storage
    if (!opts.skipStorage) {
@@ -165,8 +162,8 @@ module.exports = class Asset extends Model {
  static async getAsset(assetPath, res) {
    try {
-     const fileInfo = assetHelper.getPathInfo(assetPath)
-     const fileHash = assetHelper.generateHash(assetPath)
+     const fileInfo = '' // assetHelper.getPathInfo(assetPath)
+     const fileHash = '' // assetHelper.generateHash(assetPath)
      const cachePath = path.resolve(WIKI.ROOTPATH, WIKI.config.dataPath, `cache/${fileHash}.dat`)
      // Force unsafe extensions to download
......
@@ -341,12 +341,11 @@ module.exports = class Page extends Model {
    // -> Add to tree
    const pathParts = page.path.split('/')
-   await WIKI.db.knex('tree').insert({
+   await WIKI.db.tree.addPage({
      id: page.id,
-     folderPath: _.initial(pathParts).join('/'),
+     parentPath: _.initial(pathParts).join('/'),
      fileName: _.last(pathParts),
-     type: 'page',
-     localeCode: page.localeCode,
+     locale: page.localeCode,
      title: page.title,
      meta: {
        authorId: page.authorId,
......
const Model = require('objection').Model
const _ = require('lodash')
+const commonHelper = require('../helpers/common')

const rePathName = /^[a-z0-9-]+$/
const reTitle = /^[^<>"]+$/
@@ -61,6 +63,95 @@ module.exports = class Tree extends Model {
  }

  /**
+  * Get a Folder
+  *
+  * @param {Object} args - Fetch Properties
+  * @param {string} [args.id] - UUID of the folder
+  * @param {string} [args.path] - Path of the folder
+  * @param {string} [args.locale] - Locale code of the folder (when using path)
+  * @param {string} [args.siteId] - UUID of the site in which the folder is (when using path)
+  * @param {boolean} [args.createIfMissing] - Create the folder and its ancestors if missing (when using path)
+  */
+ static async getFolder ({ id, path, locale, siteId, createIfMissing = false }) {
+   // Get by ID
+   if (id) {
+     const parent = await WIKI.db.knex('tree').where('id', id).first()
+     if (!parent) {
+       throw new Error('ERR_NONEXISTING_FOLDER_ID')
+     }
+     return parent
+   } else {
+     // Get by path
+     const parentPath = commonHelper.encodeTreePath(path)
+     const parentPathParts = parentPath.split('.')
+     const parentFilter = {
+       folderPath: _.dropRight(parentPathParts).join('.'),
+       fileName: _.last(parentPathParts)
+     }
+     const parent = await WIKI.db.knex('tree').where({
+       ...parentFilter,
+       locale,
+       siteId
+     }).first()
+     if (parent) {
+       return parent
+     } else if (createIfMissing) {
+       return WIKI.db.tree.createFolder({
+         parentPath: parentFilter.folderPath,
+         pathName: parentFilter.fileName,
+         title: parentFilter.fileName,
+         locale,
+         siteId
+       })
+     } else {
+       throw new Error('ERR_NONEXISTING_FOLDER_PATH')
+     }
+   }
+ }
+
+ /**
+  * Add Page Entry
+  *
+  * @param {Object} args - New Page Properties
+  * @param {string} [args.parentId] - UUID of the parent folder
+  * @param {string} [args.parentPath] - Path of the parent folder
+  * @param {string} args.pathName - Path name of the page to add
+  * @param {string} args.title - Title of the page to add
+  * @param {string} args.locale - Locale code of the page to add
+  * @param {string} args.siteId - UUID of the site in which the page will be added
+  */
+ static async addPage ({ id, parentId, parentPath, fileName, title, locale, siteId, meta = {} }) {
+   const folder = (parentId || parentPath) ? await WIKI.db.tree.getFolder({
+     parentId,
+     parentPath,
+     locale,
+     siteId,
+     createIfMissing: true
+   }) : {
+     folderPath: '',
+     fileName: ''
+   }
+   const folderPath = commonHelper.decodeTreePath(folder.folderPath ? `${folder.folderPath}.${folder.fileName}` : folder.fileName)
+   const fullPath = folderPath ? `${folderPath}/${fileName}` : fileName
+
+   WIKI.logger.debug(`Adding page ${fullPath} to tree...`)
+
+   const pageEntry = await WIKI.db.knex('tree').insert({
+     id,
+     folderPath,
+     fileName,
+     type: 'page',
+     title: title,
+     hash: commonHelper.generateHash(fullPath),
+     localeCode: locale,
+     siteId,
+     meta
+   }).returning('*')
+   return pageEntry[0]
+ }
+
+ /**
   * Create New Folder
   *
   * @param {Object} args - New Folder Properties
@@ -82,8 +173,8 @@ module.exports = class Tree extends Model {
      throw new Error('ERR_INVALID_TITLE')
    }

+   parentPath = commonHelper.encodeTreePath(parentPath)
    WIKI.logger.debug(`Creating new folder ${pathName}...`)
-   parentPath = parentPath?.replaceAll('/', '.')?.replaceAll('-', '_') || ''
    const parentPathParts = parentPath.split('.')
    const parentFilter = {
      folderPath: _.dropRight(parentPathParts).join('.'),
@@ -134,10 +225,12 @@ module.exports = class Tree extends Model {
      })
      for (const ancestor of _.differenceWith(expectedAncestors, existingAncestors, (expAnc, exsAnc) => expAnc.folderPath === exsAnc.folderPath && expAnc.fileName === exsAnc.fileName)) {
        WIKI.logger.debug(`Creating missing parent folder ${ancestor.fileName} at path /${ancestor.folderPath}...`)
+       const newAncestorFullPath = ancestor.folderPath ? `${commonHelper.decodeTreePath(ancestor.folderPath)}/${ancestor.fileName}` : ancestor.fileName
        const newAncestor = await WIKI.db.knex('tree').insert({
          ...ancestor,
          type: 'folder',
          title: ancestor.fileName,
+         hash: commonHelper.generateHash(newAncestorFullPath),
          localeCode: locale,
          siteId: siteId,
          meta: {
@@ -147,24 +240,25 @@ module.exports = class Tree extends Model {
        // Parent didn't exist until now, assign it
        if (!parent && ancestor.folderPath === parentFilter.folderPath && ancestor.fileName === parentFilter.fileName) {
-         parent = newAncestor
+         parent = newAncestor[0]
        }
      }
    }

    // Create folder
-   WIKI.logger.debug(`Creating new folder ${pathName} at path /${parentPath}...`)
-   await WIKI.db.knex('tree').insert({
+   const fullPath = parentPath ? `${commonHelper.decodeTreePath(parentPath)}/${pathName}` : pathName
+   const folder = await WIKI.db.knex('tree').insert({
      folderPath: parentPath,
      fileName: pathName,
      type: 'folder',
      title: title,
+     hash: commonHelper.generateHash(fullPath),
      localeCode: locale,
      siteId: siteId,
      meta: {
        children: 0
      }
-   })
+   }).returning('*')

    // Update parent ancestor count
    if (parent) {
@@ -175,6 +269,10 @@ module.exports = class Tree extends Model {
        }
      })
    }
+
+   WIKI.logger.debug(`Created folder ${folder[0].id} successfully.`)
+
+   return folder[0]
  }

  /**
@@ -231,9 +329,11 @@ module.exports = class Tree extends Model {
      })

      // Rename the folder itself
+     const fullPath = folder.folderPath ? `${commonHelper.decodeTreePath(folder.folderPath)}/${pathName}` : pathName
      await WIKI.db.knex('tree').where('id', folder.id).update({
        fileName: pathName,
-       title: title
+       title: title,
+       hash: commonHelper.generateHash(fullPath)
      })
    } else {
      // Update the folder title only
......
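`getFolder` and `addPage` centralize the ltree bookkeeping (path encoding, hashing, lazy creation of missing ancestors) that callers such as the page model now rely on. A hedged usage sketch with illustrative values; note that `addPage` forwards `parentId` / `parentPath` to `getFolder`, which destructures `id` / `path`, so those argument names likely still need to be reconciled:

```js
// Illustrative: resolve (or lazily create) a folder by path before attaching content to it.
const folder = await WIKI.db.tree.getFolder({
  path: 'docs/install',   // encoded internally to the ltree path 'docs.install'
  locale: 'en',
  siteId: site.id,        // site is assumed to be in scope
  createIfMissing: true   // missing ancestors are created with hashed full paths
})
```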
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 40 40" width="80px" height="80px"><path fill="#fff" d="M13.5 38.5L13.5 26.5 1.5 26.5 1.5 1.5 26.5 1.5 26.5 13.5 38.5 13.5 38.5 38.5z"/><path fill="#4788c7" d="M26,2v11v1h1h11v24H14V27v-1h-1H2V2H26 M27,1H1v26h12v12h26V13H27V1L27,1z"/><path fill="#4788c7" d="M33 31L28 26 31 23 23 23 23 31 26 28 31 33zM14 12L9 7 7 9 12 14 9 17 17 17 17 9zM20.849 19.859l-.99.99c-.194.194-.513.194-.707 0l0 0c-.194-.194-.194-.513 0-.707l.99-.99c.194-.194.513-.194.707 0l0 0C21.043 19.346 21.043 19.664 20.849 19.859zM17.849 22.859l-.99.99c-.194.194-.513.194-.707 0l0 0c-.194-.194-.194-.513 0-.707l.99-.99c.194-.194.513-.194.707 0l0 0C18.043 22.346 18.043 22.664 17.849 22.859zM14.849 25.859l-.99.99c-.194.194-.513.194-.707 0h0c-.194-.194-.194-.513 0-.707l.99-.99c.194-.194.513-.194.707 0h0C15.043 25.346 15.043 25.664 14.849 25.859zM23.849 16.859l-.99.99c-.194.194-.513.194-.707 0l0 0c-.194-.194-.194-.513 0-.707l.99-.99c.194-.194.513-.194.707 0l0 0C24.043 16.346 24.043 16.664 23.849 16.859zM26.849 13.859l-.99.99c-.194.194-.513.194-.707 0v0c-.194-.194-.194-.513 0-.707l.99-.99c.194-.194.513-.194.707 0v0C27.043 13.346 27.043 13.664 26.849 13.859z"/></svg>
\ No newline at end of file
@@ -512,35 +512,34 @@ async function loadTree ({ parentId = null, parentPath = null, types, initLoad =
    query loadTree (
      $siteId: UUID!
      $parentId: UUID
+     $parentPath: String
      $types: [TreeItemType]
+     $includeAncestors: Boolean
+     $includeRootFolders: Boolean
    ) {
      tree (
        siteId: $siteId
        parentId: $parentId
+       parentPath: $parentPath
        types: $types
+       includeAncestors: $includeAncestors
+       includeRootFolders: $includeRootFolders
      ) {
        __typename
+       id
+       folderPath
+       fileName
+       title
        ... on TreeItemFolder {
-         id
-         folderPath
-         fileName
-         title
          childrenCount
+         isAncestor
        }
        ... on TreeItemPage {
-         id
-         folderPath
-         fileName
-         title
          createdAt
          updatedAt
          editor
        }
        ... on TreeItemAsset {
-         id
-         folderPath
-         fileName
-         title
          createdAt
          updatedAt
          fileSize
@@ -551,7 +550,10 @@ async function loadTree ({ parentId = null, parentPath = null, types, initLoad =
    variables: {
      siteId: siteStore.id,
      parentId,
-     types
+     parentPath,
+     types,
+     includeAncestors: initLoad,
+     includeRootFolders: initLoad
    },
    fetchPolicy: 'network-only'
  })
@@ -579,7 +581,7 @@ async function loadTree ({ parentId = null, parentPath = null, types, initLoad =
  }

  // -> File List
- if (parentId === state.currentFolderId) {
+ if (parentId === state.currentFolderId && !item.isAncestor) {
    state.fileList.push({
      id: item.id,
      type: 'folder',
@@ -682,7 +684,7 @@ function renameFolder (folderId) {
    }
  }).onOk(() => {
    treeComp.value.resetLoaded()
-   loadTree({ parentId: folderId })
+   loadTree({ parentId: folderId, initLoad: true })
  })
}
......
@@ -251,7 +251,7 @@ async function loadTree ({ parentId = null, parentPath = null, types, initLoad =
      $parentPath: String
      $types: [TreeItemType]
      $includeAncestors: Boolean
-     $includeRootItems: Boolean
+     $includeRootFolders: Boolean
    ) {
      tree (
        siteId: $siteId
@@ -259,7 +259,7 @@ async function loadTree ({ parentId = null, parentPath = null, types, initLoad =
        parentPath: $parentPath
        types: $types
        includeAncestors: $includeAncestors
-       includeRootItems: $includeRootItems
+       includeRootFolders: $includeRootFolders
      ) {
        __typename
        ... on TreeItemFolder {
@@ -287,7 +287,7 @@ async function loadTree ({ parentId = null, parentPath = null, types, initLoad =
      parentPath,
      types,
      includeAncestors: initLoad,
-     includeRootItems: initLoad
+     includeRootFolders: initLoad
    },
    fetchPolicy: 'network-only'
  })
......
@@ -1614,5 +1614,13 @@
  "fileman.copyURLSuccess": "URL has been copied to the clipboard.",
  "fileman.folderRename": "Rename Folder",
  "fileman.renameFolderInvalidData": "One or more fields are invalid.",
- "fileman.renameFolderSuccess": "Folder renamed successfully."
+ "fileman.renameFolderSuccess": "Folder renamed successfully.",
+ "admin.general.uploads": "Uploads",
+ "admin.general.uploadConflictBehavior": "Upload Conflict Behavior",
+ "admin.general.uploadConflictBehaviorHint": "How should uploads for a file that already exists be handled?",
+ "admin.general.uploadConflictBehaviorOverwrite": "Overwrite",
+ "admin.general.uploadConflictBehaviorReject": "Reject",
+ "admin.general.uploadConflictBehaviorNew": "Append Time to Filename",
+ "admin.general.uploadNormalizeFilename": "Normalize Filenames",
+ "admin.general.uploadNormalizeFilenameHint": "Automatically transform filenames to a standard URL-friendly format."
}
@@ -407,6 +407,46 @@ q-page.admin-general
          )
    //- -----------------------
+   //- Uploads
+   //- -----------------------
+   q-card.shadow-1.q-pb-sm.q-mt-md(v-if='state.config.uploads')
+     q-card-section
+       .text-subtitle1 {{t('admin.general.uploads')}}
+     q-item
+       blueprint-icon(icon='merge-files')
+       q-item-section
+         q-item-label {{t(`admin.general.uploadConflictBehavior`)}}
+         q-item-label(caption) {{t(`admin.general.uploadConflictBehaviorHint`)}}
+       q-item-section
+         q-select(
+           outlined
+           v-model='state.config.uploads.conflictBehavior'
+           :options='uploadConflictBehaviors'
+           option-value='value'
+           option-label='label'
+           emit-value
+           map-options
+           dense
+           options-dense
+           :virtual-scroll-slice-size='1000'
+           :aria-label='t(`admin.general.uploadConflictBehavior`)'
+           )
+     q-separator.q-my-sm(inset)
+     q-item(tag='label')
+       blueprint-icon(icon='rename')
+       q-item-section
+         q-item-label {{t(`admin.general.uploadNormalizeFilename`)}}
+         q-item-label(caption) {{t(`admin.general.uploadNormalizeFilenameHint`)}}
+       q-item-section(avatar)
+         q-toggle(
+           v-model='state.config.uploads.normalizeFilename'
+           color='primary'
+           checked-icon='las la-check'
+           unchecked-icon='las la-times'
+           :aria-label='t(`admin.general.uploadNormalizeFilename`)'
+           )
+   //- -----------------------
    //- SEO
    //- -----------------------
    q-card.shadow-1.q-pb-sm.q-mt-md(v-if='state.config.robots')
@@ -561,6 +601,11 @@ const timeFormats = [
  { value: '12h', label: t('admin.general.defaultTimeFormat12h') },
  { value: '24h', label: t('admin.general.defaultTimeFormat24h') }
]
+const uploadConflictBehaviors = [
+ { value: 'overwrite', label: t('admin.general.uploadConflictBehaviorOverwrite') },
+ { value: 'reject', label: t('admin.general.uploadConflictBehaviorReject') },
+ { value: 'new', label: t('admin.general.uploadConflictBehaviorNew') }
+]
const timezones = Intl.supportedValuesOf('timeZone')
@@ -601,6 +646,10 @@ async function load () {
        pageExtensions
        logoText
        sitemap
+       uploads {
+         conflictBehavior
+         normalizeFilename
+       }
        robots {
          index
          follow
@@ -669,6 +718,10 @@ async function save () {
      pageExtensions: state.config.pageExtensions ?? '',
      logoText: state.config.logoText ?? false,
      sitemap: state.config.sitemap ?? false,
+     uploads: {
+       conflictBehavior: state.config.uploads?.conflictBehavior ?? 'overwrite',
+       normalizeFilename: state.config.uploads?.normalizeFilename ?? false
+     },
      robots: {
        index: state.config.robots?.index ?? false,
        follow: state.config.robots?.follow ?? false
......