diff --git a/.prettierrc.json b/.prettierrc.json index 4bf9889d7..c72ffac8e 100644 --- a/.prettierrc.json +++ b/.prettierrc.json @@ -1,11 +1,11 @@ { - "printWidth": 80, - "tabWidth": 2, - "singleQuote": true, - "bracketSpacing": false, - "semi": false, - "useTabs": false, - "endOfLine": "lf", - "trailingComma": "none", - "arrowParens": "avoid" + "printWidth": 80, + "tabWidth": 2, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "useTabs": false, + "endOfLine": "lf", + "trailingComma": "none", + "arrowParens": "avoid" } diff --git a/apps/web/content/pages/docs/fields/custom-fields.json b/apps/web/content/pages/docs/fields/custom-fields.json index e8e75490c..798b10108 100644 --- a/apps/web/content/pages/docs/fields/custom-fields.json +++ b/apps/web/content/pages/docs/fields/custom-fields.json @@ -48,7 +48,7 @@ { "id": "tjXTNIX1E2w3fts1zXG9Z", "type": "CodeBlock", - "code": "import {alinea} from 'alinea'\nimport {Field, Hint, Label, Shape} from 'alinea/core'\nimport {InputLabel, InputState, useInput} from 'alinea/editor'\n\nexport interface RangeFieldOptions {\n min?: number\n max?: number\n}\n\nexport interface RangeField extends Field.Scalar {\n label: Label\n options?: RangeFieldOptions\n}\n\n// The constructor function is used to create fields in our schema\n// later on. It is usually passed a label and options.\nexport function range(label: Label, options?: RangeFieldOptions): RangeField {\n return {\n shape: Shape.Scalar(label),\n label,\n options,\n view: RangeInput,\n hint: Hint.Number()\n }\n}\n\ninterface RangeInputProps {\n state: InputState>\n field: RangeField\n}\n\n// To view our field we can create a React component. \n// This component can call the useInput hook to receive the\n// current value and a method to update it.\nfunction RangeInput({state, field}: RangeInputProps) {\n const [value = 5, setValue] = useInput(state)\n const {min = 0, max = 10} = field.options || {}\n return (\n \n setValue(Number(e.target.value))} \n />\n \n )\n}" + "code": "import {alinea} from 'alinea'\nimport {Field, Hint, Label, Shape} from 'alinea/core'\nimport {InputLabel, InputState, useInput} from 'alinea/editor'\n\nexport interface RangeFieldOptions {\n min?: number\n max?: number\n}\n\nexport interface RangeField extends Field.Scalar {\n label: Label\n options?: RangeFieldOptions\n}\n\n// The constructor function is used to create fields in our schema\n// later on. It is usually passed a label and options.\nexport function range(label: Label, options?: RangeFieldOptions): RangeField {\n return {\n shape: new ScalarShape(label),\n label,\n options,\n view: RangeInput,\n hint: Hint.Number()\n }\n}\n\ninterface RangeInputProps {\n state: InputState>\n field: RangeField\n}\n\n// To view our field we can create a React component. 
\n// This component can call the useInput hook to receive the\n// current value and a method to update it.\nfunction RangeInput({state, field}: RangeInputProps) {\n const [value = 5, setValue] = useInput(state)\n const {min = 0, max = 10} = field.options || {}\n return (\n \n setValue(Number(e.target.value))} \n />\n \n )\n}" }, { "type": "paragraph", @@ -68,7 +68,7 @@ { "id": "yhfKjT9ITmHHD0N5JRU4G", "type": "ExampleBlock", - "code": "import {alinea} from 'alinea'\nimport {Field, Hint, Label, Shape} from 'alinea/core'\nimport {InputLabel, InputState, useInput} from 'alinea/editor'\n\nexport interface RangeFieldOptions {\n min?: number\n max?: number\n}\n\nexport interface RangeField extends Field.Scalar {\n label: Label\n options?: RangeFieldOptions\n}\n\n// The constructor function is used to create fields in our schema\n// later on. It is usually passed a label and options.\nexport function range(label: Label, options?: RangeFieldOptions): RangeField {\n return {\n shape: Shape.Scalar(label),\n label,\n options,\n view: RangeInput,\n hint: Hint.Number()\n }\n}\n\ninterface RangeInputProps {\n state: InputState>\n field: RangeField\n}\n\n// To view our field we can create a React component. \n// This component can call the useInput hook to receive the\n// current value and a method to update it.\nfunction RangeInput({state, field}: RangeInputProps) {\n const [value = 5, setValue] = useInput(state)\n const {min = 0, max = 10} = field.options || {}\n return (\n \n setValue(Number(e.target.value))} \n />\n \n )\n}\n\nexport default alinea.type('My type', {\n range: range('A range field', {min: 0, max: 20})\n})" + "code": "import {alinea} from 'alinea'\nimport {Field, Hint, Label, Shape} from 'alinea/core'\nimport {InputLabel, InputState, useInput} from 'alinea/editor'\n\nexport interface RangeFieldOptions {\n min?: number\n max?: number\n}\n\nexport interface RangeField extends Field.Scalar {\n label: Label\n options?: RangeFieldOptions\n}\n\n// The constructor function is used to create fields in our schema\n// later on. It is usually passed a label and options.\nexport function range(label: Label, options?: RangeFieldOptions): RangeField {\n return {\n shape: new ScalarShape(label),\n label,\n options,\n view: RangeInput,\n hint: Hint.Number()\n }\n}\n\ninterface RangeInputProps {\n state: InputState>\n field: RangeField\n}\n\n// To view our field we can create a React component. 
\n// This component can call the useInput hook to receive the\n// current value and a method to update it.\nfunction RangeInput({state, field}: RangeInputProps) {\n const [value = 5, setValue] = useInput(state)\n const {min = 0, max = 10} = field.options || {}\n return (\n \n setValue(Number(e.target.value))} \n />\n \n )\n}\n\nexport default alinea.type('My type', {\n range: range('A range field', {min: 0, max: 20})\n})" }, { "type": "heading", diff --git a/apps/web/content/pages/docs/fields/intro.json b/apps/web/content/pages/docs/fields/intro.json index c97c38f63..6f0c9a9a5 100644 --- a/apps/web/content/pages/docs/fields/intro.json +++ b/apps/web/content/pages/docs/fields/intro.json @@ -27,7 +27,7 @@ { "id": "HeTPIvCg4LofpD5C23eYi", "type": "ExampleBlock", - "code": "import {alinea} from 'alinea'\nimport {Field, Hint, Label, Shape} from 'alinea/core'\nimport {InputLabel, InputState, useInput} from 'alinea/editor'\n\nexport interface RangeFieldOptions {\n min?: number\n max?: number\n help?: Label\n}\n\nexport interface RangeField extends Field.Scalar {\n label: Label\n options?: RangeFieldOptions\n}\n\n// The constructor function is used to create fields in our schema\n// later on. It is usually passed a label and options.\nexport function range(label: Label, options?: RangeFieldOptions): RangeField {\n return {\n shape: Shape.Scalar(label),\n label,\n options,\n view: RangeInput,\n hint: Hint.Number()\n }\n}\n\ninterface RangeInputProps {\n state: InputState>\n field: RangeField\n}\n\n// To view our field we can create a React component. \n// This component can call the useInput hook to receive the\n// current value and a method to update it.\nfunction RangeInput({state, field}: RangeInputProps) {\n const [value = 5, setValue] = useInput(state)\n const {min = 0, max = 10, help} = field.options || {}\n return (\n \n setValue(Number(e.target.value))} \n />\n \n )\n}\n\nexport default alinea.type('Kitchen sink',\n alinea.tabs(\n alinea.tab('Basic fields', {\n title: alinea.text('Text field'),\n path: alinea.path('Path field', {\n help: 'Creates a slug of the value of another field'\n }),\n richText: alinea.richText('Rich text field'),\n select: alinea.select('Select field', {\n a: 'Option a',\n b: 'Option b'\n }),\n number: alinea.number('Number field', {\n minValue: 0,\n maxValue: 10\n }),\n check: alinea.check('Check field', {label: 'Check me please'}),\n date: alinea.date('Date field'),\n code: alinea.code('Code field')\n }),\n alinea.tab('Link fields', {\n externalLink: alinea.url('External link'),\n entry: alinea.entry('Internal link'),\n linkMultiple: alinea.link.multiple('Mixed links, multiple'),\n image: alinea.entry('Image link'),\n file: alinea.entry('File link')\n }),\n alinea.tab('List fields', {\n list: alinea.list('My list field', {\n schema: alinea.schema({\n Text: alinea.type('Text', {\n title: alinea.text('Item title'),\n text: alinea.richText('Item body text')\n }),\n Image: alinea.type('Image', {\n image: alinea.image('Image')\n })\n })\n }) \n }),\n alinea.tab('Inline fields', {\n street: alinea.text('Street', {width: 0.6, inline: true, multiline: true}),\n number: alinea.text('Number', {width: 0.2, inline: true}),\n box: alinea.text('Box', {width: 0.2, inline: true}),\n zip: alinea.text('Zipcode', {width: 0.2, inline: true}),\n city: alinea.text('City', {width: 0.4, inline: true}),\n country: alinea.text('Country', {\n width: 0.4,\n inline: true\n })\n }),\n alinea.tab('Custom fields', {\n range: range('Range field', {\n help: 'See the custom field guide'\n }) \n })\n 
)\n)" + "code": "import {alinea} from 'alinea'\nimport {Field, Hint, Label, Shape} from 'alinea/core'\nimport {InputLabel, InputState, useInput} from 'alinea/editor'\n\nexport interface RangeFieldOptions {\n min?: number\n max?: number\n help?: Label\n}\n\nexport interface RangeField extends Field.Scalar {\n label: Label\n options?: RangeFieldOptions\n}\n\n// The constructor function is used to create fields in our schema\n// later on. It is usually passed a label and options.\nexport function range(label: Label, options?: RangeFieldOptions): RangeField {\n return {\n shape: new ScalarShape(label),\n label,\n options,\n view: RangeInput,\n hint: Hint.Number()\n }\n}\n\ninterface RangeInputProps {\n state: InputState>\n field: RangeField\n}\n\n// To view our field we can create a React component. \n// This component can call the useInput hook to receive the\n// current value and a method to update it.\nfunction RangeInput({state, field}: RangeInputProps) {\n const [value = 5, setValue] = useInput(state)\n const {min = 0, max = 10, help} = field.options || {}\n return (\n \n setValue(Number(e.target.value))} \n />\n \n )\n}\n\nexport default alinea.type('Kitchen sink',\n alinea.tabs(\n alinea.tab('Basic fields', {\n title: alinea.text('Text field'),\n path: alinea.path('Path field', {\n help: 'Creates a slug of the value of another field'\n }),\n richText: alinea.richText('Rich text field'),\n select: alinea.select('Select field', {\n a: 'Option a',\n b: 'Option b'\n }),\n number: alinea.number('Number field', {\n minValue: 0,\n maxValue: 10\n }),\n check: alinea.check('Check field', {label: 'Check me please'}),\n date: alinea.date('Date field'),\n code: alinea.code('Code field')\n }),\n alinea.tab('Link fields', {\n externalLink: alinea.url('External link'),\n entry: alinea.entry('Internal link'),\n linkMultiple: alinea.link.multiple('Mixed links, multiple'),\n image: alinea.entry('Image link'),\n file: alinea.entry('File link')\n }),\n alinea.tab('List fields', {\n list: alinea.list('My list field', {\n schema: alinea.schema({\n Text: alinea.type('Text', {\n title: alinea.text('Item title'),\n text: alinea.richText('Item body text')\n }),\n Image: alinea.type('Image', {\n image: alinea.image('Image')\n })\n })\n }) \n }),\n alinea.tab('Inline fields', {\n street: alinea.text('Street', {width: 0.6, inline: true, multiline: true}),\n number: alinea.text('Number', {width: 0.2, inline: true}),\n box: alinea.text('Box', {width: 0.2, inline: true}),\n zip: alinea.text('Zipcode', {width: 0.2, inline: true}),\n city: alinea.text('City', {width: 0.4, inline: true}),\n country: alinea.text('Country', {\n width: 0.4,\n inline: true\n })\n }),\n alinea.tab('Custom fields', {\n range: range('Range field', {\n help: 'See the custom field guide'\n }) \n })\n )\n)" }, { "type": "heading", diff --git a/apps/web/src/cms.tsx b/apps/web/src/cms.tsx index 927f2e1dc..5bc5ac480 100644 --- a/apps/web/src/cms.tsx +++ b/apps/web/src/cms.tsx @@ -22,7 +22,7 @@ export const main = alinea.workspace('Alinea', { export const cms = createNextCMS({ dashboard: { - dashboardUrl: process.env.NODE_ENV === 'development' ? 
'/' : '/admin.html',
+    dashboardUrl: '/admin.html',
     handlerUrl: '/api/cms',
     staticFile: 'public/admin.html'
   },
diff --git a/dev.js b/dev.js
index cfc362680..bf8fb0bb9 100644
--- a/dev.js
+++ b/dev.js
@@ -5,8 +5,10 @@ import path from 'node:path'
 import sade from 'sade'
 
 async function run({production, dir, config}) {
+  const forceProduction = process.env.ALINEA_CLOUD_URL
   dotenv.config({path: findConfig('.env')})
-  process.env.NODE_ENV = production ? 'production' : 'development'
+  process.env.NODE_ENV =
+    forceProduction || production ? 'production' : 'development'
   const {serve} = await import('alinea/cli/Serve')
   return serve({
     alineaDev: true,
diff --git a/src/auth/passwordless/PasswordLessAuth.server.ts b/src/auth/passwordless/PasswordLessAuth.server.ts
index e82012eb3..7e1eb9591 100644
--- a/src/auth/passwordless/PasswordLessAuth.server.ts
+++ b/src/auth/passwordless/PasswordLessAuth.server.ts
@@ -1,4 +1,4 @@
-import {Handler, router} from 'alinea/backend/router/Router'
+import {Route, router} from 'alinea/backend/router/Router'
 import {Auth, Connection, HttpError, Outcome, User} from 'alinea/core'
 import {sign, verify} from 'alinea/core/util/JWT'
 import type {Transporter} from 'nodemailer'
@@ -23,12 +23,12 @@ const LoginBody = object({
 // provided in the options to keep state.
 export class PasswordLessAuth implements Auth.Server {
-  handler: Handler
+  router: Route
   users = new WeakMap()
 
   constructor(protected options: PasswordLessAuthOptions) {
     const matcher = router.startAt(Connection.routes.base)
-    this.handler = router(
+    this.router = router(
      matcher
        .post(Connection.routes.base + '/auth.passwordless')
        .map(router.parseJson)
diff --git a/src/backend.ts b/src/backend.ts
index f536a0f01..101d26a8f 100644
--- a/src/backend.ts
+++ b/src/backend.ts
@@ -2,7 +2,6 @@ export * from './backend/Database.js'
 export * from './backend/FS.js'
 export * from './backend/Handler.js'
 export * from './backend/Media.js'
-export * from './backend/Server.js'
 export * from './backend/Target.js'
 export * from './backend/loader/JsonLoader.js'
 export * from './backend/util/JWTPreviews.js'
diff --git a/src/backend/Database.test.ts b/src/backend/Database.test.ts
new file mode 100644
index 000000000..960002833
--- /dev/null
+++ b/src/backend/Database.test.ts
@@ -0,0 +1,163 @@
+import {
+  CMS,
+  Entry,
+  EntryPhase,
+  EntryRow,
+  Schema,
+  Type,
+  createId,
+  slugify
+} from 'alinea/core'
+import {entryChildrenDir, entryFilepath} from 'alinea/core/EntryFilenames'
+import {Mutation, MutationType} from 'alinea/core/Mutation'
+import {createEntryRow} from 'alinea/core/util/EntryRows'
+import {test} from 'uvu'
+import * as assert from 'uvu/assert'
+import {createExample} from './test/Example.js'
+
+async function entry(
+  cms: CMS,
+  type: Type,
+  data: Partial = {title: 'Entry'},
+  parent?: EntryRow
+): Promise {
+  const typeNames = Schema.typeNames(cms.schema)
+  const title = data.title ?? 'Entry'
+  const details = {
+    entryId: createId(),
+    phase: EntryPhase.Published,
+    type: typeNames.get(type)!,
+    title,
+    path: data.path ?? slugify(title),
+    seeded: false,
+    workspace: 'main',
+    root: 'pages',
+    level: 0,
+    parent: parent?.entryId ?? null,
+    locale: null,
+    index: 'a0',
+    i18nId: createId(),
+    modifiedAt: 0,
+    active: true,
+    main: true,
+    data: data.data ?? {},
+    searchableText: ''
+  }
+  const parentPaths = parent?.childrenDir.split('/').filter(Boolean) ?? []
+  const filePath = entryFilepath(cms, details, parentPaths)
+  const childrenDir = entryChildrenDir(cms, details, parentPaths)
+  const row = {
+    ...details,
+    filePath,
+    childrenDir,
+    parentDir: childrenDir.split('/').slice(0, -1).join('/'),
+    url: childrenDir
+  }
+  return createEntryRow(cms, row)
+}
+
+function create(entry: EntryRow): Mutation {
+  return {
+    type: MutationType.Create,
+    entry: entry,
+    entryId: entry.entryId,
+    file: entry.filePath
+  }
+}
+
+function remove(entry: EntryRow): Mutation {
+  return {
+    type: MutationType.Remove,
+    entryId: entry.entryId,
+    file: entry.filePath
+  }
+}
+
+function edit(entry: EntryRow): Mutation {
+  return {
+    type: MutationType.Edit,
+    entryId: entry.entryId,
+    file: entry.filePath,
+    entry: entry
+  }
+}
+
+function publish(entry: EntryRow): Mutation {
+  return {
+    type: MutationType.Publish,
+    entryId: entry.entryId,
+    file: entry.filePath,
+    phase: entry.phase
+  }
+}
+
+test('create', async () => {
+  const example = createExample()
+  const db = await example.db
+  const entry1 = await entry(example, example.schema.Page, {
+    title: 'Test title'
+  })
+  await db.applyMutations([create(entry1)], '')
+  const result = await example.get(Entry({entryId: entry1.entryId}))
+  assert.is(result.entryId, entry1.entryId)
+  assert.is(result.title, 'Test title')
+})
+
+test('remove child entries', async () => {
+  const example = createExample()
+  const db = await example.db
+  const parent = await entry(example, example.schema.Container)
+  const sub = await entry(example, example.schema.Container, {}, parent)
+  const subSub = await entry(example, example.schema.Page, {}, sub)
+
+  await db.applyMutations([create(parent), create(sub), create(subSub)], '')
+
+  const res1 = await example.get(Entry({entryId: subSub.entryId}))
+  assert.ok(res1)
+  assert.is(res1.parent, sub.entryId)
+
+  await db.applyMutations([remove(parent)], '')
+
+  const res2 = await example.get(Entry({entryId: subSub.entryId}))
+  assert.not.ok(res2)
+})
+
+test('change draft path', async () => {
+  const example = createExample()
+  const db = await example.db
+  const parent = await entry(example, example.schema.Container, {
+    path: 'parent'
+  })
+  const sub = await entry(
+    example,
+    example.schema.Container,
+    {path: 'sub'},
+    parent
+  )
+  await db.applyMutations([create(parent), create(sub)], '')
+  const resParent0 = await example.get(Entry({entryId: parent.entryId}))
+  assert.is(resParent0.url, '/parent')
+
+  const draft = {
+    ...parent,
+    phase: EntryPhase.Draft,
+    data: {path: 'new-path'}
+  }
+
+  // Changing entry paths in draft should not have an influence on
+  // computed properties such as url, filePath etc. until we publish.
+  await db.applyMutations([edit(draft)], '')
+  const resParent1 = await example.drafts.get(Entry({entryId: parent.entryId}))
+  assert.is(resParent1.url, '/parent')
+  const res1 = await example.get(Entry({entryId: sub.entryId}))
+  assert.is(res1.url, '/parent/sub')
+
+  // Once we publish, the computed properties should be updated.
+  await db.applyMutations([publish(draft)], '')
+  const resParent2 = await example.get(Entry({entryId: parent.entryId}))
+  assert.is(resParent2.url, '/new-path')
+  const res2 = await example.get(Entry({entryId: sub.entryId}))
+  assert.is(res2.url, '/new-path/sub')
+})
+
+test.run()
diff --git a/src/backend/Database.ts b/src/backend/Database.ts
index 24dc35a56..a26d25642 100644
--- a/src/backend/Database.ts
+++ b/src/backend/Database.ts
@@ -5,20 +5,22 @@ import {
   PageSeed,
   Root,
   Schema,
+  SyncResponse,
   Syncable,
-  Type,
   Workspace,
   createId,
   unreachable
 } from 'alinea/core'
-import {entryInfo} from 'alinea/core/EntryFilenames'
-import {EntryRecord, META_KEY} from 'alinea/core/EntryRecord'
+import {entryFilepath, entryInfo, entryUrl} from 'alinea/core/EntryFilenames'
+import {EntryRecord, META_KEY, createRecord} from 'alinea/core/EntryRecord'
 import {Mutation, MutationType} from 'alinea/core/Mutation'
+import {createEntryRow} from 'alinea/core/util/EntryRows'
 import {Logger} from 'alinea/core/util/Logger'
 import {entries} from 'alinea/core/util/Objects'
 import * as path from 'alinea/core/util/Paths'
+import * as paths from 'alinea/core/util/Paths'
 import {timer} from 'alinea/core/util/Timer'
-import {Driver, Expr, Table, alias, create} from 'rado'
+import {Driver, Expr, Select, alias, create} from 'rado'
 import {exists} from 'rado/sqlite'
 import xxhash from 'xxhash-wasm'
 import {EntryPhase, EntryRow} from '../core/EntryRow.js'
@@ -28,11 +30,8 @@ import {Target} from './Target.js'
 import {ChangeSetCreator} from './data/ChangeSet.js'
 import {AlineaMeta} from './db/AlineaMeta.js'
 import {createEntrySearch} from './db/CreateEntrySearch.js'
-import {createContentHash} from './util/ContentHash.js'
+import {createFileHash, createRowHash} from './util/ContentHash.js'
 
-const decoder = new TextDecoder()
-
-const ALT_STATUS = [EntryPhase.Draft, EntryPhase.Archived]
 type Seed = {
   type: string
   workspace: string
@@ -44,153 +43,300 @@ type Seed = {
 export class Database implements Syncable {
   seed: Map
 
-  constructor(public store: Store, public config: Config) {
+  constructor(public config: Config, public store: Store) {
     this.seed = this.seedData()
   }
 
-  async updates(request: AlineaMeta) {
-    const current = await this.meta()
-    if (current.contentHash === request.contentHash)
-      return {
-        contentHash: current.contentHash,
-        entries: []
-      }
-    return {
-      contentHash: current.contentHash,
-      entries: await this.store(
-        EntryRow().where(EntryRow.modifiedAt.isGreater(request.modifiedAt))
+  async syncRequired(contentHash: string): Promise {
+    const meta = await this.meta()
+    return meta.contentHash !== contentHash
+  }
+
+  async sync(contentHashes: Array): Promise {
+    return this.store.transaction(async tx => {
+      const insert = await tx(
+        EntryRow().where(EntryRow.rowHash.isNotIn(contentHashes))
       )
-    }
+      const keep = new Set(
+        await tx(
+          EntryRow()
+            .where(EntryRow.rowHash.isIn(contentHashes))
+            .select(EntryRow.rowHash)
+        )
+      )
+      const remove = contentHashes.filter(hash => !keep.has(hash))
+      return {insert, remove}
+    })
   }
 
-  async versionIds(): Promise> {
-    return this.store(EntryRow().select(EntryRow.versionId))
+  async contentHashes() {
+    return this.store(EntryRow().select(EntryRow.rowHash))
   }
 
   // Syncs data with a remote database, returning the ids of changed entries
-  async syncWith(remote: Syncable): Promise> {
+  async syncWith(remote: Syncable, force = false): Promise> {
     await this.init()
-    const current = await this.meta()
-    const update = await remote.updates(current)
-    const {contentHash, entries} = update
- if (entries.length) await this.updateEntries(entries) - const updated = await this.meta() - const changedEntries = entries.map(e => e.entryId) - if (updated.contentHash === contentHash) return changedEntries - const remoteVersionIds = await remote.versionIds() - const excessEntries = await this.store.transaction(async query => { - const excess = await query( - EntryRow() - .select({entryId: EntryRow.entryId, versionId: EntryRow.versionId}) - .where( - remoteVersionIds.length > 0 - ? EntryRow.versionId.isNotIn(remoteVersionIds) - : true - ) - ) - await query( - EntryRow() - .delete() - .where(EntryRow.versionId.isIn(excess.map(e => e.versionId))) + const meta = await this.meta() + const isRequired = force || (await remote.syncRequired(meta.contentHash)) + if (!isRequired) return [] + const {insert, remove} = await remote.sync(await this.contentHashes()) + return this.store.transaction(async tx => { + const removed = await tx( + EntryRow().where(EntryRow.rowHash.isIn(remove)).select(EntryRow.entryId) ) - await Database.index(query) - await this.writeMeta(query) - return excess.map(e => e.entryId) + await tx(EntryRow().delete().where(EntryRow.rowHash.isIn(remove))) + const changed = [] + for (const entry of insert) { + await tx(EntryRow().insertOne(entry)) + changed.push(entry.entryId) + changed.push(entry.i18nId) + } + await Database.index(tx) + // This is for a local db, we didn't receive a commit hash here + await this.writeMeta(tx, meta.commitHash) + return removed.concat(changed) }) - const afterRemoves = await this.meta() - if (afterRemoves.contentHash === contentHash) - return changedEntries.concat(excessEntries) - // Todo: we should abandon syncing and just fetch the full db - throw new Error('Sync failed') } - async updateEntries(entries: Array) { - await this.store.transaction(async query => { - for (const entry of entries) { - await query( - EntryRow({ - entryId: entry.entryId, - phase: entry.phase - }).delete() - ) - await query(EntryRow().insertOne(entry)) + applyMutations(mutations: Array, commitHash: string) { + return this.store.transaction(async tx => { + const reHash = [] + for (const mutation of mutations) { + const updateRows = await this.applyMutation(tx, mutation) + if (updateRows) reHash.push(updateRows) } - await Database.index(query) - await this.writeMeta(query) + await Database.index(tx) + const changed = ( + await Promise.all(reHash.map(updateRows => updateRows())) + ).flat() + await this.writeMeta(tx, commitHash) + return changed }) } - async applyMutations(mutations: Array) { - for (const mutation of mutations) { - console.log(`Applying mutation: ${mutation.type} to ${mutation.entryId}`) - this.applyMutation(mutation) + private async applyPublish(tx: Driver.Async, entry: EntryRow) { + const path = entry.data.path + const parentPaths = entry.parentDir.split('/').filter(Boolean) + const filePath = entryFilepath( + this.config, + { + ...entry, + path, + phase: EntryPhase.Published + }, + parentPaths + ) + const parentDir = paths.dirname(filePath) + const extension = paths.extname(filePath) + const fileName = paths.basename(filePath, extension) + const [entryPath] = entryInfo(fileName) + const childrenDir = paths.join(parentDir, entryPath) + const urlMeta: EntryUrlMeta = { + locale: entry.locale, + path, + phase: entry.phase, + parentPaths + } + const url = entryUrl(this.config.schema[entry.type], urlMeta) + const next = { + ...entry, + phase: EntryPhase.Published, + path, + filePath, + parentDir, + childrenDir, + url + } + await tx( + EntryRow({entryId: 
entry.entryId, phase: entry.phase}).set({ + phase: EntryPhase.Published, + filePath, + parentDir, + childrenDir, + url + }) + ) + return this.updateChildren(tx, entry, next) + } + + private async updateChildren( + tx: Driver.Async, + previous: EntryRow, + next: EntryRow + ) { + const {childrenDir: dir} = previous + if (next.phase !== EntryPhase.Published || dir === next.childrenDir) + return [] + const children = await tx( + EntryRow().where( + EntryRow.parentDir.is(dir).or(EntryRow.childrenDir.like(dir + '/%')) + ) + ) + for (const child of children) { + const filePath = next.childrenDir + child.filePath.slice(dir.length) + const childrenDir = next.childrenDir + child.childrenDir.slice(dir.length) + const parentDir = next.childrenDir + child.parentDir.slice(dir.length) + const parentPaths = parentDir.split('/').filter(Boolean) + if (child.locale) parentPaths.shift() + const url = entryUrl(this.config.schema[child.type], { + ...child, + parentPaths + }) + await tx( + EntryRow({entryId: child.entryId, phase: child.phase}).set({ + filePath, + childrenDir, + parentDir, + url + }) + ) + } + return children + } + + async logEntries() { + const entries = await this.store( + EntryRow().orderBy(EntryRow.url.asc(), EntryRow.index.asc()) + ) + for (const entry of entries) { + console.log( + entry.url.padEnd(35), + entry.entryId.padEnd(12), + entry.phase.padEnd(12), + entry.title + ) } - await Database.index(this.store) } - async applyMutation(mutation: Mutation) { + private async applyMutation( + tx: Driver.Async, + mutation: Mutation + ): Promise<(() => Promise>) | undefined> { switch (mutation.type) { - case MutationType.Create: - case MutationType.Edit: - return this.store( - EntryRow({ - entryId: mutation.entryId, - phase: mutation.entry.phase - }).delete(), - EntryRow().insert(mutation.entry) + case MutationType.Create: { + const row = EntryRow({ + entryId: mutation.entryId, + phase: mutation.entry.phase + }) + const current = await tx(row.maybeFirst()) + if (current) return + await tx(EntryRow().insert(mutation.entry)) + return () => this.updateHash(tx, row) + } + case MutationType.Edit: { + const {entryId, entry} = mutation + const row = EntryRow({ + entryId, + phase: entry.phase + }) + const current = await tx(row.maybeFirst()) + await tx(row.delete(), EntryRow().insert(entry)) + let children: Array = [] + if (entry.phase === EntryPhase.Published) { + if (current) children = await this.updateChildren(tx, current, entry) + } + return () => { + return this.updateHash(tx, row).then(self => + this.updateHash( + tx, + EntryRow().where( + EntryRow.entryId.isIn(children.map(e => e.entryId)) + ) + ).then(children => self.concat(children)) + ) + } + } + case MutationType.Archive: { + const archived = EntryRow({ + entryId: mutation.entryId, + phase: EntryPhase.Archived + }) + const row = EntryRow({ + entryId: mutation.entryId, + phase: EntryPhase.Published + }) + const published = await tx(row.maybeFirst()) + if (!published) return + const filePath = + published.filePath.slice(0, -5) + `.${EntryPhase.Archived}.json` + await tx( + archived.delete(), + row.set({ + phase: EntryPhase.Archived, + filePath + }) ) - case MutationType.Archive: - return this.store( + return () => this.updateHash(tx, archived) + } + case MutationType.Publish: { + const promoting = await tx( EntryRow({ entryId: mutation.entryId, - phase: EntryPhase.Published - }).set({phase: EntryPhase.Archived}) + phase: mutation.phase + }).maybeFirst() ) - case MutationType.Publish: - const phases = await this.store( - EntryRow({ - entryId: 
mutation.entryId - }).select(EntryRow.phase) - ) - const promoting = phases.find(p => ALT_STATUS.includes(p)) - if (promoting) - await this.store( - EntryRow({ - entryId: mutation.entryId, - phase: EntryPhase.Published - }).delete(), - EntryRow({ - entryId: mutation.entryId, - phase: promoting - }).set({ - phase: EntryPhase.Published - }) - ) - return + if (!promoting) return + const row = EntryRow({ + entryId: mutation.entryId, + phase: EntryPhase.Published + }) + await tx(row.delete()) + const children = await this.applyPublish(tx, promoting) + return () => + this.updateHash(tx, row).then(rows => { + return this.updateHash( + tx, + EntryRow().where( + EntryRow.entryId.isIn(children.map(e => e.entryId)) + ) + ).then(r => rows.concat(r)) + }) + } case MutationType.FileRemove: if (mutation.replace) return case MutationType.Remove: - return this.store(EntryRow({entryId: mutation.entryId}).delete()) + const phases = await tx(EntryRow({entryId: mutation.entryId})) + // Remove child entries + for (const phase of phases) { + await tx( + EntryRow() + .delete() + .where( + EntryRow.parentDir + .is(phase.childrenDir) + .or(EntryRow.childrenDir.like(phase.childrenDir + '/%')) + ) + ) + } + await tx(EntryRow({entryId: mutation.entryId}).delete()) + return case MutationType.Discard: - return this.store( + await tx( EntryRow({ entryId: mutation.entryId, phase: EntryPhase.Draft }).delete() ) - case MutationType.Order: - return this.store( - EntryRow({entryId: mutation.entryId}).set({index: mutation.index}) - ) - case MutationType.Move: - return this.store( - EntryRow({entryId: mutation.entryId}).set({ + return + case MutationType.Order: { + const rows = EntryRow({entryId: mutation.entryId}) + // Todo: apply this to other languages too + await tx(rows.set({index: mutation.index})) + return () => this.updateHash(tx, rows) + } + case MutationType.Move: { + const rows = EntryRow({entryId: mutation.entryId}) + await tx( + rows.set({ index: mutation.index, parent: mutation.parent, workspace: mutation.workspace, root: mutation.root }) ) + // Todo: update file & children paths + return () => this.updateHash(tx, rows) + } case MutationType.Upload: return default: @@ -198,18 +344,35 @@ export class Database implements Syncable { } } + async updateHash(tx: Driver.Async, selection: Select) { + const changed = [] + const entries = await tx(selection) + for (const entry of entries) { + const updated = await createEntryRow(this.config, entry) + changed.push(updated.entryId) + await tx( + EntryRow({entryId: entry.entryId, phase: entry.phase}).set({ + fileHash: updated.fileHash, + rowHash: updated.rowHash + }) + ) + } + return changed + } + async meta() { return ( (await this.store(AlineaMeta().maybeFirst())) ?? 
{ + commitHash: '', contentHash: '', modifiedAt: 0 } ) } - static async index(query: Driver.Async) { + static async index(tx: Driver.Async) { const {Parent} = alias(EntryRow) - const res = await query( + const res = await tx( EntryRow().set({ parent: Parent({childrenDir: EntryRow.parentDir}) .select(Parent.entryId) @@ -221,31 +384,32 @@ export class Database implements Syncable { return res } - private async writeMeta(query: Driver.Async) { - const {h32ToString} = await xxhash() - const contentHashes = await query( - EntryRow() - .select(EntryRow.contentHash.concat('.').concat(EntryRow.phase)) - .orderBy(EntryRow.contentHash) + private async writeMeta(tx: Driver.Async, commitHash: string) { + const {create32} = await xxhash() + let hash = create32() + const contentHashes = await tx( + EntryRow().select(EntryRow.rowHash).orderBy(EntryRow.rowHash) ) - const contentHash = h32ToString(contentHashes.join('')) - const modifiedAt = await query( + for (const c of contentHashes) hash = hash.update(c) + const contentHash = hash.digest().toString(16).padStart(8, '0') + const modifiedAt = await tx( EntryRow() .select(EntryRow.modifiedAt) .orderBy(EntryRow.modifiedAt.desc()) .first() ) - await query(AlineaMeta().delete()) - await query( + await tx(AlineaMeta().delete()) + await tx( AlineaMeta().insertOne({ - modifiedAt, - contentHash + commitHash, + contentHash, + modifiedAt }) ) } inited = false - private async init() { + async init() { if (this.inited) return this.inited = true try { @@ -253,38 +417,24 @@ export class Database implements Syncable { await tx(create(EntryRow, AlineaMeta)) await createEntrySearch(tx) }) + await this.meta() } catch (e) { this.inited = false throw e } } - entryUrl(type: Type, meta: EntryUrlMeta) { - const {entryUrl} = Type.meta(type) - if (entryUrl) return entryUrl(meta) - const segments = meta.locale ? [meta.locale] : [] - return ( - '/' + - segments - .concat( - meta.parentPaths - .concat(meta.path) - .filter(segment => segment !== 'index') - ) - .join('/') - ) - } - computeEntry( - data: EntryRecord, + record: EntryRecord, meta: { workspace: string root: string filePath: string }, seed?: Seed - ): Omit, 'contentHash'> { - const typeName = data[META_KEY].type + ): Omit { + const {[META_KEY]: alineaMeta, ...data} = record + const typeName = alineaMeta.type const parentDir = path.dirname(meta.filePath) const extension = path.extname(meta.filePath) const fileName = path.basename(meta.filePath, extension) @@ -307,7 +457,7 @@ export class Database implements Syncable { ) const childrenDir = path.join(parentDir, entryPath) - if (!data[META_KEY].entryId) throw new Error(`missing id`) + if (!record[META_KEY].entryId) throw new Error(`missing id`) const urlMeta: EntryUrlMeta = { locale, @@ -329,28 +479,27 @@ export class Database implements Syncable { workspace: meta.workspace, root: meta.root, filePath: meta.filePath, - seeded: Boolean(seed || data[META_KEY].seeded || false), - // contentHash, + seeded: Boolean(seed || alineaMeta.seeded || false), modifiedAt: Date.now(), // file.modifiedAt, active: false, main: false, - entryId: data[META_KEY].entryId, + entryId: alineaMeta.entryId, phase: entryPhase, - type: data[META_KEY].type, + type: alineaMeta.type, parentDir, childrenDir, parent: null, level: parentDir === '/' ? 0 : segments.length, - index: data[META_KEY].index, + index: alineaMeta.index, locale, - i18nId: data[META_KEY].i18nId ?? data[META_KEY].entryId, + i18nId: alineaMeta.i18nId ?? alineaMeta.entryId, path: entryPath, - title: data.title ?? seedData?.title ?? 
'', - url: this.entryUrl(type, urlMeta), + title: record.title ?? seedData?.title ?? '', + url: entryUrl(type, urlMeta), data: entryData, searchableText @@ -391,34 +540,28 @@ export class Database implements Syncable { return res } - async fill(source: Source, target?: Target): Promise { + async fill( + source: Source, + commitHash: string, + target?: Target + ): Promise { // Todo: run a validation step for orders, paths, id matching on statuses // etc await this.init() - const {h32Raw} = await xxhash() const typeNames = Schema.typeNames(this.config.schema) const publishSeed: Array = [] await this.store.transaction(async query => { const seenVersions: Array = [] const seenSeeds = new Set() - let inserted = 0 + const inserted = [] const endScan = timer('Scanning entries') for await (const file of source.entries()) { const seed = this.seed.get(file.filePath) - const extension = path.extname(file.filePath) - const fileName = path.basename(file.filePath, extension) - const [, phase] = entryInfo(fileName) - const contentHash = await createContentHash( - phase, - file.contents, - seed - ? seed.type + JSON.stringify(PageSeed.data(seed.page).partial) - : undefined - ) + const fileHash = await createFileHash(file.contents) const exists = await query( EntryRow({ - contentHash, + fileHash: fileHash, filePath: file.filePath, workspace: file.workspace, root: file.root @@ -443,14 +586,13 @@ export class Database implements Syncable { await query( EntryRow({entryId: entry.entryId, phase: entry.phase}).delete() ) - const withHash = entry as Table.Insert - withHash.contentHash = contentHash + const withHash: EntryRow = {...entry, fileHash, rowHash: ''} seenVersions.push( await query( EntryRow().insert(withHash).returning(EntryRow.versionId) ) ) - inserted++ + inserted.push(`${entry.entryId}.${entry.phase}`) } catch (e: any) { console.log(`> skipped ${file.filePath} — ${e.message}`) } @@ -473,16 +615,14 @@ export class Database implements Syncable { }, seed ) - const seedData = new TextEncoder().encode( - seed.type + JSON.stringify(PageSeed.data(seed.page).partial) - ) - const contentHash = h32Raw(seedData).toString(16).padStart(8, '0') - const withHash = entry as EntryRow - withHash.contentHash = contentHash + const record = createRecord(entry) + const fileContents = JsonLoader.format(this.config.schema, record) + const fileHash = await createFileHash(fileContents) + const withHash = {...entry, fileHash, rowHash: ''} seenVersions.push( await query(EntryRow().insert(withHash).returning(EntryRow.versionId)) ) - inserted++ + inserted.push(`${entry.entryId}.${entry.phase}`) publishSeed.push({ ...withHash, seeded: true, @@ -496,15 +636,25 @@ export class Database implements Syncable { const {rowsAffected: removed} = await query( EntryRow().delete().where(EntryRow.versionId.isNotIn(seenVersions)) ) - const noChanges = inserted === 0 && removed === 0 + const noChanges = inserted.length === 0 && removed === 0 if (noChanges) return // if (inserted) console.log(`> updated ${inserted} entries`) // if (removed) console.log(`> removed ${removed} entries`) //const endIndex = timer('Indexing entries') await Database.index(query) - //endIndex() - await this.writeMeta(query) + const entries = await query( + EntryRow().where(EntryRow.versionId.isIn(inserted)) + ) + for (const entry of entries) { + const rowHash = await createRowHash(entry) + await query( + EntryRow({entryId: entry.entryId, phase: entry.phase}).set({ + rowHash + }) + ) + } + await this.writeMeta(query, commitHash) }) if (target && publishSeed.length > 0) { 
@@ -524,7 +674,10 @@ export class Database implements Syncable { } }) const changes = changeSetCreator.create(mutations) - await target.mutate({mutations: changes}, {logger: new Logger('seed')}) + await target.mutate( + {commitHash: '', mutations: changes}, + {logger: new Logger('seed')} + ) } } } diff --git a/src/backend/Drafts.ts b/src/backend/Drafts.ts new file mode 100644 index 000000000..c5e9c5e0f --- /dev/null +++ b/src/backend/Drafts.ts @@ -0,0 +1,12 @@ +import {Connection, Draft} from 'alinea/core' + +export interface DraftTransport { + entryId: string + fileHash: string + draft: string +} + +export interface Drafts { + getDraft(entryId: string, ctx: Connection.Context): Promise + storeDraft(draft: Draft, ctx: Connection.Context): Promise +} diff --git a/src/backend/Handler.ts b/src/backend/Handler.ts index 48ad06c34..45faf75fb 100644 --- a/src/backend/Handler.ts +++ b/src/backend/Handler.ts @@ -1,11 +1,241 @@ import {Request, Response} from '@alinea/iso' -import {Auth, Connection, EntryPhase} from 'alinea/core' +import { + Auth, + Config, + Connection, + Draft, + Entry, + EntryPhase, + EntryRow, + PreviewUpdate, + ResolveDefaults, + SyncResponse, + parseYDoc +} from 'alinea/core' +import {EntryRecord} from 'alinea/core/EntryRecord' +import {EditMutation, Mutation, MutationType} from 'alinea/core/Mutation' import {Realm} from 'alinea/core/pages/Realm' import {Selection} from 'alinea/core/pages/Selection' +import {base64, base64url} from 'alinea/core/util/Encoding' import {Logger, LoggerResult, Report} from 'alinea/core/util/Logger' +import * as Y from 'alinea/yjs' import {Type, enums, object, string} from 'cito' -import {Server, ServerOptions} from './Server.js' -import {Handle, Route, router} from './router/Router.js' +import {unzlibSync} from 'fflate' +import {mergeUpdatesV2} from 'yjs' +import {Database} from './Database.js' +import {DraftTransport, Drafts} from './Drafts.js' +import {History, Revision} from './History.js' +import {Media} from './Media.js' +import {Pending} from './Pending.js' +import {Previews} from './Previews' +import {Target} from './Target.js' +import {ChangeSetCreator} from './data/ChangeSet.js' +import {EntryResolver} from './resolver/EntryResolver.js' +import {Route, router} from './router/Router.js' + +export interface HandlerOptions { + config: Config + db: Database + previews: Previews + previewAuthToken: string + auth?: Auth.Server + target?: Target + media?: Media + drafts?: Drafts + history?: History + pending?: Pending + resolveDefaults?: ResolveDefaults +} + +export class Handler { + connect: (ctx: Connection.Context) => Connection + router: Route + resolver: EntryResolver + changes: ChangeSetCreator + lastSync = 0 + + constructor(public options: HandlerOptions) { + this.resolver = new EntryResolver( + options.db, + options.config.schema, + this.parsePreview.bind(this) + ) + this.changes = new ChangeSetCreator(options.config) + const auth = options.auth || Auth.anonymous() + this.connect = ctx => new HandlerConnection(this, ctx) + this.router = createRouter(auth, this.connect) + } + + previewAuth(): Connection.Context { + return { + logger: new Logger('parsePreview'), + token: this.options.previewAuthToken + } + } + + async parsePreview(preview: PreviewUpdate) { + const {config} = this.options + await this.periodicSync() + const update = unzlibSync(base64url.parse(preview.update)) + const entry = await this.resolver.resolve({ + selection: Selection.create( + Entry({entryId: preview.entryId}).maybeFirst() + ), + realm: Realm.PreferDraft + }) + if 
(!entry) return + const currentDraft = await this.options.drafts?.getDraft( + preview.entryId, + this.previewAuth() + ) + const apply = currentDraft + ? mergeUpdatesV2([currentDraft.draft, update]) + : update + const type = config.schema[entry.type] + if (!type) return + const doc = new Y.Doc() + Y.applyUpdateV2(doc, apply) + const entryData = parseYDoc(type, doc) + return {...entry, ...entryData, path: entry.path} + } + + async periodicSync() { + if (Date.now() - this.lastSync > 30_000) return + try { + await this.syncPending() + } catch {} + this.lastSync = Date.now() + } + + async syncPending() { + const {pending, db} = this.options + const meta = await db.meta() + if (!pending) return meta + const toApply = await pending.pendingSince( + meta.commitHash, + this.previewAuth() + ) + if (!toApply) return meta + await db.applyMutations(toApply.mutations, toApply.toCommitHash) + return db.meta() + } +} + +class HandlerConnection implements Connection { + constructor(protected handler: Handler, protected ctx: Connection.Context) {} + + // Resolver + + resolve = async (params: Connection.ResolveParams) => { + const {resolveDefaults} = this.handler.options + await this.handler.periodicSync() + return this.handler.resolver.resolve({...resolveDefaults, ...params}) + } + + // Target + + async mutate(mutations: Array): Promise<{commitHash: string}> { + const {target, media, db} = this.handler.options + if (!target) throw new Error('Target not available') + if (!media) throw new Error('Media not available') + const changeSet = this.handler.changes.create(mutations) + const {commitHash: fromCommitHash} = await this.handler.syncPending() + const {commitHash: toCommitHash} = await target.mutate( + {commitHash: fromCommitHash, mutations: changeSet}, + this.ctx + ) + await db.applyMutations(mutations, toCommitHash) + const tasks = [] + for (const mutation of mutations) { + switch (mutation.type) { + case MutationType.FileRemove: + tasks.push( + media.deleteUpload( + {location: mutation.location, workspace: mutation.workspace}, + this.ctx + ) + ) + continue + case MutationType.Edit: + tasks.push(this.persistEdit(mutation)) + continue + } + } + await Promise.all(tasks) + return {commitHash: toCommitHash} + } + + previewToken(): Promise { + const {previews} = this.handler.options + const user = this.ctx.user + if (!user) return previews.sign({anonymous: true}) + return previews.sign({sub: user.sub}) + } + + // Media + + prepareUpload(file: string): Promise { + const {media} = this.handler.options + if (!media) throw new Error('Media not available') + return media.prepareUpload(file, this.ctx) + } + + // History + + async revisions(file: string): Promise> { + const {history} = this.handler.options + if (!history) return [] + return history.revisions(file, this.ctx) + } + + async revisionData(file: string, revisionId: string): Promise { + const {history} = this.handler.options + if (!history) throw new Error('History not available') + return history.revisionData(file, revisionId, this.ctx) + } + + // Syncable + + async syncRequired(contentHash: string): Promise { + const {db} = this.handler.options + await this.handler.syncPending() + return db.syncRequired(contentHash) + } + + async sync(contentHashes: Array): Promise { + const {db} = this.handler.options + await this.handler.syncPending() + return db.sync(contentHashes) + } + + // Drafts + + private async persistEdit(mutation: EditMutation) { + const {drafts} = this.handler.options + if (!drafts || !mutation.update) return + const update = 
base64.parse(mutation.update) + const currentDraft = await this.getDraft(mutation.entryId) + await this.storeDraft({ + entryId: mutation.entryId, + fileHash: mutation.entry.fileHash, + draft: currentDraft + ? mergeUpdatesV2([currentDraft.draft, update]) + : update + }) + } + + getDraft(entryId: string): Promise { + const {drafts} = this.handler.options + if (!drafts) throw new Error('Drafts not available') + return drafts.getDraft(entryId, this.ctx) + } + + storeDraft(draft: Draft): Promise { + const {drafts} = this.handler.options + if (!drafts) throw new Error('Drafts not available') + return drafts.storeDraft(draft, this.ctx) + } +} function respond({result, logger}: LoggerResult) { return router.jsonResponse(result, { @@ -44,7 +274,7 @@ function createRouter( } } return router( - auth.handler, + auth.router, matcher .get(Connection.routes.previewToken()) @@ -55,6 +285,8 @@ function createRouter( }) .map(respond), + // History + matcher .get(Connection.routes.revisions()) .map(context) @@ -79,6 +311,8 @@ function createRouter( }) .map(respond), + // Target + matcher .post(Connection.routes.mutate()) .map(context) @@ -91,26 +325,32 @@ function createRouter( }) .map(respond), + // Syncable + matcher - .get(Connection.routes.updates()) + .get(Connection.routes.sync()) .map(context) .map(({ctx, url}) => { const api = createApi(ctx) const contentHash = url.searchParams.get('contentHash')! - const modifiedAt = Number(url.searchParams.get('modifiedAt'))! - return ctx.logger.result(api.updates({contentHash, modifiedAt})) + return ctx.logger.result(api.syncRequired(contentHash)) }) .map(respond), matcher - .get(Connection.routes.versionIds()) + .post(Connection.routes.sync()) .map(context) - .map(({ctx}) => { + .map(router.parseJson) + .map(({ctx, body}) => { const api = createApi(ctx) - return ctx.logger.result(api.versionIds()) + if (!Array.isArray(body)) throw new Error(`Array expected`) + const contentHashes = body as Array + return ctx.logger.result(api.sync(contentHashes)) }) .map(respond), + // Media + matcher .post(Connection.routes.prepareUpload()) .map(context) @@ -120,20 +360,35 @@ function createRouter( const {filename} = PrepareBody(body) return ctx.logger.result(api.prepareUpload(filename)) }) - .map(respond) - ).recover(router.reportError) -} + .map(respond), -export interface HandlerOptions extends ServerOptions { - auth?: Auth.Server -} + // Drafts -export class Handler { - handle: Handle + matcher + .get(Connection.routes.draft()) + .map(context) + .map(({ctx, url}) => { + const api = createApi(ctx) + const entryId = url.searchParams.get('entryId')! 
+ return ctx.logger.result( + api.getDraft(entryId).then(draft => { + if (!draft) return null + return {...draft, draft: base64.stringify(draft.draft)} + }) + ) + }) + .map(respond), - constructor(public options: HandlerOptions) { - const auth = options.auth || Auth.anonymous() - const {handle} = createRouter(auth, context => new Server(options, context)) - this.handle = handle - } + matcher + .post(Connection.routes.draft()) + .map(context) + .map(router.parseJson) + .map(({ctx, body}) => { + const api = createApi(ctx) + const data = body as DraftTransport + const draft = {...data, draft: new Uint8Array(base64.parse(data.draft))} + return ctx.logger.result(api.storeDraft(draft)) + }) + .map(respond) + ).recover(router.reportError) } diff --git a/src/backend/Media.ts b/src/backend/Media.ts index 905892dc0..1dffa1f28 100644 --- a/src/backend/Media.ts +++ b/src/backend/Media.ts @@ -5,7 +5,7 @@ export interface Media { file: string, ctx: Connection.Context ): Promise - delete( + deleteUpload( params: Connection.DeleteParams, ctx: Connection.Context ): Promise diff --git a/src/backend/Pending.ts b/src/backend/Pending.ts new file mode 100644 index 000000000..553476807 --- /dev/null +++ b/src/backend/Pending.ts @@ -0,0 +1,9 @@ +import {Connection} from 'alinea/core' +import {Mutation} from 'alinea/core/Mutation' + +export interface Pending { + pendingSince( + commitHash: string, + ctx: Connection.Context + ): Promise<{toCommitHash: string; mutations: Array} | undefined> +} diff --git a/src/backend/Server.ts b/src/backend/Server.ts deleted file mode 100644 index dabf62bf1..000000000 --- a/src/backend/Server.ts +++ /dev/null @@ -1,102 +0,0 @@ -import {Config, Connection} from 'alinea/core' -import {EntryRecord} from 'alinea/core/EntryRecord' -import {Graph} from 'alinea/core/Graph' -import {Mutation, MutationType} from 'alinea/core/Mutation' -import {Database} from './Database.js' -import {History, Revision} from './History.js' -import {Media} from './Media.js' -import {Previews} from './Previews' -import {ResolveDefaults, Resolver} from './Resolver.js' -import {Store} from './Store.js' -import {Target} from './Target.js' -import {ChangeSetCreator} from './data/ChangeSet.js' -import {AlineaMeta} from './db/AlineaMeta.js' - -export interface PreviewOptions { - preview?: boolean - previewToken?: string -} - -export type ServerOptions = { - config: Config - store: Store - target: Target - media: Media - previews: Previews - history?: History - resolveDefaults?: ResolveDefaults -} - -export class Server implements Connection { - db: Database - resolver: Resolver - protected graph: Graph - changes: ChangeSetCreator - - constructor( - public options: ServerOptions, - public context: Connection.Context - ) { - this.db = new Database(options.store, options.config) - this.resolver = new Resolver(options.store, options.config.schema) - this.graph = new Graph(this.options.config, this.resolve) - this.changes = new ChangeSetCreator(options.config) - } - - // Api - - resolve = (params: Connection.ResolveParams) => { - const {resolveDefaults} = this.options - return this.resolver.resolve({...resolveDefaults, ...params}) - } - - async mutate(mutations: Array): Promise { - const {target} = this.options - const changes = this.changes.create(mutations) - await target.mutate({mutations: changes}, this.context) - for (const mutation of mutations) { - if (mutation.type === MutationType.FileRemove) { - await this.options.media.delete( - {location: mutation.location, workspace: mutation.workspace}, - this.context - ) - 
} - } - } - - previewToken(): Promise { - const {previews} = this.options - const user = this.context.user - if (!user) return previews.sign({anonymous: true}) - return previews.sign({sub: user.sub}) - } - - prepareUpload(file: string): Promise { - const {media} = this.options - return media.prepareUpload(file, this.context) - } - - // History - - async revisions(file: string): Promise> { - const {history} = this.options - if (!history) return [] - return history.revisions(file, this.context) - } - - async revisionData(file: string, revisionId: string): Promise { - const {history} = this.options - if (!history) throw new Error('History not available') - return history.revisionData(file, revisionId, this.context) - } - - // Syncable - - versionIds() { - return this.db.versionIds() - } - - updates(request: AlineaMeta) { - return this.db.updates(request) - } -} diff --git a/src/backend/Target.ts b/src/backend/Target.ts index ff68acb5c..6745e307d 100644 --- a/src/backend/Target.ts +++ b/src/backend/Target.ts @@ -1,9 +1,8 @@ import {Connection} from 'alinea/core' export interface Target { - canRename: boolean mutate( params: Connection.MutateParams, ctx: Connection.Context - ): Promise + ): Promise<{commitHash: string}> } diff --git a/src/backend/data/ChangeSet.ts b/src/backend/data/ChangeSet.ts index 1b05a682c..7260f221c 100644 --- a/src/backend/data/ChangeSet.ts +++ b/src/backend/data/ChangeSet.ts @@ -152,8 +152,14 @@ export class ChangeSetCreator { } removeChanges({file}: RemoveEntryMutation): Array { - // Todo: remove all possible phases - return [{type: ChangeType.Delete, file}] + if (!file.endsWith(`.${EntryPhase.Archived}.json`)) return [] + return [ + {type: ChangeType.Delete, file}, + { + type: ChangeType.Delete, + file: file.slice(0, -`.${EntryPhase.Archived}.json`.length) + } + ] } discardChanges({file}: DiscardDraftMutation): Array { diff --git a/src/backend/db/AlineaMeta.ts b/src/backend/db/AlineaMeta.ts index 14c5f8abe..791079d55 100644 --- a/src/backend/db/AlineaMeta.ts +++ b/src/backend/db/AlineaMeta.ts @@ -1,6 +1,7 @@ import {column, table} from 'rado' class AlineaMetaTable { + commitHash = column.string contentHash = column.string modifiedAt = column.number } diff --git a/src/backend/package.json b/src/backend/package.json index b0bf0546d..27ca53606 100644 --- a/src/backend/package.json +++ b/src/backend/package.json @@ -5,7 +5,7 @@ "cito": "^0.2.0", "dataloader": "^2.1.0", "pretty-ms": "^8.0.0", - "rado": "^0.4.3", + "rado": "^0.4.4", "regexparam": "^2.0.1", "xxhash-wasm": "^1.0.2" } diff --git a/src/backend/Resolver.ts b/src/backend/resolver/EntryResolver.ts similarity index 93% rename from src/backend/Resolver.ts rename to src/backend/resolver/EntryResolver.ts index 9fa7f66ee..d8be11a56 100644 --- a/src/backend/Resolver.ts +++ b/src/backend/resolver/EntryResolver.ts @@ -1,18 +1,15 @@ import { Connection, Field, + PreviewUpdate, + ResolveDefaults, Schema, Type, - createYDoc, - parseYDoc, unreachable } from 'alinea/core' import {EntrySearch} from 'alinea/core/EntrySearch' import {Realm} from 'alinea/core/pages/Realm' -import {base64url} from 'alinea/core/util/Encoding' import {entries, fromEntries, keys} from 'alinea/core/util/Objects' -import * as Y from 'alinea/yjs' -import {unzlibSync} from 'fflate' import { BinOpType, Expr, @@ -28,22 +25,10 @@ import { withRecursive } from 'rado' import {iif, match, count as sqlCount} from 'rado/sqlite' -import {EntryPhase, EntryRow, EntryTable} from '../core/EntryRow.js' -import * as pages from '../core/pages/index.js' -import {Database} 
from './Database.js' -import {Store} from './Store.js' -import {LinkResolver} from './resolver/LinkResolver.js' - -export interface PreviewUpdate { - entryId: string - phase: EntryPhase - update: string -} - -export interface ResolveDefaults { - realm?: Realm - preview?: PreviewUpdate -} +import {EntryPhase, EntryRow, EntryTable} from '../../core/EntryRow.js' +import * as pages from '../../core/pages/index.js' +import {Database} from '../Database.js' +import {LinkResolver} from './LinkResolver.js' const unOps = { [pages.UnaryOp.Not]: UnOpType.Not, @@ -174,10 +159,17 @@ enum ExprContext { InAccess = 1 << 2 } -export class Resolver { +export class EntryResolver { targets: Schema.Targets - constructor(public store: Store, public schema: Schema) { + constructor( + public db: Database, + public schema: Schema, + public parsePreview?: ( + preview: PreviewUpdate + ) => Promise, + public defaults?: ResolveDefaults + ) { this.targets = Schema.targets(schema) } @@ -702,32 +694,24 @@ export class Resolver { selection, location, locale, - realm = Realm.Published, - preview + realm = this.defaults?.realm ?? Realm.Published, + preview = this.defaults?.preview }: Connection.ResolveParams): Promise => { const ctx = new ResolveContext({realm, location, locale}) const queryData = this.query(ctx, selection) const query = new Query(queryData) if (preview) { - const current = EntryRow({ - entryId: preview.entryId, - active: true - }) - const entry = await this.store(current.maybeFirst()) - if (entry) + const updated = await this.parsePreview?.(preview) + if (updated) try { - // Create yjs doc - const type = this.schema[entry.type] - const yDoc = createYDoc(type, entry) - // Apply update - const update = unzlibSync(base64url.parse(preview.update)) - Y.applyUpdateV2(yDoc, update) - const entryData = parseYDoc(type, yDoc) - const previewEntry = {...entry, ...entryData} - await this.store.transaction(async tx => { + await this.db.store.transaction(async tx => { + const current = EntryRow({ + entryId: preview.entryId, + active: true + }) // Temporarily add preview entry await tx(current.delete()) - await tx(EntryRow().insert(previewEntry)) + await tx(EntryRow().insert(updated)) await Database.index(tx) const result = await tx(query) const linkResolver = new LinkResolver(this, tx, ctx.realm) @@ -740,8 +724,8 @@ export class Resolver { // console.warn('Could not decode preview update', err) } } - const result = await this.store(query) - const linkResolver = new LinkResolver(this, this.store, ctx.realm) + const result = await this.db.store(query) + const linkResolver = new LinkResolver(this, this.db.store, ctx.realm) if (result) await this.post({linkResolver}, result, selection) return result } diff --git a/src/backend/resolver/LinkResolver.ts b/src/backend/resolver/LinkResolver.ts index e222d18e5..17603cb7b 100644 --- a/src/backend/resolver/LinkResolver.ts +++ b/src/backend/resolver/LinkResolver.ts @@ -5,8 +5,8 @@ import {Selection} from 'alinea/core/pages/Selection' import {serializeSelection} from 'alinea/core/pages/Serialize' import DataLoader from 'dataloader' import {Query} from 'rado' -import {ResolveContext, Resolver} from '../Resolver.js' import {Store} from '../Store.js' +import {EntryResolver, ResolveContext} from './EntryResolver.js' interface LinkData { entryId: string @@ -17,7 +17,7 @@ export class LinkResolver { loaders = new Map>() constructor( - public resolver: Resolver, + public resolver: EntryResolver, public store: Store, public realm: Realm ) {} diff --git a/src/backend/router/Router.ts 
b/src/backend/router/Router.ts index e0968ce89..3f123429d 100644 --- a/src/backend/router/Router.ts +++ b/src/backend/router/Router.ts @@ -2,23 +2,21 @@ import {CompressionStream, Headers, Request, Response} from '@alinea/iso' import {Outcome} from 'alinea/core/Outcome' import {parse} from 'regexparam' -export type Handle = { - (input: In): Out | undefined | Promise +export interface HttpRouter { + (input: Request): Promise } -export type Handler = Handle | Route - -function callHandler(handler: Handler, input: In) { - return typeof handler === 'function' ? handler(input) : handler.handle(input) +export interface Handle { + (input: In): Out | undefined | Promise } -export type HttpHandler = (input: Request) => Promise +type Next = Handle | Route export class Route { constructor(public handle: Handle) {} map(next: Handle): Route map(next: Route): Route - map(next: Handler): Route { + map(next: Next): Route { return new Route(input => { const result = this.handle(input) if (result instanceof Promise) @@ -52,10 +50,11 @@ export class Route { } export function router( - ...routes: Array> + ...routes: Array | undefined> ): Route { return new Route(async (request: Request) => { for (const handler of routes) { + if (!handler) continue let result = callHandler(handler, request) if (result instanceof Promise) result = await result if (result !== undefined) return result @@ -63,6 +62,10 @@ export function router( }) } +function callHandler(handler: Next, input: In) { + return typeof handler === 'function' ? handler(input) : handler.handle(input) +} + export namespace router { export function use(handle: Handle) { return new Route(handle) @@ -200,7 +203,7 @@ export namespace router { } export function compress( - ...routes: Array> + ...routes: Array> ): Route { const route = router(...routes) return new Route( diff --git a/src/backend/test/Example.ts b/src/backend/test/Example.ts index 8dd4571f6..6a43471a0 100644 --- a/src/backend/test/Example.ts +++ b/src/backend/test/Example.ts @@ -4,60 +4,62 @@ import {createMediaRoot} from 'alinea/core/media/MediaRoot' import {MediaFile, MediaLibrary} from 'alinea/core/media/MediaSchema' import {path, tab, tabs, text} from 'alinea/input' -const TypeA = type('Type', { - title: text('Title'), - path: path('Path'), - ...tabs( - tab('Tab 1', { - name: path('Name') - }), - tab('Tab 2', { - name: text('Name'), - name2: text('Name') - }) - ), - [type.meta]: { - isContainer: true - } -}) +export function createExample() { + const Page = type('Type', { + title: text('Title'), + path: path('Path'), + ...tabs( + tab('Tab 1', { + name: path('Name') + }), + tab('Tab 2', { + name: text('Name'), + name2: text('Name') + }) + ), + [type.meta]: { + isContainer: true + } + }) -const TypeB = type('TypeB', { - title: text('Title'), - path: path('Path'), - name: text('name'), - [type.meta]: { - isContainer: true - } -}) + const Container = type('TypeB', { + title: text('Title'), + path: path('Path'), + name: text('name'), + [type.meta]: { + isContainer: true + } + }) -const main = workspace('Main', { - pages: root('Pages', { - entry1: page(TypeA({title: 'Test title'})), - entry2: page(TypeA({title: 'Entry 2'}), { - entry3: page(TypeB({title: 'Entry 3'})) + const main = workspace('Main', { + pages: root('Pages', { + entry1: page(Page({title: 'Test title'})), + entry2: page(Container({title: 'Entry 2'}), { + entry3: page(Page({title: 'Entry 3'})) + }), + [root.meta]: { + contains: ['TypeA'] + } + }), + media: createMediaRoot({ + dir: page(MediaLibrary({title: 'Media folder'})), + 
'file1.png': page( + MediaFile({ + title: 'File 1', + path: 'file1.png', + extension: '.png', + size: 1000, + hash: 'hash1' + }) + ) }), - [root.meta]: { - contains: ['TypeA'] + [workspace.meta]: { + source: '.' } - }), - media: createMediaRoot({ - dir: page(MediaLibrary({title: 'Media folder'})), - 'file1.png': page( - MediaFile({ - title: 'File 1', - path: 'file1.png', - extension: '.png', - size: 1000, - hash: 'hash1' - }) - ) - }), - [workspace.meta]: { - source: '.' - } -}) + }) -export const example = createTestCMS({ - schema: {TypeA, TypeB}, - workspaces: {main} -}) + return createTestCMS({ + schema: {Page, Container}, + workspaces: {main} + }) +} diff --git a/src/backend/util/ContentHash.ts b/src/backend/util/ContentHash.ts index 1db13b8ec..3804d872e 100644 --- a/src/backend/util/ContentHash.ts +++ b/src/backend/util/ContentHash.ts @@ -1,22 +1,35 @@ -import {EntryPhase} from 'alinea/core' +import {EntryRow} from 'alinea/core' import xxhash from 'xxhash-wasm' const xxHash = xxhash() -const textEncoder = new TextEncoder() -export async function createContentHash( - phase: EntryPhase, - contents: Uint8Array, - seed?: string -) { +export async function createFileHash(data: Uint8Array) { const {h32Raw} = await xxHash - const seedData = seed ? textEncoder.encode(seed) : new Uint8Array(0) - const phaseData = textEncoder.encode(phase) - const hashData = new Uint8Array( - seedData.length + phaseData.length + contents.length - ) - hashData.set(seedData) - hashData.set(phaseData, seedData.length) - hashData.set(contents, seedData.length + phaseData.length) - return h32Raw(hashData).toString(16).padStart(8, '0') + return h32Raw(data).toString(16).padStart(8, '0') +} + +export async function createRowHash(entry: Omit) { + const {create32} = await xxhash() + const hash = create32() + .update(`entryId ${entry.entryId}`) + .update(`phase ${entry.phase}`) + .update(`title ${entry.title}`) + .update(`type ${entry.type}`) + .update(`seeded ${entry.seeded}`) + .update(`workspace ${entry.workspace}`) + .update(`root ${entry.root}`) + .update(`level ${entry.level}`) + .update(`filePath ${entry.filePath}`) + .update(`parentDir ${entry.parentDir}`) + .update(`childrenDir ${entry.childrenDir}`) + .update(`index ${entry.index}`) + .update(`parent ${entry.parent}`) + .update(`i18nId ${entry.i18nId}`) + .update(`locale ${entry.locale}`) + .update(`fileHash ${entry.fileHash}`) + .update(`active ${entry.active}`) + .update(`main ${entry.main}`) + .update(`path ${entry.path}`) + .update(`url ${entry.url}`) + return hash.digest().toString(16).padStart(8, '0') } diff --git a/src/cli/Generate.ts b/src/cli/Generate.ts index eb2135bb6..0e4f39530 100644 --- a/src/cli/Generate.ts +++ b/src/cli/Generate.ts @@ -1,3 +1,4 @@ +import {Database} from 'alinea/backend' import {Store} from 'alinea/backend/Store' import {CMS} from 'alinea/core/CMS' import {Config} from 'alinea/core/Config' @@ -61,7 +62,7 @@ async function createDb(): Promise<[Store, () => Uint8Array]> { export async function* generate(options: GenerateOptions): AsyncGenerator< { cms: CMS - store: Store + db: Database localData: LocalData }, void @@ -116,14 +117,14 @@ export async function* generate(options: GenerateOptions): AsyncGenerator< rootDir, dashboardUrl: await options.dashboardUrl }) - for await (const _ of fillCache( + for await (const db of fillCache( context, fileData, store, cms, nextBuild )) { - yield {cms, store, localData: fileData} + yield {cms, db, localData: fileData} // For debug reasons write out db if (process.env.NODE_ENV === 
'development') fs.writeFileSync( @@ -143,7 +144,7 @@ export async function* generate(options: GenerateOptions): AsyncGenerator< break } } catch (e: any) { - console.log(e.message) + console.error(e) } } } diff --git a/src/cli/Serve.ts b/src/cli/Serve.ts index f605c78b2..99e93cb34 100644 --- a/src/cli/Serve.ts +++ b/src/cli/Serve.ts @@ -1,6 +1,7 @@ import {JWTPreviews} from 'alinea/backend' import {Handler} from 'alinea/backend/Handler' -import {HttpHandler} from 'alinea/backend/router/Router' +import {HttpRouter} from 'alinea/backend/router/Router' +import {createCloudDebugHandler} from 'alinea/cloud/server/CloudDebugHandler' import {createCloudHandler} from 'alinea/cloud/server/CloudHandler' import {CMS} from 'alinea/core/CMS' import {BuildOptions} from 'esbuild' @@ -10,6 +11,7 @@ import {buildOptions} from './build/BuildOptions.js' import {createLocalServer} from './serve/CreateLocalServer.js' import {GitHistory} from './serve/GitHistory.js' import {LiveReload} from './serve/LiveReload.js' +import {MemoryDrafts} from './serve/MemoryDrafts.js' import {ServeContext} from './serve/ServeContext.js' import {startNodeServer} from './serve/StartNodeServer.js' import {dirname} from './util/Dirname.js' @@ -77,34 +79,39 @@ export async function serve(options: ServeOptions): Promise { }) } })[Symbol.asyncIterator]() + const drafts = new MemoryDrafts() let nextGen = gen.next() let cms: CMS | undefined - let handle: HttpHandler | undefined + let handle: HttpRouter | undefined while (true) { const current = await nextGen if (!current?.value) return - const {cms: currentCMS, localData: fileData} = current.value + const {cms: currentCMS, localData: fileData, db} = current.value if (currentCMS === cms) { context.liveReload.reload('refetch') } else { - const backend = process.env.ALINEA_CLOUD_URL - ? 
createCloudHandler( - currentCMS, - current.value.store, - process.env.ALINEA_API_KEY - ) - : new Handler({ - config: currentCMS, - store: current.value.store, - target: fileData, - media: fileData, - history: new GitHistory(currentCMS, rootDir), - previews: new JWTPreviews('dev') - }) + const backend = createBackend() handle = createLocalServer(context, backend) cms = currentCMS context.liveReload.reload('refresh') + + function createBackend(): Handler { + if (process.env.ALINEA_CLOUD_DEBUG) + return createCloudDebugHandler(currentCMS, db) + if (process.env.ALINEA_CLOUD_URL) + return createCloudHandler(currentCMS, db, process.env.ALINEA_API_KEY) + return new Handler({ + config: currentCMS, + db, + target: fileData, + media: fileData, + drafts, + history: new GitHistory(currentCMS, rootDir), + previews: new JWTPreviews('dev'), + previewAuthToken: 'dev' + }) + } } nextGen = gen.next() const {serve} = await server diff --git a/src/cli/generate/FillCache.ts b/src/cli/generate/FillCache.ts index ce307b561..2ce7fd773 100644 --- a/src/cli/generate/FillCache.ts +++ b/src/cli/generate/FillCache.ts @@ -3,6 +3,7 @@ import {Store} from 'alinea/backend/Store' import {Emitter, createEmitter} from 'alinea/cli/util/Emitter' import {Config} from 'alinea/core' import pLimit from 'p-limit' +import {getCommitSha} from '../util/CommitSha.js' import {createWatcher} from '../util/Watcher.js' import {GenerateContext} from './GenerateContext.js' import {LocalData} from './LocalData.js' @@ -13,16 +14,21 @@ export async function* fillCache( store: Store, config: Config, until: Promise -) { - const db = new Database(store, config) +): AsyncGenerator { + const db = new Database(config, store) const limit = pLimit(1) - const cache = () => db.fill(localData, localData) + const commitSha = getCommitSha() + + const cache = async () => { + await db.fill(localData, commitSha ?? 
'', localData) + return db + } yield limit(cache) if (!watch || !localData.watchFiles) return - const results = createEmitter>() + const results = createEmitter>() const stopWatching = await createWatcher({ watchFiles: localData.watchFiles.bind(localData), async onChange() { @@ -34,7 +40,7 @@ export async function* fillCache( }) try { - yield* results + for await (const result of results) yield result } catch (e) { if (e === Emitter.CANCELLED) return throw e diff --git a/src/cli/generate/LocalData.ts b/src/cli/generate/LocalData.ts index 478f5d670..dd1b6d10d 100644 --- a/src/cli/generate/LocalData.ts +++ b/src/cli/generate/LocalData.ts @@ -43,8 +43,6 @@ async function filesOfPath(fs: FS, dir: string): Promise { } export class LocalData implements Source, Target, Media { - canRename = true - constructor(public options: LocalDataOptions) {} async watchFiles() { @@ -161,6 +159,7 @@ export class LocalData implements Source, Target, Media { } } } + return {commitHash: createId()} } isInMediaLocation(file: string): boolean { @@ -197,7 +196,10 @@ export class LocalData implements Source, Target, Media { } } - async delete({location, workspace}: Connection.DeleteParams): Promise { + async deleteUpload({ + location, + workspace + }: Connection.DeleteParams): Promise { const {fs, rootDir = '.'} = this.options const mediaDir = Workspace.data( this.options.config.workspaces[workspace] diff --git a/src/cli/serve/CreateLocalServer.ts b/src/cli/serve/CreateLocalServer.ts index f17ef5fa3..3306afc25 100644 --- a/src/cli/serve/CreateLocalServer.ts +++ b/src/cli/serve/CreateLocalServer.ts @@ -1,6 +1,7 @@ import {ReadableStream, Request, Response, TextEncoderStream} from '@alinea/iso' import {Handler} from 'alinea/backend' -import {HttpHandler, router} from 'alinea/backend/router/Router' +import {HttpRouter, router} from 'alinea/backend/router/Router' +import {cloudUrl} from 'alinea/cloud/server/CloudConfig' import {Trigger, trigger} from 'alinea/core' import esbuild, {BuildOptions, BuildResult, OutputFile} from 'esbuild' import fs from 'node:fs' @@ -66,7 +67,7 @@ export function createLocalServer( liveReload }: ServeContext, handler: Handler -): HttpHandler { +): HttpRouter { const devDir = path.join(staticDir, 'dev') const matcher = router.matcher() const entry = `alinea/cli/static/dashboard/dev` @@ -74,9 +75,6 @@ export function createLocalServer( const tsconfig = fs.existsSync(altConfig) ? altConfig : undefined let currentBuild: Trigger = trigger(), initial = true - const cloudUrl = process.env.ALINEA_CLOUD_URL - ? `'${process.env.ALINEA_CLOUD_URL}'` - : 'undefined' const config = { external: [ 'next/navigation', @@ -101,11 +99,10 @@ export function createLocalServer( ...buildOptions, plugins: buildOptions?.plugins || [], define: { - 'process.env.NODE_ENV': - production || process.env.ALINEA_CLOUD_URL - ? "'production'" - : "'development'", - 'process.env.ALINEA_CLOUD_URL': cloudUrl, + 'process.env.NODE_ENV': production ? "'production'" : "'development'", + 'process.env.ALINEA_CLOUD_URL': cloudUrl + ? 
JSON.stringify(cloudUrl)
+          : 'undefined',
         ...publicDefines(process.env)
       },
       logOverride: {
@@ -210,11 +207,7 @@
         }
       )
     }),
-    matcher
-      .all('/hub/*')
-      .map(async ({request}): Promise => {
-        return handler.handle(request)
-      }),
+    handler.router,
     serveBrowserBuild,
     matcher.get('/config.css').map((): Response => {
       return new Response('', {headers: {'content-type': 'text/css'}})
diff --git a/src/cli/serve/MemoryDrafts.ts b/src/cli/serve/MemoryDrafts.ts
new file mode 100644
index 000000000..40a530a5f
--- /dev/null
+++ b/src/cli/serve/MemoryDrafts.ts
@@ -0,0 +1,14 @@
+import {Drafts} from 'alinea/backend/Drafts'
+import {Draft} from 'alinea/core'
+
+export class MemoryDrafts implements Drafts {
+  drafts = new Map()
+
+  async getDraft(entryId: string): Promise {
+    return this.drafts.get(entryId)
+  }
+
+  async storeDraft(draft: Draft): Promise {
+    this.drafts.set(draft.entryId, draft)
+  }
+}
diff --git a/src/cli/util/CommitSha.ts b/src/cli/util/CommitSha.ts
new file mode 100644
index 000000000..cfa007145
--- /dev/null
+++ b/src/cli/util/CommitSha.ts
@@ -0,0 +1,8 @@
+export function getCommitSha(): string | undefined {
+  return (
+    process.env.GITHUB_SHA || // Github actions
+    process.env.VERCEL_GIT_COMMIT_SHA || // Vercel
+    process.env.COMMIT_REF || // Netlify
+    process.env.CF_PAGES_COMMIT_SHA // Cloudflare pages
+  )
+}
diff --git a/src/cloud/server/CloudAuthServer.ts b/src/cloud/server/CloudAuthServer.ts
index c338f2e5b..b018acd94 100644
--- a/src/cloud/server/CloudAuthServer.ts
+++ b/src/cloud/server/CloudAuthServer.ts
@@ -1,5 +1,5 @@
 import {fetch, Request, Response} from '@alinea/iso'
-import {Handler, router} from 'alinea/backend/router/Router'
+import {Route, router} from 'alinea/backend/router/Router'
 import {Auth, Config, Connection, HttpError, outcome, User} from 'alinea/core'
 import {verify} from 'alinea/core/util/JWT'
 import PLazy from 'p-lazy'
@@ -38,7 +38,7 @@ function loadPublicKey(retry = 0): Promise {
 const COOKIE_NAME = 'alinea.cloud'
 
 export class CloudAuthServer implements Auth.Server {
-  handler: Handler
+  router: Route
   context = new WeakMap()
   dashboardUrl: string

@@ -47,7 +47,7 @@ export class CloudAuthServer implements Auth.Server {
     this.dashboardUrl = config.dashboard?.dashboardUrl!
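// A minimal usage sketch (not part of this patch) of the MemoryDrafts store
// and the getCommitSha helper introduced above. The entry id and hash below
// are made-up values, and the 'alinea/cli/...' import specifiers are assumed
// to follow the same package-path pattern used elsewhere in this diff.
import {MemoryDrafts} from 'alinea/cli/serve/MemoryDrafts'
import {getCommitSha} from 'alinea/cli/util/CommitSha'

async function draftRoundTrip() {
  const drafts = new MemoryDrafts()
  // Store an in-memory draft update; Draft carries the entryId, the hash of
  // the persisted file it was based on, and the raw update bytes.
  await drafts.storeDraft({
    entryId: 'entry-1', // hypothetical id
    fileHash: 'abc123', // hypothetical hash
    draft: new Uint8Array([1, 2, 3])
  })
  // Returns the stored draft, or undefined when none exists for this id
  const draft = await drafts.getDraft('entry-1')
  // getCommitSha() probes the CI environment variables listed above (GitHub
  // Actions, Vercel, Netlify, Cloudflare Pages) and is undefined on a local
  // machine, which is why FillCache falls back to an empty string.
  const commitSha = getCommitSha() ?? ''
  return {draft, commitSha}
}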
const matcher = router.startAt(Connection.routes.base) - this.handler = router( + this.router = router( // We start by asking our backend whether we have: // - a logged in user => return the user so we can create a session // - no user, but a valid api key => we can redirect to cloud login @@ -182,8 +182,9 @@ export class CloudAuthServer implements Auth.Server { }) }), - router - .use(async (request: Request) => { + matcher + .all(Connection.routes.base + '/*') + .map(async ({request}) => { try { const {user} = await this.contextFor(request) } catch (error) { diff --git a/src/cloud/server/CloudConfig.ts b/src/cloud/server/CloudConfig.ts index 96f28a857..02f4dc274 100644 --- a/src/cloud/server/CloudConfig.ts +++ b/src/cloud/server/CloudConfig.ts @@ -1,16 +1,27 @@ -const baseUrl = - (typeof process !== 'undefined' && process.env?.ALINEA_CLOUD_URL) || - 'https://www.alinea.cloud' +function createCloudConfig(baseUrl: string) { + return { + url: baseUrl, + jwks: `${baseUrl}/.well-known/jwks.json`, + setup: `${baseUrl}/setup`, + auth: `${baseUrl}/auth`, + handshake: `${baseUrl}/api/v1/handshake`, + mutate: `${baseUrl}/api/v1/mutate`, + upload: `${baseUrl}/api/v1/upload`, + media: `${baseUrl}/api/v1/media`, + logout: `${baseUrl}/api/v1/logout`, + history: `${baseUrl}/api/v1/history`, + pending: `${baseUrl}/api/v1/pending`, + drafts: `${baseUrl}/api/v1/draft` + } +} -export const cloudConfig = { - url: baseUrl, - jwks: `${baseUrl}/.well-known/jwks.json`, - setup: `${baseUrl}/setup`, - auth: `${baseUrl}/auth`, - handshake: `${baseUrl}/api/v1/handshake`, - mutate: `${baseUrl}/api/v1/mutate`, - upload: `${baseUrl}/api/v1/upload`, - media: `${baseUrl}/api/v1/media`, - logout: `${baseUrl}/api/v1/logout`, - history: `${baseUrl}/api/v1/history` +function createCloudUrl() { + if (typeof process !== 'undefined') { + if (process.env.ALINEA_CLOUD_URL) return process.env.ALINEA_CLOUD_URL + if (process.env.ALINEA_CLOUD_DEBUG) return '' + } + return 'https://www.alinea.cloud' } + +export const cloudUrl = createCloudUrl() +export const cloudConfig = createCloudConfig(cloudUrl) diff --git a/src/cloud/server/CloudDebugHandler.ts b/src/cloud/server/CloudDebugHandler.ts new file mode 100644 index 000000000..7f9c8aafb --- /dev/null +++ b/src/cloud/server/CloudDebugHandler.ts @@ -0,0 +1,99 @@ +import {Database, Handler, JWTPreviews, Media, Target} from 'alinea/backend' +import {Drafts} from 'alinea/backend/Drafts' +import {History, Revision} from 'alinea/backend/History' +import {Pending} from 'alinea/backend/Pending' +import {Config, Connection, Draft, createId} from 'alinea/core' +import {EntryRecord} from 'alinea/core/EntryRecord' +import {Mutation} from 'alinea/core/Mutation' + +const latency = 0 + +const lag = (ms: number) => new Promise(resolve => setTimeout(resolve, ms)) + +export class DebugCloud implements Media, Target, History, Drafts, Pending { + drafts = new Map() + pending: Array = [] + + constructor(public config: Config, public db: Database) {} + + async mutate(params: Connection.MutateParams) { + await lag(latency) + const mutations = params.mutations.flatMap(mutate => mutate.meta) + for (const mutation of params.mutations) { + console.log( + `> cloud: mutate ${mutation.meta.type} - ${mutation.meta.entryId}` + ) + } + const toCommitHash = createId() + await this.db.applyMutations(mutations, toCommitHash) + this.pending.push({...params, toCommitHash}) + console.log(`> cloud: current ${toCommitHash}`) + return {commitHash: toCommitHash} + } + + prepareUpload(file: string): Promise { + throw new 
Error(`Not implemented`) + } + + async deleteUpload({ + location, + workspace + }: Connection.DeleteParams): Promise { + await lag(latency) + console.log(`> cloud: delete`, location, workspace) + } + + async revisions(file: string): Promise> { + await lag(latency) + return [] + } + + async revisionData(file: string, revision: string): Promise { + await lag(latency) + throw new Error(`Not implemented`) + } + + async getDraft(entryId: string): Promise { + await lag(latency) + return this.drafts.get(entryId) + } + + async storeDraft(draft: Draft): Promise { + await lag(latency) + console.log(`> cloud: store draft ${draft.entryId}`) + this.drafts.set(draft.entryId, draft) + } + + async pendingSince( + commitHash: string + ): Promise<{toCommitHash: string; mutations: Array} | undefined> { + await lag(latency) + console.log(`> cloud: pending since ${commitHash}`) + let i = this.pending.length + for (; i >= 0; i--) + if (i > 0 && this.pending[i - 1].toCommitHash === commitHash) break + const pending = this.pending.slice(i) + if (pending.length === 0) return undefined + return { + toCommitHash: pending[pending.length - 1].toCommitHash, + mutations: pending.flatMap(params => + params.mutations.flatMap(mutate => mutate.meta) + ) + } + } +} + +export function createCloudDebugHandler(config: Config, db: Database) { + const api = new DebugCloud(config, db) + return new Handler({ + db, + config, + target: api, + media: api, + history: api, + drafts: api, + pending: api, + previews: new JWTPreviews('dev'), + previewAuthToken: 'dev' + }) +} diff --git a/src/cloud/server/CloudHandler.ts b/src/cloud/server/CloudHandler.ts index e0ff7632e..5fd6ad1fe 100644 --- a/src/cloud/server/CloudHandler.ts +++ b/src/cloud/server/CloudHandler.ts @@ -1,9 +1,13 @@ -import {Handler, JWTPreviews, Media, Target} from 'alinea/backend' +import {Database, Handler, JWTPreviews, Media, Target} from 'alinea/backend' +import {Drafts} from 'alinea/backend/Drafts' import {History, Revision} from 'alinea/backend/History' -import {Store} from 'alinea/backend/Store' -import {Config, Connection, HttpError, Workspace} from 'alinea/core' +import {Pending} from 'alinea/backend/Pending' +import {ChangeSet} from 'alinea/backend/data/ChangeSet' +import {Config, Connection, Draft, HttpError, Workspace} from 'alinea/core' import {EntryRecord} from 'alinea/core/EntryRecord' +import {Mutation} from 'alinea/core/Mutation' import {Outcome, OutcomeJSON} from 'alinea/core/Outcome' +import {base64} from 'alinea/core/util/Encoding' import {join} from 'alinea/core/util/Paths' import {CloudAuthServer} from './CloudAuthServer.js' import {cloudConfig} from './CloudConfig.js' @@ -38,24 +42,24 @@ function asJson(init: RequestInit = {}) { } } -export class CloudApi implements Media, Target, History { - canRename = false - +export class CloudApi implements Media, Target, History, Pending, Drafts { constructor(private config: Config) {} - mutate({mutations}: Connection.MutateParams, ctx: Connection.Context) { + mutate(params: Connection.MutateParams, ctx: Connection.Context) { return fetch( cloudConfig.mutate, withAuth( ctx, asJson({ method: 'POST', - body: JSON.stringify({mutations}) + body: JSON.stringify(params) }) ) ) .then(failOnHttpError) - .then(json) + .then>(json) + .then>(Outcome.fromJSON) + .then(Outcome.unpack) } prepareUpload( @@ -78,7 +82,7 @@ export class CloudApi implements Media, Target, History { .then(Outcome.unpack) } - delete( + deleteUpload( {location, workspace}: Connection.DeleteParams, ctx: Connection.Context ): Promise { @@ -118,21 
+122,92 @@ export class CloudApi implements Media, Target, History { .then>(Outcome.fromJSON) .then(Outcome.unpack) } + + pendingSince( + commitHash: string, + ctx: Connection.Context + ): Promise<{toCommitHash: string; mutations: Array} | undefined> { + return fetch( + cloudConfig.pending + '?' + new URLSearchParams({since: commitHash}), + withAuth(ctx) + ) + .then(failOnHttpError) + .then>>( + json + ) + .then>>( + Outcome.fromJSON + ) + .then(Outcome.unpack) + .then(pending => { + if (pending.length === 0) return undefined + return { + toCommitHash: pending[pending.length - 1].commitHashTo, + mutations: pending.flatMap(mutate => + mutate.mutations.flatMap(m => m.meta) + ) + } + }) + } + + storeDraft(draft: Draft, ctx: Connection.Context): Promise { + const body = { + fileHash: draft.fileHash, + update: base64.stringify(draft.draft) + } + return fetch( + cloudConfig.drafts + '/' + draft.entryId, + withAuth( + ctx, + asJson({ + method: 'PUT', + body: JSON.stringify(body) + }) + ) + ) + .then(failOnHttpError) + .then(() => undefined) + } + + getDraft( + entryId: string, + ctx: Connection.Context + ): Promise { + return fetch(cloudConfig.drafts + '/' + entryId, withAuth(ctx)) + .then(failOnHttpError) + .then>(json) + .then>( + Outcome.fromJSON + ) + .then(Outcome.unpack) + .then(data => { + return data + ? { + entryId, + fileHash: data.fileHash, + draft: base64.parse(data.update) + } + : undefined + }) + } } export function createCloudHandler( config: Config, - store: Store, + db: Database, apiKey: string | undefined ) { const api = new CloudApi(config) return new Handler({ auth: new CloudAuthServer({config, apiKey}), - store, + db, config, target: api, media: api, history: api, - previews: new JWTPreviews(apiKey!) + pending: api, + drafts: api, + previews: new JWTPreviews(apiKey!), + previewAuthToken: apiKey! 
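// A rough sketch (not taken from this diff) of how a consumer of the pending
// API implemented by DebugCloud and CloudApi above could catch up from its
// last known commit hash. PendingLike and catchUp are illustrative names;
// the real contract lives in alinea/backend/Pending and its methods also
// receive a Connection.Context.
import type {Mutation} from 'alinea/core/Mutation'

interface PendingLike {
  pendingSince(
    commitHash: string
  ): Promise<{toCommitHash: string; mutations: Array<Mutation>} | undefined>
}

async function catchUp(pending: PendingLike, lastKnownCommit: string) {
  const update = await pending.pendingSince(lastKnownCommit)
  // undefined means no mutations were committed after lastKnownCommit
  if (!update) return lastKnownCommit
  // ...apply update.mutations to the local database here...
  return update.toCommitHash // position to pass on the next poll
}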
}) } diff --git a/src/core.ts b/src/core.ts index fcc2211da..897853f30 100644 --- a/src/core.ts +++ b/src/core.ts @@ -3,6 +3,7 @@ export * from './core/CMS.js' export * from './core/Config.js' export * from './core/Connection.js' export * from './core/Doc.js' +export * from './core/Draft.js' export * from './core/Entry.js' export * from './core/EntryRow.js' export * from './core/Field.js' @@ -11,6 +12,7 @@ export * from './core/Hint.js' export * from './core/HttpError.js' export * from './core/Id.js' export * from './core/Infer.js' +export * from './core/Resolver.js' export * from './core/Label.js' export * from './core/Meta.js' export * from './core/Outcome.js' diff --git a/src/core/Auth.ts b/src/core/Auth.ts index 5f7fdcbeb..9896f4f10 100644 --- a/src/core/Auth.ts +++ b/src/core/Auth.ts @@ -1,11 +1,11 @@ -import type {Handler} from 'alinea/backend/router/Router' +import type {Route} from 'alinea/backend/router/Router' import type {ComponentType} from 'react' import {Connection} from './Connection.js' import {Session} from './Session.js' export namespace Auth { export type Server = { - handler: Handler + router?: Route contextFor(request: Request): Promise } export type ViewProps = {setSession: (session: Session | undefined) => void} @@ -15,9 +15,6 @@ export namespace Auth { return { async contextFor() { return {} - }, - handler() { - return undefined } } } diff --git a/src/core/CMS.ts b/src/core/CMS.ts index 9678f9dec..8e9cab3ee 100644 --- a/src/core/CMS.ts +++ b/src/core/CMS.ts @@ -1,28 +1,30 @@ import {Store} from 'alinea/backend/Store' import {CloudAuthView} from 'alinea/cloud/view/CloudAuth' +import {Resolver} from 'alinea/core' import {MediaFile, MediaLibrary} from 'alinea/core/media/MediaSchema' import {Config, DashboardConfig} from './Config.js' -import {Connection} from './Connection.js' import {GraphRealm, GraphRealmApi} from './Graph.js' import {Root} from './Root.js' import {Schema} from './Schema.js' import {Workspace} from './Workspace.js' +import {Realm} from './pages/Realm.js' import {entries} from './util/Objects.js' type Attachment = Workspace | Root const attached = new WeakMap() export interface CMSApi extends GraphRealmApi { - connection(): Promise + resolver(): Promise } export abstract class CMS extends GraphRealm implements Config, CMSApi { schema: Schema dashboard: DashboardConfig + drafts: GraphRealmApi constructor(config: Config) { super(config, async params => { - const cnx = await this.connection() + const cnx = await this.resolver() return cnx.resolve(params) }) this.schema = { @@ -34,10 +36,17 @@ export abstract class CMS extends GraphRealm implements Config, CMSApi { auth: CloudAuthView, ...(config.dashboard as DashboardConfig) } + this.drafts = new GraphRealm(this, async params => { + const {resolve} = await this.resolver() + return resolve({ + ...params, + realm: Realm.PreferDraft + }) + }) this.#attach(config) } - abstract connection(): Promise + abstract resolver(): Promise abstract exportStore(cwd: string, store: Uint8Array): Promise abstract readStore(): Promise diff --git a/src/core/Client.ts b/src/core/Client.ts index 3ff992032..2423c828e 100644 --- a/src/core/Client.ts +++ b/src/core/Client.ts @@ -1,11 +1,17 @@ import {AbortController, fetch, Response} from '@alinea/iso' -import {AlineaMeta} from 'alinea/backend/db/AlineaMeta' +import {DraftTransport} from 'alinea/backend/Drafts' import {Revision} from 'alinea/backend/History' -import {Config, Connection, EntryPhase, HttpError} from 'alinea/core' -import {UpdateResponse} from './Connection.js' 
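// An illustrative sketch (not part of the patch) of the new core Resolver
// contract that CMS.resolver() returns and that the cms.drafts realm above
// routes through. ResolverLike and resolvePreferringDrafts are made-up
// names; Realm and Connection are imported the same way other files in this
// diff import them.
import type {Connection} from 'alinea/core'
import {Realm} from 'alinea/core/pages/Realm'

interface ResolverLike {
  resolve(params: Connection.ResolveParams): Promise<unknown>
}

function resolvePreferringDrafts(
  resolver: ResolverLike,
  params: Connection.ResolveParams
) {
  // Mirrors what the drafts GraphRealm does: same params, draft realm
  return resolver.resolve({...params, realm: Realm.PreferDraft})
}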
+import { + Config, + Connection, + Draft, + HttpError, + ResolveDefaults +} from 'alinea/core' +import {SyncResponse} from './Connection.js' import {EntryRecord} from './EntryRecord.js' import {Mutation} from './Mutation.js' -import {Realm} from './pages/Realm.js' +import {base64} from './util/Encoding.js' async function failOnHttpError( res: Response, @@ -23,14 +29,7 @@ export interface ClientOptions { url: string applyAuth?: AuthenticateRequest unauthorized?: () => void - resolveDefaults?: { - realm?: Realm - preview?: { - entryId: string - phase: EntryPhase - update: string - } - } + resolveDefaults?: ResolveDefaults } export class Client implements Connection { @@ -42,6 +41,13 @@ export class Client implements Connection { ) } + prepareUpload(file: string): Promise { + return this.requestJson(Connection.routes.prepareUpload(), { + method: 'POST', + body: JSON.stringify({filename: file}) + }).then(failOnHttpError) + } + resolve(params: Connection.ResolveParams): Promise { const {resolveDefaults} = this.options const body = JSON.stringify({...resolveDefaults, ...params}) @@ -51,17 +57,18 @@ export class Client implements Connection { }).then(failOnHttpError) } - mutate(mutations: Array): Promise { + mutate(mutations: Array): Promise<{commitHash: string}> { return this.requestJson(Connection.routes.mutate(), { method: 'POST', body: JSON.stringify(mutations) - }).then(res => failOnHttpError(res, false)) + }).then<{commitHash: string}>(failOnHttpError) } authenticate(applyAuth: AuthenticateRequest, unauthorized: () => void) { return new Client({...this.options, applyAuth, unauthorized}) } + // History revisions(file: string): Promise> { const params = new URLSearchParams({file}) return this.requestJson( @@ -76,26 +83,40 @@ export class Client implements Connection { ).then(failOnHttpError) } - updates(request: AlineaMeta): Promise { - const params = new URLSearchParams() - params.append('contentHash', request.contentHash) - params.append('modifiedAt', String(request.modifiedAt)) + // Syncable + + syncRequired(contentHash: string): Promise { + const params = new URLSearchParams({contentHash}) return this.requestJson( - Connection.routes.updates() + '?' + params.toString() - ).then(failOnHttpError) + Connection.routes.sync() + '?' + params.toString() + ).then(failOnHttpError) } - versionIds(): Promise> { - return this.requestJson(Connection.routes.versionIds()).then>( - failOnHttpError - ) + sync(contentHashes: Array): Promise { + return this.requestJson(Connection.routes.sync(), { + method: 'POST', + body: JSON.stringify(contentHashes) + }).then(failOnHttpError) } - prepareUpload(file: string): Promise { - return this.requestJson(Connection.routes.prepareUpload(), { + // Drafts + + getDraft(entryId: string): Promise { + const params = new URLSearchParams({entryId}) + return this.requestJson(Connection.routes.draft() + '?' + params.toString()) + .then(failOnHttpError) + .then(draft => + draft + ? 
{...draft, draft: new Uint8Array(base64.parse(draft.draft))} + : undefined + ) + } + + storeDraft(draft: Draft): Promise { + return this.requestJson(Connection.routes.draft(), { method: 'POST', - body: JSON.stringify({filename: file}) - }).then(failOnHttpError) + body: JSON.stringify({...draft, draft: base64.stringify(draft.draft)}) + }).then(res => failOnHttpError(res, false)) } protected request(endpoint: string, init?: RequestInit): Promise { diff --git a/src/core/Connection.ts b/src/core/Connection.ts index a01a1bf5c..48281c0f4 100644 --- a/src/core/Connection.ts +++ b/src/core/Connection.ts @@ -1,31 +1,33 @@ +import {Drafts} from 'alinea/backend/Drafts' import {History, Revision} from 'alinea/backend/History' -import {ResolveDefaults} from 'alinea/backend/Resolver' import {ChangeSet} from 'alinea/backend/data/ChangeSet' -import {AlineaMeta} from 'alinea/backend/db/AlineaMeta' +import {Draft} from './Draft.js' import {EntryRecord} from './EntryRecord.js' import {EntryRow} from './EntryRow.js' import {Mutation} from './Mutation.js' +import {ResolveDefaults, Resolver} from './Resolver.js' import {User} from './User.js' import {Selection} from './pages/Selection.js' import {Logger} from './util/Logger.js' -export interface UpdateResponse { - contentHash: string - entries: Array +export interface SyncResponse { + insert: Array + remove: Array } export interface Syncable { - updates(request: AlineaMeta): Promise - versionIds(): Promise> + syncRequired(contentHash: string): Promise + sync(contentHashes: Array): Promise } -export interface Connection extends Syncable, History { +export interface Connection extends Resolver, Syncable, History, Drafts { previewToken(): Promise - resolve(params: Connection.ResolveParams): Promise - mutate(mutations: Array): Promise + mutate(mutations: Array): Promise<{commitHash: string}> prepareUpload(file: string): Promise revisions(file: string): Promise> revisionData(file: string, revisionId: string): Promise + getDraft(entryId: string): Promise + storeDraft(draft: Draft): Promise } export namespace Connection { @@ -73,7 +75,8 @@ export namespace Connection { export interface RevisionsParams { file: string } - export type MutateParams = { + export interface MutateParams { + commitHash: string mutations: ChangeSet } export interface AuthContext { @@ -99,11 +102,11 @@ export namespace Connection { revisions() { return base + `/revisions` }, - updates() { - return base + `/updates` + sync() { + return base + `/sync` }, - versionIds() { - return base + `/versionIds` + draft() { + return base + `/draft` }, media() { return base + `/media` diff --git a/src/core/Doc.ts b/src/core/Doc.ts index 7f89ddf4c..152381244 100644 --- a/src/core/Doc.ts +++ b/src/core/Doc.ts @@ -8,21 +8,13 @@ export const ROOT_KEY = '#root' export function createYDoc(type: Type, entry: EntryRow | null) { const doc = new Y.Doc({gc: false}) - const clientID = doc.clientID - doc.clientID = 1 - doc.transact(() => { - const docRoot = doc.getMap(ROOT_KEY) - if (!entry) return - for (const [key, field] of entries(type)) { - const contents = entry.data[key] - docRoot.set(key, Field.shape(field).toY(contents)) - } - }) - doc.clientID = clientID + if (entry) applyEntryData(doc, type, entry) return doc } export function applyEntryData(doc: Y.Doc, type: Type, entry: EntryRow) { + const clientID = doc.clientID + doc.clientID = 1 doc.transact(() => { const docRoot = doc.getMap(ROOT_KEY) for (const [key, field] of entries(type)) { @@ -31,6 +23,7 @@ export function applyEntryData(doc: Y.Doc, type: Type, 
entry: EntryRow) { docRoot.set(key, Field.shape(field).toY(contents)) } }) + doc.clientID = clientID } export function parseYDoc(type: Type, doc: Y.Doc) { diff --git a/src/core/Draft.ts b/src/core/Draft.ts new file mode 100644 index 000000000..9fbbd99ad --- /dev/null +++ b/src/core/Draft.ts @@ -0,0 +1,5 @@ +export interface Draft { + entryId: string + fileHash: string + draft: Uint8Array +} diff --git a/src/core/Edits.test.ts b/src/core/Edits.test.ts new file mode 100644 index 000000000..e69de29bb diff --git a/src/core/EntryFilenames.ts b/src/core/EntryFilenames.ts index 52e578d2c..b679a24c2 100644 --- a/src/core/EntryFilenames.ts +++ b/src/core/EntryFilenames.ts @@ -1,12 +1,11 @@ import {JsonLoader} from 'alinea/backend' import {Config} from './Config.js' -import {EntryPhase, EntryRow} from './EntryRow.js' +import {ALT_STATUS, EntryPhase, EntryRow} from './EntryRow.js' +import {EntryUrlMeta, Type} from './Type.js' import {Workspace} from './Workspace.js' import {values} from './util/Objects.js' import {join} from './util/Paths.js' -const ALT_STATUS = [EntryPhase.Draft, EntryPhase.Archived] - export function workspaceMediaDir(config: Config, workspace: string): string { return Workspace.data(config.workspaces[workspace])?.mediaDir ?? '' } @@ -107,3 +106,19 @@ export function entryFile(config: Config, entry: EntryRow) { if (!root) throw new Error(`Root "${entry.root}" does not exist`) return join(contentDir, entry.root, filePath) } + +export function entryUrl(type: Type, meta: EntryUrlMeta) { + const {entryUrl} = Type.meta(type) + if (entryUrl) return entryUrl(meta) + const segments = meta.locale ? [meta.locale] : [] + return ( + '/' + + segments + .concat( + meta.parentPaths + .concat(meta.path) + .filter(segment => segment !== 'index') + ) + .join('/') + ) +} diff --git a/src/core/EntryRecord.ts b/src/core/EntryRecord.ts index 197324ef5..bc4784655 100644 --- a/src/core/EntryRecord.ts +++ b/src/core/EntryRecord.ts @@ -27,8 +27,16 @@ export const EntryRecord = object( } ) -export function createRecord(entry: EntryRow): EntryRecord { - const {path, ...data} = entry.data +interface RequiredEntryFields extends Partial { + entryId: string + type: string + index: string + title: string + data: Record +} + +export function createRecord(entry: RequiredEntryFields): EntryRecord { + const {path, title = entry.title, ...data} = entry.data const meta: EntryMeta = { entryId: entry.entryId, type: entry.type, @@ -38,7 +46,7 @@ export function createRecord(entry: EntryRow): EntryRecord { if (entry.locale && entry.i18nId) meta.i18nId = entry.i18nId if (!entry.parent) meta.root = entry.root return { - title: entry.title, + title, ...data, [META_KEY]: meta } diff --git a/src/core/EntryRow.ts b/src/core/EntryRow.ts index c38c8d741..3ba6d1a74 100644 --- a/src/core/EntryRow.ts +++ b/src/core/EntryRow.ts @@ -7,6 +7,11 @@ export enum EntryPhase { Archived = 'archived' } +export const ALT_STATUS: Array = [ + EntryPhase.Draft, + EntryPhase.Archived +] + export type EntryLinks = {[field: string]: Array} export class EntryTable { @@ -23,7 +28,7 @@ export class EntryTable { level = column.number // Amount of parents filePath = column.string parentDir = column.string - childrenDir = column.string.nullable + childrenDir = column.string index = column.string parent = column.string.nullable @@ -36,8 +41,10 @@ export class EntryTable { get versionId() { return this.entryId.concat('.').concat(this.phase) } + /** @deprecated */ modifiedAt = column.number - contentHash = column.string + rowHash = column.string + fileHash = 
column.string // Entries from which a new draft can be created are marked as active, // there is only one active entry per entryId @@ -68,8 +75,7 @@ export const EntryRow = table({ }, [table.indexes]() { return { - modifiedAt: index(this.modifiedAt), - contentHash: index(this.contentHash), + rowHash: index(this.rowHash), type: index(this.type), parent: index(this.parent), url: index(this.url), diff --git a/src/core/Field.ts b/src/core/Field.ts index 7265f0f3c..7a7d10ce2 100644 --- a/src/core/Field.ts +++ b/src/core/Field.ts @@ -6,14 +6,15 @@ import {Hint} from './Hint.js' import {Label} from './Label.js' import {Shape} from './Shape.js' import {TextDoc} from './TextDoc.js' -import {ListMutator} from './shape/ListShape.js' +import {ListMutator, ListRow, ListShape} from './shape/ListShape.js' import {RecordMutator, RecordShape} from './shape/RecordShape.js' -import {RichTextMutator} from './shape/RichTextShape.js' -import {UnionMutator, UnionRow} from './shape/UnionShape.js' +import {RichTextMutator, RichTextShape} from './shape/RichTextShape.js' +import {ScalarShape} from './shape/ScalarShape.js' +import {UnionMutator, UnionRow, UnionShape} from './shape/UnionShape.js' export interface FieldOptions { hidden?: boolean - readonly?: boolean + readOnly?: boolean } export interface FieldMeta { @@ -55,23 +56,27 @@ export namespace Field { > { constructor(meta: FieldMeta void, Options>) { super({ - shape: Shape.Scalar(meta.label, meta.initialValue), + shape: new ScalarShape(meta.label, meta.initialValue), ...meta }) } } export class List extends Field< - Array, - ListMutator, + Array, + ListMutator, Options > { constructor( shape: {[key: string]: RecordShape}, - meta: FieldMeta, ListMutator, Options> + meta: FieldMeta< + Array, + ListMutator, + Options + > ) { super({ - shape: Shape.List( + shape: new ListShape( meta.label, shape, meta.initialValue, @@ -92,7 +97,7 @@ export namespace Field { meta: FieldMeta, Options> ) { super({ - shape: Shape.Union( + shape: new UnionShape( meta.label, shapes, meta.initialValue, @@ -126,7 +131,7 @@ export namespace Field { meta: FieldMeta, RichTextMutator, Options> ) { super({ - shape: Shape.RichText(meta.label, shape, meta.initialValue), + shape: new RichTextShape(meta.label, shape, meta.initialValue), ...meta }) } diff --git a/src/core/Mutation.ts b/src/core/Mutation.ts index 297de9716..03e6da385 100644 --- a/src/core/Mutation.ts +++ b/src/core/Mutation.ts @@ -1,4 +1,4 @@ -import {EntryRow} from './EntryRow.js' +import {EntryPhase, EntryRow} from './EntryRow.js' export enum MutationProgress { Finished = 'finished', @@ -40,8 +40,9 @@ export interface EditMutation { type: MutationType.Edit entryId: string file: string - previousFile?: string entry: EntryRow + previousFile?: string + update?: string } export interface CreateMutation { @@ -54,6 +55,7 @@ export interface CreateMutation { export interface PublishMutation { type: MutationType.Publish entryId: string + phase: EntryPhase file: string } diff --git a/src/core/Resolver.ts b/src/core/Resolver.ts new file mode 100644 index 000000000..c10f90027 --- /dev/null +++ b/src/core/Resolver.ts @@ -0,0 +1,18 @@ +import {Connection} from './Connection.js' +import {EntryPhase} from './EntryRow.js' +import {Realm} from './pages/Realm.js' + +export interface PreviewUpdate { + entryId: string + phase: EntryPhase + update: string +} + +export interface ResolveDefaults { + realm?: Realm + preview?: PreviewUpdate +} + +export interface Resolver { + resolve(params: Connection.ResolveParams): Promise +} diff --git 
a/src/core/Shape.ts b/src/core/Shape.ts index e4341af96..078a2db4b 100644 --- a/src/core/Shape.ts +++ b/src/core/Shape.ts @@ -1,15 +1,9 @@ import {LinkResolver} from 'alinea/backend/resolver/LinkResolver' import * as Y from 'yjs' import {Label} from './Label.js' -import {TextDoc} from './TextDoc.js' -import {PostProcess} from './pages/PostProcess.js' -import {ListShape} from './shape/ListShape.js' import {RecordShape} from './shape/RecordShape.js' -import {RichTextShape} from './shape/RichTextShape.js' -import {ScalarShape} from './shape/ScalarShape.js' -import {UnionRow, UnionShape} from './shape/UnionShape.js' -type YType = Y.AbstractType +type YType = Y.Doc | Y.Map export interface ShapeInfo { name: string @@ -24,44 +18,9 @@ export interface Shape { typeOfChild(yValue: any, child: string): Shape toY(value: Value): any fromY(yValue: any): Value - watch(parent: YType, key: string): (fun: () => void) => void - mutator(parent: Y.Doc | YType, key: string): OnChange + applyY(value: Value, parent: YType, key: string): void + watch(parent: YType, key: string): (fun: () => void) => () => void + mutator(parent: YType, key: string, readOnly: boolean): OnChange toString(): string applyLinks(value: Value, loader: LinkResolver): Promise } - -export namespace Shape { - export function Scalar(label: Label, initialValue?: T) { - return new ScalarShape(label, initialValue) - } - export function RichText( - label: Label, - shapes?: Record>, - initialValue?: TextDoc - ) { - return new RichTextShape(label, shapes, initialValue) - } - export function List( - label: Label, - shapes: Record>, - initialValue?: Array, - postProcess?: PostProcess> - ) { - return new ListShape(label, shapes, initialValue, postProcess) - } - export function Record( - label: Label, - shape: Record>, - initialValue?: T - ): RecordShape { - return new RecordShape(label, shape, initialValue) - } - export function Union( - label: Label, - shapes: Record, - initialValue?: UnionRow & T, - postProcess?: PostProcess - ): UnionShape { - return new UnionShape(label, shapes, initialValue, postProcess) - } -} diff --git a/src/core/Type.ts b/src/core/Type.ts index 7da2295a8..a6c4c81c3 100644 --- a/src/core/Type.ts +++ b/src/core/Type.ts @@ -10,7 +10,6 @@ import {createId} from './Id.js' import {Label} from './Label.js' import {Meta, StripMeta} from './Meta.js' import {Section, section} from './Section.js' -import {Shape} from './Shape.js' import type {View} from './View.js' import {RecordShape} from './shape/RecordShape.js' import { @@ -156,7 +155,7 @@ class TypeInstance implements TypeData { constructor(public label: Label, public definition: Definition) { this.meta = this.definition[Meta] || {} - this.shape = Shape.Record( + this.shape = new RecordShape( label, fromEntries( fieldsOfDefinition(definition).map(([key, field]) => { diff --git a/src/core/driver/DefaultDriver.server.tsx b/src/core/driver/DefaultDriver.server.tsx index a4f24f392..2143002e0 100644 --- a/src/core/driver/DefaultDriver.server.tsx +++ b/src/core/driver/DefaultDriver.server.tsx @@ -1,16 +1,19 @@ -import {Server} from 'alinea/backend' +import {Database} from 'alinea/backend' import {Store, createStore} from 'alinea/backend/Store' +import {EntryResolver} from 'alinea/backend/resolver/EntryResolver' import {exportStore} from 'alinea/cli/util/ExportStore' import {base64} from 'alinea/core/util/Encoding' +import PLazy from 'p-lazy' import {CMS, CMSApi} from '../CMS.js' import {Client} from '../Client.js' import {Config} from '../Config.js' -import {Connection} from 
'../Connection.js' +import {Resolver} from '../Resolver.js' import {Realm} from '../pages/Realm.js' -import {Logger} from '../util/Logger.js' import {join} from '../util/Paths.js' export class DefaultDriver extends CMS { + db = PLazy.from(this.createDb.bind(this)) + exportStore(outDir: string, data: Uint8Array): Promise { return exportStore(data, join(outDir, 'store.js')) } @@ -21,7 +24,7 @@ export class DefaultDriver extends CMS { return createStore(new Uint8Array(base64.parse(storeData))) } - async connection(): Promise { + async resolver(): Promise { const devUrl = process.env.ALINEA_DEV_SERVER if (devUrl) return new Client({ @@ -31,17 +34,11 @@ export class DefaultDriver extends CMS { realm: Realm.Published } }) - const store = await this.readStore() - return new Server( - { - config: this.config, - store, - media: undefined!, - target: undefined!, - previews: undefined! - }, - {logger: new Logger('CMSDriver')} - ) + return new EntryResolver(await this.db, this.config.schema) + } + + private async createDb() { + return new Database(this.config, await this.readStore()) } } diff --git a/src/core/driver/DefaultDriver.tsx b/src/core/driver/DefaultDriver.tsx index ca751df60..036a7e290 100644 --- a/src/core/driver/DefaultDriver.tsx +++ b/src/core/driver/DefaultDriver.tsx @@ -1,7 +1,7 @@ import {Store} from 'alinea/backend/Store' import {CMS, CMSApi} from '../CMS.js' import {Config} from '../Config.js' -import {Connection} from '../Connection.js' +import {Resolver} from '../Resolver.js' export class DefaultDriver extends CMS { exportStore(outDir: string, data: Uint8Array): Promise { @@ -12,7 +12,7 @@ export class DefaultDriver extends CMS { throw new Error('Not implemented') } - async connection(): Promise { + async resolver(): Promise { throw new Error('Not implemented') } } diff --git a/src/core/driver/NextDriver.server.tsx b/src/core/driver/NextDriver.server.tsx index c07434d9f..9e513928a 100644 --- a/src/core/driver/NextDriver.server.tsx +++ b/src/core/driver/NextDriver.server.tsx @@ -1,4 +1,4 @@ -import {JWTPreviews, Media, Server, Target} from 'alinea/backend' +import {JWTPreviews} from 'alinea/backend' import {createCloudHandler} from 'alinea/cloud/server/CloudHandler' import {parseChunkedCookies} from 'alinea/preview/ChunkCookieValue' import { @@ -9,15 +9,14 @@ import { import {enums, object, string} from 'cito' import PLazy from 'p-lazy' import {Suspense, lazy} from 'react' -import {Client, ClientOptions} from '../Client.js' +import {Client} from '../Client.js' import {Config} from '../Config.js' -import {Connection} from '../Connection.js' import {Entry} from '../Entry.js' import {EntryPhase} from '../EntryRow.js' import {outcome} from '../Outcome.js' +import {ResolveDefaults, Resolver} from '../Resolver.js' import {Realm} from '../pages/Realm.js' import {Selection} from '../pages/Selection.js' -import {Logger} from '../util/Logger.js' import {DefaultDriver} from './DefaultDriver.server.js' import {NextApi} from './NextDriver.js' @@ -30,14 +29,13 @@ const SearchParams = object({ class NextDriver extends DefaultDriver implements NextApi { apiKey = process.env.ALINEA_API_KEY jwtSecret = this.apiKey || 'dev' - store = PLazy.from(this.readStore.bind(this)) - async connection(): Promise { + async resolver(): Promise { const {cookies, draftMode} = await import('next/headers.js') const [draftStatus] = outcome(() => draftMode()) const isDraft = draftStatus?.isEnabled const devUrl = process.env.ALINEA_DEV_SERVER - const resolveDefaults: ClientOptions['resolveDefaults'] = { + const 
resolveDefaults: ResolveDefaults = { realm: Realm.Published } if (isDraft) { @@ -60,36 +58,19 @@ class NextDriver extends DefaultDriver implements NextApi { url: devUrl, resolveDefaults }) - const store = await this.store - return new Server( - { - config: this.config, - store, - get media(): Media { - throw new Error('Cannot access local media') - }, - get target(): Target { - throw new Error('Cannot access local target') - }, - previews: new JWTPreviews(this.jwtSecret), - resolveDefaults - }, - {logger: new Logger('CMSDriver')} - ) + const handler = await this.cloudHandler + return handler.resolver } backendHandler = async (request: Request) => { const handler = await this.cloudHandler - return handler(request) + const response = await handler.router.handle(request) + return response ?? new Response('Not found', {status: 404}) } cloudHandler = PLazy.from(async () => { - const store = await this.store - const handler = createCloudHandler(this, store, this.apiKey) - return async (request: Request) => { - const response = await handler.handle(request) - return response ?? new Response('Not found', {status: 404}) - } + const db = await this.db + return createCloudHandler(this, db, this.apiKey) }) previewHandler = async (request: Request) => { @@ -108,7 +89,7 @@ class NextDriver extends DefaultDriver implements NextApi { cookies().delete(PREVIEW_ENTRYID_NAME) cookies().delete(PREVIEW_PHASE_NAME) } - const cnx = (await this.connection()) as Client + const cnx = (await this.resolver()) as Client const url = (await cnx.resolve({ selection: Selection.create( Entry({entryId: params.entryId}).select(Entry.url).first() diff --git a/src/core/driver/TestDriver.ts b/src/core/driver/TestDriver.ts index 00045918c..6692d5139 100644 --- a/src/core/driver/TestDriver.ts +++ b/src/core/driver/TestDriver.ts @@ -1,40 +1,35 @@ import sqlite from '@alinea/sqlite-wasm' -import {Database, JWTPreviews, Media, Server, Target} from 'alinea/backend' +import {Database, Handler, JWTPreviews} from 'alinea/backend' import {Store} from 'alinea/backend/Store' -import {Connection} from 'alinea/core' -import {DefaultDriver} from 'alinea/core/driver/DefaultDriver' import {connect} from 'rado/driver/sql.js' -import {CMSApi} from '../CMS.js' +import {CMS, CMSApi} from '../CMS.js' import {Config} from '../Config.js' +import {Connection} from '../Connection.js' +import {Resolver} from '../Resolver.js' import {Logger} from '../util/Logger.js' +import {DefaultDriver} from './DefaultDriver.js' export interface TestApi extends CMSApi { - generate(): Promise + db: Promise + connection(): Promise } class TestDriver extends DefaultDriver implements TestApi { store: Promise = sqlite().then(({Database}) => connect(new Database()).toAsync() ) - server = this.store.then(async store => { - const server = new Server( - { - config: this, - store: store, - get target(): Target { - throw new Error('Test driver cannot publish') - }, - get media(): Media { - throw new Error('Test driver has no media backend') - }, - previews: new JWTPreviews('test') - }, - { - logger: new Logger('test') - } - ) - await server.db.fill({async *entries() {}}) - return server + db = this.store.then(async store => { + return new Database(this, store) + }) + handler = this.db.then(async db => { + await db.fill({async *entries() {}}, '') + const handler = new Handler({ + config: this, + db, + previews: new JWTPreviews('test'), + previewAuthToken: 'test' + }) + return handler.connect({logger: new Logger('test')}) }) async readStore(): Promise { @@ -42,19 +37,16 @@ class 
TestDriver extends DefaultDriver implements TestApi { } async connection(): Promise { - return this.server + return this.handler } - async generate() { - const db = new Database(await this.store, this) - await db.fill({ - async *entries() {} - }) + async resolver(): Promise { + return this.handler } } export function createTestCMS( config: Definition -): Definition & TestApi { +): Definition & TestApi & CMS { return new TestDriver(config) as any } diff --git a/src/core/shape/ListShape.test.ts b/src/core/shape/ListShape.test.ts new file mode 100644 index 000000000..64caec83e --- /dev/null +++ b/src/core/shape/ListShape.test.ts @@ -0,0 +1,96 @@ +import {ListShape} from 'alinea/core' +import {RecordShape} from 'alinea/core/shape/RecordShape' +import {ScalarShape} from 'alinea/core/shape/ScalarShape' +import * as Y from 'alinea/yjs' +import {test} from 'uvu' +import * as assert from 'uvu/assert' + +const ROOT_KEY = '$root' +const FIELD_KEY = '$doc' + +const shape = new ListShape('List', { + Block1: new RecordShape('Block1', { + field1: new ScalarShape('field1'), + blockInner: new RecordShape('Inner block', { + field3: new ScalarShape('field3'), + field4: new ScalarShape('field4') + }) + }), + Block2: new RecordShape('Block2', { + field3: new ScalarShape('field3'), + field4: new ScalarShape('field4') + }) +}) + +const value1 = [ + { + id: 'unique0', + type: 'Block1', + index: 'a0', + field1: 'a', + blockInner: { + field3: 'a', + field4: 'b' + } + }, + { + id: 'unique1', + type: 'Block1', + index: 'a0', + field1: 'a', + blockInner: { + field3: 'a', + field4: 'b' + } + }, + { + id: 'unique2', + type: 'Block2', + index: 'a1', + field3: 'a', + field4: 'b' + } +] + +const value2 = [ + { + id: 'unique1', + type: 'Block1', + index: 'a0', + field1: '00', + blockInner: { + field3: 'a', + field4: 'c' + } + }, + { + id: 'unique3', + type: 'Block1', + index: 'a1', + field1: 'a', + blockInner: { + field3: 'a', + field4: 'b' + } + }, + { + id: 'unique2', + type: 'Block2', + index: 'a2', + field3: 'a11', + field4: 'b' + } +] + +test('apply', () => { + const doc = new Y.Doc() + const root = doc.getMap(ROOT_KEY) + root.set(FIELD_KEY, shape.toY(value1)) + doc.transact(() => { + shape.applyY(value2, root, FIELD_KEY) + }) + const pass2 = shape.fromY(root.get(FIELD_KEY)) + assert.equal(pass2, value2) +}) + +test.run() diff --git a/src/core/shape/ListShape.ts b/src/core/shape/ListShape.ts index de77148e6..91186199f 100644 --- a/src/core/shape/ListShape.ts +++ b/src/core/shape/ListShape.ts @@ -7,6 +7,7 @@ import {Shape, ShapeInfo} from '../Shape.js' import {PostProcess} from '../pages/PostProcess.js' import {generateKeyBetween} from '../util/FractionalIndexing.js' import {RecordShape} from './RecordShape.js' +import {ScalarShape} from './ScalarShape.js' export type ListRow = { id: string @@ -42,9 +43,9 @@ export class ListShape return [ key, new RecordShape(label, { - id: Shape.Scalar('Id'), - index: Shape.Scalar('Index'), - type: Shape.Scalar('Type'), + id: new ScalarShape('Id'), + index: new ScalarShape('Index'), + type: new ScalarShape('Type'), ...type.properties }) ] @@ -96,6 +97,42 @@ export class ListShape rows.sort(sort) return rows } + applyY(value: (ListRow & T)[], parent: Y.Map, key: string): void { + if (!Array.isArray(value)) return + const current: Y.Map | undefined = parent.get(key) + if (!current) return void parent.set(key, this.toY(value)) + const currentKeys = new Set(current.keys()) + const valueKeys = new Set(value.map(row => row.id)) + const removed = [...currentKeys].filter(key => 
!valueKeys.has(key)) + const added = [...valueKeys].filter(key => !currentKeys.has(key)) + const changed = [...valueKeys].filter(key => currentKeys.has(key)) + for (const id of removed) current.delete(id) + for (const id of added) { + const row = value.find(row => row.id === id) + if (!row) continue + const type = row.type + const rowType = this.values[type] + if (!rowType) continue + current.set(id, rowType.toY(row)) + } + for (const id of changed) { + const row = value.find(row => row.id === id) + if (!row) continue + const type = row.type + const currentRow = current.get(id) + if (!currentRow) continue + const currentType = currentRow.get('type') + // This shouldn't normally happen unless we manually change the type + if (currentType !== type) { + current.delete(id) + current.set(id, this.values[type].toY(row)) + continue + } + const rowType = this.values[type] + if (!rowType) continue + rowType.applyY(row, current, id) + } + } watch(parent: Y.Map, key: string) { const record: Y.Map = parent.has(key) ? parent.get(key) @@ -114,9 +151,11 @@ export class ListShape } } } - mutator(parent: Y.Map, key: string) { + mutator(parent: Y.Map, key: string, readOnly: boolean) { const res = { + readOnly, replace: (id: string, row: ListRow & T) => { + if (readOnly) return const record = parent.get(key) const rows: Array = this.fromY(record) as any const index = rows.findIndex(r => r.id === id) @@ -124,6 +163,7 @@ export class ListShape res.push(row, index) }, push: (row: Omit, insertAt?: number) => { + if (readOnly) return const type = row.type const shape = this.values[type] const record = parent.get(key) @@ -142,10 +182,12 @@ export class ListShape record.set(id, item) }, remove(id: string) { + if (readOnly) return const record = parent.get(key) record.delete(id) }, move: (oldIndex: number, newIndex: number) => { + if (readOnly) return const record = parent.get(key) const rows: Array = this.fromY(record) as any const from = rows[oldIndex] diff --git a/src/core/shape/RecordShape.test.ts b/src/core/shape/RecordShape.test.ts new file mode 100644 index 000000000..43dae21ff --- /dev/null +++ b/src/core/shape/RecordShape.test.ts @@ -0,0 +1,46 @@ +import {RecordShape} from 'alinea/core/shape/RecordShape' +import {ScalarShape} from 'alinea/core/shape/ScalarShape' +import * as Y from 'alinea/yjs' +import {test} from 'uvu' +import * as assert from 'uvu/assert' + +const ROOT_KEY = '$root' +const FIELD_KEY = '$doc' + +const shape = new RecordShape('Block1', { + field1: new ScalarShape('field1'), + blockInner: new RecordShape('Inner block', { + field3: new ScalarShape('field3'), + field4: new ScalarShape('field4') + }) +}) + +const value1 = { + field1: 'a', + blockInner: { + field3: 'a', + field4: 'b', + nonsense: 123 + } +} + +const value2 = { + field1: '1', + blockInner: { + field3: '2', + field4: undefined + } +} + +test('apply', () => { + const doc = new Y.Doc() + const root = doc.getMap(ROOT_KEY) + root.set(FIELD_KEY, shape.toY(value1)) + doc.transact(() => { + shape.applyY(value2, root, FIELD_KEY) + }) + const pass2 = shape.fromY(root.get(FIELD_KEY)) + assert.equal(pass2, value2) +}) + +test.run() diff --git a/src/core/shape/RecordShape.ts b/src/core/shape/RecordShape.ts index 1e6806620..3297aa409 100644 --- a/src/core/shape/RecordShape.ts +++ b/src/core/shape/RecordShape.ts @@ -2,7 +2,7 @@ import {LinkResolver} from 'alinea/backend/resolver/LinkResolver' import * as Y from 'yjs' import {Label} from '../Label.js' import {Shape} from '../Shape.js' -import {entries} from '../util/Objects.js' +import {entries, 
keys} from '../util/Objects.js' export type RecordMutator = { set: (k: K, v: T[K]) => void @@ -42,18 +42,27 @@ export class RecordShape implements Shape> { toY(value: T) { const self: Record = value || {} const map = new Y.Map() - for (const key of Object.keys(this.properties)) { + for (const key of keys(this.properties)) { map.set(key, this.properties[key].toY(self[key])) } return map } fromY(map: Y.Map) { const res: Record = {} - for (const key of Object.keys(this.properties)) { + for (const key of keys(this.properties)) { res[key] = this.properties[key].fromY(map?.get(key)) } return res as T } + applyY(value: T, map: Y.Doc | Y.Map, key: string) { + const current: Y.Map | undefined = + 'getMap' in map ? map.getMap(key) : map.get(key) + if (!current) return void (map as Y.Map).set(key, this.toY(value)) + const self: Record = value ?? {} + for (const key of keys(this.properties)) { + this.properties[key].applyY(self[key], current, key) + } + } watch(parent: Y.Map, key: string) { return (fun: () => void) => { const record = !key ? parent : parent.get(key) @@ -61,9 +70,10 @@ export class RecordShape implements Shape> { return () => record.unobserve(fun) } } - mutator(parent: Y.Map, key: string) { + mutator(parent: Y.Map, key: string, readOnly: boolean) { return { set: (k: K, v: T[K]) => { + if (readOnly) return const record = parent.get(key) const field = this.properties[k as string] record.set(k, field.toY(v)) diff --git a/src/core/shape/RichTextShape.test.ts b/src/core/shape/RichTextShape.test.ts index e9f84465e..b77f22d23 100644 --- a/src/core/shape/RichTextShape.test.ts +++ b/src/core/shape/RichTextShape.test.ts @@ -1,57 +1,223 @@ import {RichTextShape} from 'alinea/core' -import {Hint} from 'alinea/core/Hint' import {RecordShape} from 'alinea/core/shape/RecordShape' import {ScalarShape} from 'alinea/core/shape/ScalarShape' import * as Y from 'alinea/yjs' import {test} from 'uvu' import * as assert from 'uvu/assert' -test('serialize', () => { - const type = new RichTextShape('RichText', { - Block1: new RecordShape('Block1', { - field1: new ScalarShape('field1', Hint.String()), - blockInner: new RecordShape('Inner block', { - field3: new ScalarShape('field3', Hint.String()), - field4: new ScalarShape('field4', Hint.String()) - }) - }), - Block2: new RecordShape('Block2', { - field3: new ScalarShape('field3', Hint.String()), - field4: new ScalarShape('field4', Hint.String()) +const ROOT_KEY = '$root' +const FIELD_KEY = '$doc' + +const shape = new RichTextShape('RichText', { + Block1: new RecordShape('Block1', { + field1: new ScalarShape('field1'), + blockInner: new RecordShape('Inner block', { + field3: new ScalarShape('field3'), + field4: new ScalarShape('field4') }) + }), + Block2: new RecordShape('Block2', { + field3: new ScalarShape('field3'), + field4: new ScalarShape('field4') }) +}) - const value = [ - { - type: 'paragraph', - content: [{type: 'text', text: 'Hello'}] - }, - { - id: 'unique1', - type: 'Block1', - field1: 'a', - blockInner: { - field3: 'a', - field4: 'b' - } - }, - { - type: 'paragraph', - content: [{type: 'text', text: 'Hello'}] - }, - { - id: 'unique2', - type: 'Block2', +const value1 = [ + { + type: 'paragraph', + content: [{type: 'text', text: 'Hello'}] + }, + { + id: 'unique0', + type: 'Block2', + field3: 'a', + field4: 'b' + }, + { + id: 'unique1', + type: 'Block1', + field1: 'a', + blockInner: { field3: 'a', field4: 'b' } - ] - const yType = type.toY(value) + }, + { + type: 'paragraph', + content: [{type: 'text', text: 'Hello'}] + }, + { + id: 'unique2', + 
type: 'Block2', + field3: 'a', + field4: 'b' + } +] + +const value2 = [ + { + type: 'paragraph', + content: [{type: 'text', text: 'Hello 123'}] + }, + { + type: 'paragraph', + content: [{type: 'text', text: 'Hello'}] + }, + { + id: 'unique1', + type: 'Block1', + field1: '1', + blockInner: { + field3: 'a', + field4: 'c' + } + }, + { + id: 'unique2', + type: 'Block2', + field3: 'a', + field4: 'b' + }, + { + id: 'unique3', + type: 'Block2', + field3: 'abc', + field4: 'def' + } +] + +const value3 = [ + { + type: 'paragraph', + textAlign: 'left', + content: [ + { + type: 'text', + text: 'text part 1' + }, + { + type: 'text', + text: 'text part 2', + marks: [ + { + type: 'link', + attrs: { + 'data-id': '2WyS6kjRXyd0vLoZP0p129IPnAA', + 'data-entry': '2Ublmf4UWT5rHeIUSaJmqJYN0L9', + 'data-type': null, + href: null, + title: '' + } + } + ] + } + ] + } +] + +const value4 = [ + { + type: 'paragraph', + textAlign: 'left', + content: [ + { + type: 'text', + text: 'text part 1' + }, + { + type: 'text', + text: 'text part 3', + marks: [ + { + type: 'link', + attrs: { + 'data-id': '2WyS6kjRXyd0vLoZP0p129IPnAA', + 'data-entry': '2Ublmf4UWT5rHeIUSaJmqJYN0L9', + 'data-type': null, + href: null, + title: '' + } + } + ] + } + ] + } +] + +const value5 = [ + { + type: 'paragraph', + textAlign: 'left', + content: [ + { + type: 'text', + text: 'text part 1' + }, + { + type: 'text', + text: 'text part 2', + marks: [ + { + type: 'link', + attrs: { + 'data-id': 'xyz', + 'data-entry': '2Ublmf4UWT5rHeIUSaJmqJYN0L9', + 'data-type': null, + href: null, + title: '' + } + } + ] + } + ] + } +] + +test('serialize', () => { // Changes are not reflected in Y types until mounted in a Y.Doc const doc = new Y.Doc() - doc.getMap('root').set('$doc', yType) - const pass2 = type.fromY(yType) - assert.equal(pass2, value) + const yType = shape.toY(value1) + const root = doc.getMap(ROOT_KEY) + root.set(FIELD_KEY, yType) + const pass2 = shape.fromY(yType) + assert.equal(pass2, value1) +}) + +test('apply over empty', () => { + const doc = new Y.Doc() + const root = doc.getMap(ROOT_KEY) + shape.applyY(value1, root, FIELD_KEY) + const pass1 = shape.fromY(root.get(FIELD_KEY)) + assert.equal(pass1, value1) +}) + +test('apply over existing', () => { + const doc = new Y.Doc() + const root = doc.getMap(ROOT_KEY) + root.set(FIELD_KEY, shape.toY(value1)) + doc.transact(() => { + shape.applyY(value2, root, FIELD_KEY) + }) + const pass2 = shape.fromY(root.get(FIELD_KEY)) + assert.equal(pass2, value2) +}) + +test.only('update marks', () => { + const doc = new Y.Doc() + const root = doc.getMap(ROOT_KEY) + /*root.set(FIELD_KEY, shape.toY(value3)) + doc.transact(() => { + shape.applyY(value4, root, FIELD_KEY) + }) + const pass2 = shape.fromY(root.get(FIELD_KEY)) + assert.equal(pass2, value4)*/ + + root.set(FIELD_KEY, shape.toY(value3)) + doc.transact(() => { + shape.applyY(value5, root, FIELD_KEY) + }) + const pass3 = shape.fromY(root.get(FIELD_KEY)) + assert.equal(pass3, value5) }) test.run() diff --git a/src/core/shape/RichTextShape.ts b/src/core/shape/RichTextShape.ts index b7114abde..d7fabf5b3 100644 --- a/src/core/shape/RichTextShape.ts +++ b/src/core/shape/RichTextShape.ts @@ -6,8 +6,9 @@ import {Label} from '../Label.js' import {Shape} from '../Shape.js' import {TextDoc, TextNode} from '../TextDoc.js' import type {Expr} from '../pages/Expr.js' -import {entries, fromEntries} from '../util/Objects.js' +import {entries, fromEntries, keys} from '../util/Objects.js' import {RecordShape} from './RecordShape.js' +import {ScalarShape} from 
'./ScalarShape.js' // Adapted from: https://github.com/yjs/y-prosemirror/blob/1c393fb3254cc1ed4933e8326b57c1316793122a/src/lib.js#L245 function serialize( @@ -72,6 +73,7 @@ function unserialize(node: TextNode): Y.XmlText | Y.XmlElement { } export type RichTextMutator = { + readOnly: boolean map: Y.Map fragment: Y.XmlFragment insert: (id: string, block: string) => void @@ -95,25 +97,25 @@ let linkInfoFields = undefined! as { export class RichTextShape implements Shape, RichTextMutator> { - values?: Record + values: Record constructor( public label: Label, public shapes?: Record, public initialValue?: TextDoc ) { - this.values = - shapes && - fromEntries( - entries(shapes).map(([key, value]) => { - return [ - key, - new RecordShape(value.label, { - type: Shape.Scalar('Type'), - ...value.properties - }) - ] - }) - ) + this.values = shapes + ? fromEntries( + entries(shapes).map(([key, value]) => { + return [ + key, + new RecordShape(value.label, { + type: new ScalarShape('Type'), + ...value.properties + }) + ] + }) + ) + : {} } innerTypes(parents: Array) { if (!this.shapes) return [] @@ -134,30 +136,31 @@ export class RichTextShape if (value) return value as unknown as Shape throw new Error(`Type of block "${child}" not found`) } + toXml(rows: TextDoc) { + const types = this.values + return rows + .map(row => { + return row.type in types ? {type: row.type, id: (row as any).id} : row + }) + .map(unserialize) + } toY(value: TextDoc) { const map = new Y.Map() const text = new Y.XmlFragment() map.set('$text', text) - const types = this.values ?? {} + const types = this.values if (!Array.isArray(value)) return map for (const node of value) { const type = types[node.type] if (type && 'id' in node) map.set(node.id, type.toY(node as any)) } - text.insert( - 0, - value - .map(row => { - return types[row.type] ? {type: row.type, id: (row as any).id} : row - }) - .map(unserialize) - ) + text.insert(0, this.toXml(value)) return map } fromY(value: Y.Map): TextDoc { if (!value) return [] const text: Y.XmlFragment = value.get('$text') - const types = this.values || {} + const types = this.values ?? 
{} const content = text?.toArray()?.map(serialize)?.flat() || [] const isEmpty = content.length === 1 && @@ -176,14 +179,109 @@ export class RichTextShape return node as TextNode }) } + applyY(value: TextDoc, parent: Y.Map, key: string): void { + // Sync blocks + const current: Y.Map | undefined = parent.get(key) + if (!current) return void parent.set(key, this.toY(value)) + const blocks = value.filter( + row => this.values?.[row.type] && 'id' in row + ) as Array> + const currentKeys = new Set( + [...current.keys()].filter(key => key !== '$text') + ) + const valueKeys = new Set(blocks.map(row => row.id)) + const removed = [...currentKeys].filter(key => !valueKeys.has(key)) + const added = [...valueKeys].filter(key => !currentKeys.has(key)) + const changed = [...valueKeys].filter(key => currentKeys.has(key)) + for (const id of removed) current.delete(id) + for (const id of added) { + const row = blocks.find(row => row.id === id) + if (!row) continue + const type = row.type + const rowType = this.values[type] + if (!rowType) continue + current.set(id, rowType.toY(row)) + } + for (const id of changed) { + const row = blocks.find(row => row.id === id) + if (!row) continue + const type = row.type + const currentRow = current.get(id) + if (!currentRow) continue + const currentType = currentRow.get('type') + // This shouldn't normally happen unless we manually change the type + if (currentType !== type) { + current.delete(id) + current.set(id, this.values[type].toY(row)) + continue + } + const rowType = this.values[type] + if (!rowType) continue + rowType.applyY(row, current, id) + } + + // Sync text by simply matching each row. + // Todo: This must be improved by diffing to enable continuous editing + // during deploys without losing all text context + function syncText(source: Y.XmlText, target: TextNode.Text) { + const {text = '', marks = []} = target + const str = Y.Text.prototype.toString.call(source) + if (text === str) { + source.format(0, source.length, unserializeMarks(marks)) + } else { + source.delete(0, source.length) + source.insert(0, text, unserializeMarks(marks)) + } + } + const syncElement = ( + source: Y.XmlElement, + target: TextNode.Element + ) => { + const {type, content, ...attrs} = target + const isBlock = type in this.values + const keysToHandle = isBlock ? ['id'] : keys(attrs) + for (const key of keysToHandle) + source.setAttribute(key, attrs[key] as string) + if (isBlock) return + for (const key of keys(source.getAttributes())) + if (!keysToHandle.includes(key)) source.removeAttribute(key) + syncNodes(source, content ?? []) + } + const syncNodes = (source: Y.XmlElement, value: TextDoc) => { + let i = 0 + for (; i < value.length; i++) { + const row = value[i] + const node = source.get(i) + if (!node) { + source.insert(i, this.toXml([row])) + continue + } + const typeA = node instanceof Y.XmlText ? 
'text' : node.nodeName + const typeB = row.type + if (typeA !== typeB) { + source.delete(i) + source.insert(i, this.toXml([row])) + continue + } + if (typeA === 'text') { + syncText(node as Y.XmlText, row as TextNode.Text) + continue + } + syncElement(node as Y.XmlElement, row as TextNode.Element) + } + while (source.length > i) source.delete(i) + } + syncNodes(current.get('$text'), value) + } watch(parent: Y.Map, key: string) { // There's no watching of the fragment involved - return () => {} + return () => () => {} } - mutator(parent: Y.Map, key: string) { + mutator(parent: Y.Map, key: string, readOnly: boolean) { const map = parent.get(key) return { - map: parent.get(key), + readOnly, + map, fragment: map.get('$text'), insert: (id: string, block: string) => { if (!this.values) throw new Error('No types defined') diff --git a/src/core/shape/ScalarShape.test.ts b/src/core/shape/ScalarShape.test.ts new file mode 100644 index 000000000..4ab6cd599 --- /dev/null +++ b/src/core/shape/ScalarShape.test.ts @@ -0,0 +1,26 @@ +import {ScalarShape} from 'alinea/core/shape/ScalarShape' +import * as Y from 'alinea/yjs' +import {test} from 'uvu' +import * as assert from 'uvu/assert' + +const ROOT_KEY = '$root' +const FIELD_KEY = '$doc' + +const shape = new ScalarShape('field') + +const value1 = 'abc' + +const value2 = 132 + +test('apply', () => { + const doc = new Y.Doc() + const root = doc.getMap(ROOT_KEY) + root.set(FIELD_KEY, shape.toY(value1)) + doc.transact(() => { + shape.applyY(value2, root, FIELD_KEY) + }) + const pass2 = shape.fromY(root.get(FIELD_KEY)) + assert.equal(pass2, value2) +}) + +test.run() diff --git a/src/core/shape/ScalarShape.ts b/src/core/shape/ScalarShape.ts index d29ff6101..10a6ff252 100644 --- a/src/core/shape/ScalarShape.ts +++ b/src/core/shape/ScalarShape.ts @@ -21,6 +21,10 @@ export class ScalarShape implements Shape> { fromY(yValue: any) { return yValue } + applyY(value: T, parent: Y.Map, key: string): void { + const current = parent.get(key) + if (current !== value) parent.set(key, value) + } watch(parent: Y.Map, key: string) { return (fun: () => void) => { function w(event: Y.YMapEvent) { @@ -30,8 +34,9 @@ export class ScalarShape implements Shape> { return () => parent.unobserve(w) } } - mutator(parent: Y.Map, key: string) { + mutator(parent: Y.Map, key: string, readOnly?: boolean) { return (value: T) => { + if (readOnly) return parent.set(key, value) } } diff --git a/src/core/shape/UnionShape.test.ts b/src/core/shape/UnionShape.test.ts new file mode 100644 index 000000000..ddefe1a5a --- /dev/null +++ b/src/core/shape/UnionShape.test.ts @@ -0,0 +1,56 @@ +import {RecordShape} from 'alinea/core/shape/RecordShape' +import {ScalarShape} from 'alinea/core/shape/ScalarShape' +import * as Y from 'alinea/yjs' +import {test} from 'uvu' +import * as assert from 'uvu/assert' +import {UnionShape} from './UnionShape.js' + +const ROOT_KEY = '$root' +const FIELD_KEY = '$doc' + +const shape = new UnionShape('Blocks', { + Block1: new RecordShape('Block1', { + field1: new ScalarShape('field1'), + blockInner: new RecordShape('Inner block', { + field3: new ScalarShape('field3'), + field4: new ScalarShape('field4') + }) + }), + Block2: new RecordShape('Block2', { + field1: new ScalarShape('field1'), + field5: new ScalarShape('field3'), + field6: new ScalarShape('field4') + }) +}) + +const value1 = { + id: 'unique0', + type: 'Block1', + field1: 'a', + blockInner: { + field3: 'a', + field4: 'b', + nonsense: 123 + } +} + +const value2 = { + id: 'unique1', + type: 'Block2', + field1: '1', + 
field5: '2', + field6: undefined +} + +test('apply', () => { + const doc = new Y.Doc() + const root = doc.getMap(ROOT_KEY) + root.set(FIELD_KEY, shape.toY(value1)) + doc.transact(() => { + shape.applyY(value2, root, FIELD_KEY) + }) + const pass2 = shape.fromY(root.get(FIELD_KEY)) + assert.equal(pass2, value2) +}) + +test.run() diff --git a/src/core/shape/UnionShape.ts b/src/core/shape/UnionShape.ts index 47f881858..afbce596b 100644 --- a/src/core/shape/UnionShape.ts +++ b/src/core/shape/UnionShape.ts @@ -7,6 +7,7 @@ import {Shape, ShapeInfo} from '../Shape.js' import {PostProcess} from '../pages/PostProcess.js' import {entries, fromEntries} from '../util/Objects.js' import {RecordShape} from './RecordShape.js' +import {ScalarShape} from './ScalarShape.js' export type UnionRow = { id: string @@ -31,8 +32,8 @@ export class UnionShape implements Shape> { return [ key, new RecordShape(label, { - id: Shape.Scalar('Id'), - type: Shape.Scalar('Type'), + id: new ScalarShape('Id'), + type: new ScalarShape('Type'), ...type.properties }) ] @@ -78,6 +79,15 @@ export class UnionShape implements Shape> { if (recordType) return recordType.fromY(map) as UnionRow & T return {} as UnionRow & T } + applyY(value: UnionRow & T, parent: Y.Map, key: string): void { + const current: Y.Map | undefined = parent.get(key) + if (!current || !value) return void parent.set(key, this.toY(value)) + const currentType = current.get('type') + if (currentType !== value.type) return void parent.set(key, this.toY(value)) + const shape = this.shapes[currentType] + if (!shape) return + shape.applyY(value, parent, key) + } watch(parent: Y.Map, key: string) { return (fun: () => void) => { const observe = (event: Y.YMapEvent) => { @@ -87,13 +97,15 @@ export class UnionShape implements Shape> { return () => parent.unobserve(observe) } } - mutator(parent: Y.Map, key: string): UnionMutator { + mutator(parent: Y.Map, key: string, readOnly: boolean): UnionMutator { return { replace: (v: (UnionRow & T) | undefined) => { + if (readOnly) return if (!v) parent.set(key, null) else parent.set(key, this.toY(v)) }, set: (k: any, v: any) => { + if (readOnly) return const record = parent.get(key) const type = record.get('type') const shape = this.shapes[type] diff --git a/src/core/util/EntryRows.test.ts b/src/core/util/EntryRows.test.ts new file mode 100644 index 000000000..e69de29bb diff --git a/src/core/util/EntryRows.ts b/src/core/util/EntryRows.ts new file mode 100644 index 000000000..5f8d113aa --- /dev/null +++ b/src/core/util/EntryRows.ts @@ -0,0 +1,16 @@ +import {JsonLoader} from 'alinea/backend' +import {createFileHash, createRowHash} from 'alinea/backend/util/ContentHash' +import {Config} from '../Config.js' +import {createRecord} from '../EntryRecord.js' +import {EntryRow} from '../EntryRow.js' + +export async function createEntryRow( + config: Config, + input: Omit +): Promise { + const record = createRecord(input) + const fileContents = JsonLoader.format(config.schema, record) + const fileHash = await createFileHash(fileContents) + const rowHash = await createRowHash({...input, fileHash}) + return {...input, fileHash, rowHash} as T +} diff --git a/src/dashboard/App.tsx b/src/dashboard/App.tsx index 3dae3ba46..d6da6ec1e 100644 --- a/src/dashboard/App.tsx +++ b/src/dashboard/App.tsx @@ -1,7 +1,9 @@ import {Config, Connection, Root, renderLabel} from 'alinea/core' import {Icon, Loader, px} from 'alinea/ui' +import {Statusbar} from 'alinea/ui/Statusbar' import {FavIcon} from 'alinea/ui/branding/FavIcon' import {IcRoundDescription} from 
'alinea/ui/icons/IcRoundDescription' +import {MaterialSymbolsDatabase} from 'alinea/ui/icons/MaterialSymbolsDatabase' import {MdiSourceBranch} from 'alinea/ui/icons/MdiSourceBranch' import {atom, useAtom, useAtomValue} from 'jotai' import {useEffect} from 'react' @@ -16,11 +18,10 @@ import { sessionAtom, useSetDashboardOptions } from './atoms/DashboardAtoms.js' -import {useDbUpdater} from './atoms/DbAtoms.js' +import {dbHashAtom, useDbUpdater} from './atoms/DbAtoms.js' import {errorAtom} from './atoms/ErrorAtoms.js' import {locationAtom, matchAtoms, useLocation} from './atoms/LocationAtoms.js' import {usePreferredLanguage} from './atoms/NavigationAtoms.js' -import {pendingAtom} from './atoms/PendingAtoms.js' import {RouteView, RouterProvider} from './atoms/RouterAtoms.js' import {useDashboard} from './hook/UseDashboard.js' import {useEntryLocation} from './hook/UseEntryLocation.js' @@ -67,9 +68,8 @@ const isEntryAtom = atom(get => { }) function AppAuthenticated() { - const pending = useAtomValue(pendingAtom) - useDbUpdater(pending.length > 0 ? 30 : 60) - const {fullPage} = useDashboard() + useDbUpdater() + const {alineaDev, fullPage} = useDashboard() const nav = useNav() const isEntry = useAtomValue(isEntryAtom) const {name: workspace, color, roots} = useWorkspace() @@ -78,6 +78,7 @@ function AppAuthenticated() { const locale = useLocale() const [preferredLanguage, setPreferredLanguage] = usePreferredLanguage() const [errorMessage, setErrorMessage] = useAtom(errorAtom) + const dbHash = useAtomValue(dbHashAtom) useEffect(() => { setPreferredLanguage(locale) }, [locale]) @@ -88,9 +89,9 @@ function AppAuthenticated() {
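// --- Editor's note: illustrative sketch, not part of the diff itself ---
// The applyY implementations added above for ListShape and RichTextShape share
// one pattern for block rows: compare the ids already stored in the Y.Map with
// the ids present in the incoming value, then delete, add or recursively apply
// per id. A minimal standalone version of that diffing step (function and
// variable names here are illustrative, not the Alinea API):
import * as Y from 'yjs'

function diffIds(current: Y.Map<unknown>, incomingIds: Array<string>) {
  const currentKeys = new Set(current.keys())
  const valueKeys = new Set(incomingIds)
  return {
    removed: [...currentKeys].filter(id => !valueKeys.has(id)),
    added: [...valueKeys].filter(id => !currentKeys.has(id)),
    changed: [...valueKeys].filter(id => currentKeys.has(id))
  }
}

// Usage: the map holds rows 'a' and 'b', while the new value holds 'b' and 'c'
const doc = new Y.Doc()
const rows = doc.getMap('rows')
rows.set('a', 1)
rows.set('b', 2)
console.log(diffIds(rows, ['b', 'c']))
// => {removed: ['a'], added: ['c'], changed: ['b']}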
{errorMessage}
)} - - - + + + @@ -134,9 +135,16 @@ function AppAuthenticated() { - - - + {alineaDev && ( + + + {dbHash} + + + )} + + + ) } @@ -148,7 +156,7 @@ function AppRoot() { const Auth = config.dashboard?.auth if (!session) return ( - + <> @@ -157,14 +165,16 @@ function AppRoot() { )} - + ) return ( - }> - - - - + <> + }> + + + + + ) } @@ -177,14 +187,19 @@ export interface AppProps { queryClient?: QueryClient fullPage?: boolean dev?: boolean + alineaDev?: boolean } export function App(props: AppProps) { - useSetDashboardOptions({fullPage: props.fullPage !== false, ...props}) + const fullPage = props.fullPage !== false + useSetDashboardOptions({fullPage, ...props}) + const {color} = Config.mainWorkspace(props.config) const queryClient = useAtomValue(queryClientAtom) return ( - + + + ) diff --git a/src/dashboard/atoms/DbAtoms.ts b/src/dashboard/atoms/DbAtoms.ts index 3071d07fd..3b8a94c2a 100644 --- a/src/dashboard/atoms/DbAtoms.ts +++ b/src/dashboard/atoms/DbAtoms.ts @@ -1,166 +1,90 @@ import {Database} from 'alinea/backend' -import {Resolver} from 'alinea/backend/Resolver' -import {Store} from 'alinea/backend/Store' -import {Connection} from 'alinea/core' +import {EntryResolver} from 'alinea/backend/resolver/EntryResolver' import {Graph} from 'alinea/core/Graph' -import {Mutation, MutationType, PendingMutation} from 'alinea/core/Mutation' +import {CreateMutation, Mutation, MutationType} from 'alinea/core/Mutation' +import debounce from 'debounce-promise' import {atom, useSetAtom} from 'jotai' import {atomFamily} from 'jotai/utils' import pLimit from 'p-limit' -import {sql} from 'rado' import {useEffect} from 'react' -import { - PersistentStore, - createPersistentStore -} from '../util/PersistentStore.js' +import {createPersistentStore} from '../util/PersistentStore.js' import {clientAtom, configAtom} from './DashboardAtoms.js' -import { - addPending, - cleanupPending, - pendingMap, - removePending -} from './PendingAtoms.js' -export const storeAtom = atom(createPersistentStore) +export const persistentStoreAtom = atom(createPersistentStore) const limit = pLimit(1) -async function cancelMutations(store: Store) { - // Cancel previous mutations if they were applied - try { - await store(sql`rollback`) - } catch {} -} - -async function syncDb( - db: Database, - client: Connection, - store: PersistentStore -) { - // Sync the local db with remote - await cancelMutations(store) - const changed = await db.syncWith(client) - if (changed.length > 0) await store.flush() - return changed -} - -export const dbModifiedAtom = atom(Promise.resolve(0)) - -const localDbAtom = atom( - async get => { - let pendingLock: Promise = Promise.resolve() - const config = get(configAtom) - const client = get(clientAtom) - const sourceStore = await get(storeAtom) - const sourceDb = new Database(sourceStore, config) - const sourceResolver = new Resolver(sourceStore, config.schema) - - await limit(() => syncDb(sourceDb, client, sourceStore)) - await sourceDb.meta().then(meta => cleanupPending(meta.modifiedAt)) - - const pendingStore = sourceStore.clone() - const pendingDb = new Database(pendingStore, config) - const pendingResolver = new Resolver(pendingStore, config.schema) - - async function resolvePending(params: Connection.ResolveParams) { - await pendingLock - return pendingResolver.resolve(params) - } - - async function resolveSource(params: Connection.ResolveParams) { - return sourceResolver.resolve(params) - } - - // Todo: debounce this - async function sync() { - const changed = await limit(() => - syncDb(sourceDb, 
client, sourceStore).catch(() => [] as Array) - ) - await sourceDb.meta().then(meta => cleanupPending(meta.modifiedAt)) - await pendingDb.syncWith(sourceDb) - return changed - } - - const applyPending = (pending: Array) => - (pendingLock = limit(async (): Promise> => { - // Apply all mutations - const remove = [] - for (const mutation of pending) { - try { - await pendingDb.applyMutation(mutation) - } catch (err) { - remove.push(mutation.mutationId) - } - } - await Database.index(pendingDb.store) - - if (remove.length) removePending(...remove) +export const dbHashAtom = atom(async get => { + const db = await get(localDbAtom) + get(changedEntriesAtom) + const meta = await db.db.meta() + return meta.contentHash +}) - return pending.flatMap(mutation => { - switch (mutation.type) { - case MutationType.Create: - case MutationType.Edit: - if (mutation.entry.parent) - return [mutation.entryId, mutation.entry.parent] - case MutationType.Discard: - default: - return mutation.entryId - } - }) - })) +const localDbAtom = atom(async (get, set) => { + const config = get(configAtom) + const client = get(clientAtom) + const {store, clear, flush} = await get(persistentStoreAtom) - return {sourceDb, resolvePending, resolveSource, sync, applyPending} - }, - (get, set) => { - const update = () => set(pendingUpdateAtom) - update() - pendingMap.observeDeep(update) - return () => pendingMap.unobserveDeep(update) + let db = new Database(config, store) + try { + await db.init() + } catch { + await clear() + db = new Database(config, store) } -) -localDbAtom.onMount = init => init() + const resolver = new EntryResolver(db, config.schema) + const syncDb = async (force = false) => { + const changed = await db.syncWith(client, force) + if (changed.length > 0) await flush() + return changed + } + const debounceSync = debounce(syncDb, 100) + const sync = (force: boolean) => + limit(() => debounceSync(force).catch(() => [] as Array)) + const applyMutations = async ( + mutations: Array, + commitHash: string + ) => { + const update = await db.applyMutations(mutations, commitHash) + await flush() + return update + } + await limit(syncDb) -export const sourceDbAtom = atom(async get => (await get(localDbAtom)).sourceDb) + return {db, applyMutations, resolve: resolver.resolve, sync} +}) export const mutateAtom = atom( null, - (get, set, ...mutations: Array) => { + async (get, set, ...mutations: Array) => { const client = get(clientAtom) - const pending = addPending(...mutations) - return client.mutate(mutations).catch(error => { - removePending(...pending.map(m => m.mutationId)) - set( - changedEntriesAtom, - pending.map(m => m.entryId) + const {commitHash} = await client.mutate(mutations) + const {applyMutations} = await get(localDbAtom) + if (mutations.length === 0) return + const changed = await applyMutations(mutations, commitHash) + const i18nIds = mutations + .filter( + (mutation): mutation is CreateMutation => + mutation.type === MutationType.Create ) - throw error - }) + .map(mutation => mutation.entry.i18nId) + set(changedEntriesAtom, changed.concat(i18nIds)) } ) -export const dbUpdateAtom = atom(null, async (get, set) => { - const {sync, applyPending} = await get(localDbAtom) - const changed = await sync() - const pending = [...pendingMap.values()] - const updated = await applyPending(pending) - set(changedEntriesAtom, changed /*.concat(updated)*/) -}) - -export const pendingUpdateAtom = atom(null, async (get, set) => { - const pending = [...pendingMap.values()] - const {applyPending} = await get(localDbAtom) - 
set(changedEntriesAtom, await applyPending(pending)) -}) - -export const sourceGraphAtom = atom(async get => { - const config = get(configAtom) - const {resolveSource: resolve} = await get(localDbAtom) - return new Graph(config, resolve) -}) +export const dbUpdateAtom = atom( + null, + async (get, set, force: boolean = false) => { + const {sync} = await get(localDbAtom) + const changed = await sync(force) + set(changedEntriesAtom, changed) + } +) export const graphAtom = atom(async get => { const config = get(configAtom) - const {resolvePending: resolve} = await get(localDbAtom) + const {resolve} = await get(localDbAtom) return new Graph(config, resolve) }) @@ -184,10 +108,27 @@ export function useMutate() { return useSetAtom(mutateAtom) } -export function useDbUpdater(everySeconds = 60) { +export function useDbUpdater(everySeconds = 30) { const forceDbUpdate = useSetAtom(dbUpdateAtom) useEffect(() => { - const interval = setInterval(forceDbUpdate, everySeconds * 1000) - return () => clearInterval(interval) + let interval: any = 0 + const focus = () => { + if (document.visibilityState === 'hidden') { + disable() + } else { + forceDbUpdate() + enable() + } + } + const enable = () => + (interval = setInterval(forceDbUpdate, everySeconds * 1000)) + const disable = () => clearInterval(interval) + enable() + window.addEventListener('visibilitychange', focus, false) + window.addEventListener('focus', focus, false) + return () => { + document.removeEventListener('visibilitychange', focus) + document.removeEventListener('focus', focus) + } }, [everySeconds, forceDbUpdate]) } diff --git a/src/dashboard/atoms/Edits.ts b/src/dashboard/atoms/Edits.ts new file mode 100644 index 000000000..91951d8c4 --- /dev/null +++ b/src/dashboard/atoms/Edits.ts @@ -0,0 +1,89 @@ +import {ROOT_KEY, Type} from 'alinea/core' +import {atom} from 'jotai' +import {atomFamily} from 'jotai/utils' +import * as Y from 'yjs' +import {yAtom} from './YAtom.js' + +export class Edits { + /** The mutable doc that we are editing */ + doc = new Y.Doc() + /** The state vector of the source doc */ + sourceVector: Uint8Array | undefined + /** The root map containing field data */ + root = this.doc.getMap(ROOT_KEY) + /** Did we make any local changes? 
*/ + hasChanges = createChangesAtom(this.root) + /** Clear local changes, reset to source */ + resetChanges = atom(null, (get, set) => { + set(this.hasChanges, false) + const copy = new Edits(this.entryId) + copy.applyRemoteUpdate(this.getRemoteUpdate()) + set(entryEditsAtoms(this.entryId), copy) + }) + /** Whether we have a draft loaded */ + isLoading = yAtom(this.root, () => { + return !this.hasData() + }) + yUpdate = yAtom(this.root, () => { + return this.getLocalUpdate() + }) + + constructor(private entryId: string) {} + + hasData() { + return !this.root.keys().next().done + } + + /** Apply updates from the source */ + applyRemoteUpdate(update: Uint8Array) { + this.applyLocalUpdate(update) + this.sourceVector = Y.encodeStateVectorFromUpdateV2(update) + } + + /** Apply local updates */ + applyLocalUpdate(update: Uint8Array) { + Y.applyUpdateV2(this.doc, update, 'self') + } + + /** A Y.js update that contains our own edits, base64 encoded */ + getLocalUpdate() { + return Y.encodeStateAsUpdateV2(this.doc, this.sourceVector) + } + + /** The source doc */ + getRemoteUpdate() { + return Y.encodeStateAsUpdateV2(this.doc) + } + + /** Update entry field data */ + applyEntryData(type: Type, entryData: Record) { + const clientID = this.doc.clientID + this.doc.clientID = 1 + this.doc.transact(() => { + Type.shape(type).applyY(entryData, this.doc, ROOT_KEY) + }, 'self') + this.doc.clientID = clientID + } + + /** The field data */ + getEntryData(type: Type): Record { + return Type.shape(type).fromY(this.root) + } +} + +function createChangesAtom(yMap: Y.Map) { + const hasChanges = atom(false) + hasChanges.onMount = (setAtom: (value: boolean) => void) => { + const listener = (events: Array>, tx: Y.Transaction) => { + if (tx.origin === 'self') return + setAtom(true) + } + yMap.observeDeep(listener) + return () => yMap.unobserveDeep(listener) + } + return hasChanges +} + +export const entryEditsAtoms = atomFamily((entryId: string) => { + return atom(new Edits(entryId)) +}) diff --git a/src/dashboard/atoms/EntryEditorAtoms.ts b/src/dashboard/atoms/EntryEditorAtoms.ts index e69788a84..fa694f742 100644 --- a/src/dashboard/atoms/EntryEditorAtoms.ts +++ b/src/dashboard/atoms/EntryEditorAtoms.ts @@ -3,33 +3,35 @@ import { Connection, EntryPhase, EntryRow, + EntryUrlMeta, ROOT_KEY, Type, - applyEntryData, createId, createYDoc, parseYDoc } from 'alinea/core' import {Entry} from 'alinea/core/Entry' -import {entryFileName} from 'alinea/core/EntryFilenames' +import { + entryFileName, + entryFilepath, + entryInfo, + entryUrl +} from 'alinea/core/EntryFilenames' import {Mutation, MutationType} from 'alinea/core/Mutation' import {MediaFile} from 'alinea/core/media/MediaSchema' +import {base64} from 'alinea/core/util/Encoding' +import {createEntryRow} from 'alinea/core/util/EntryRows' import {entries, fromEntries, values} from 'alinea/core/util/Objects' +import * as paths from 'alinea/core/util/Paths' import {InputState} from 'alinea/editor' import {atom} from 'jotai' import {atomFamily, unwrap} from 'jotai/utils' -import * as Y from 'yjs' import {debounceAtom} from '../util/DebounceAtom.js' -import {clientAtom, configAtom} from './DashboardAtoms.js' -import { - entryRevisionAtoms, - graphAtom, - mutateAtom, - sourceGraphAtom -} from './DbAtoms.js' +import {clientAtom, configAtom, dashboardOptionsAtom} from './DashboardAtoms.js' +import {entryRevisionAtoms, graphAtom, mutateAtom} from './DbAtoms.js' +import {Edits, entryEditsAtoms} from './Edits.js' import {errorAtom} from './ErrorAtoms.js' import {locationAtom} from 
'./LocationAtoms.js' -import {pendingAtom} from './PendingAtoms.js' import {yAtom} from './YAtom.js' export enum EditMode { @@ -49,16 +51,35 @@ interface EntryEditorParams { i18nId: string | undefined } +export enum EntryTransition { + SaveDraft, + SaveTranslation, + PublishEdits, + RestoreRevision, + PublishDraft, + DiscardDraft, + ArchivePublished, + PublishArchived, + DeleteFile, + DeleteArchived +} + +const entryTransitionAtoms = atomFamily((entryId: string) => { + return atom(undefined) +}) + export const entryEditorAtoms = atomFamily( ({locale, i18nId}: EntryEditorParams) => { return atom(async get => { if (!i18nId) return undefined + const {dev} = get(dashboardOptionsAtom) const config = get(configAtom) const client = get(clientAtom) - const sourceGraph = await get(sourceGraphAtom) const graph = await get(graphAtom) const search = locale ? {i18nId, locale} : {i18nId} - let entry = await graph.preferDraft.maybeGet(Entry(search)) + let entry: EntryRow | null = await graph.preferDraft.maybeGet( + Entry(search) + ) if (!entry) { const {searchParams} = get(locationAtom) const preferredLanguage = searchParams.get('from') @@ -71,7 +92,33 @@ export const entryEditorAtoms = atomFamily( if (!entry) return undefined const entryId = entry.entryId get(entryRevisionAtoms(entryId)) - const sourceEntry = await sourceGraph.preferDraft.get(Entry({entryId})) + get(entryRevisionAtoms(entry.i18nId)) + + const type = config.schema[entry.type] + const edits = get(entryEditsAtoms(entryId)) + + const loadDraft = client + .getDraft(entryId) + .then(draft => { + if (draft) { + edits.applyRemoteUpdate(draft.draft) + // The draft is out of sync, this can happen if + // - updates done manually to the content files + // - the draft storage could not be reached after mutation + // We fast forward the draft with the actual current field data + // and will submit new updates including it + if (draft.fileHash !== entry!.fileHash) + edits.applyEntryData(type, entry!.data) + } else { + edits.applyEntryData(type, entry!.data) + } + }) + .catch(() => { + edits.applyEntryData(type, entry!.data) + }) + + if (!edits.hasData()) await loadDraft + const versions = await graph.all.find( Entry({entryId}).select({ ...Entry, @@ -111,7 +158,6 @@ export const entryEditorAtoms = atomFamily( ) const previewToken = await get(previewTokenAtom) return createEntryEditor({ - sourceEntry, parents, translations, parentNeedsTranslation, @@ -121,7 +167,8 @@ export const entryEditorAtoms = atomFamily( entryId, versions, phases, - availablePhases + availablePhases, + edits }) }) }, @@ -129,7 +176,6 @@ export const entryEditorAtoms = atomFamily( ) export interface EntryData { - sourceEntry: EntryRow | null parents: Array<{entryId: string; path: string}> client: Connection config: Config @@ -140,6 +186,7 @@ export interface EntryData { translations: Array<{locale: string; entryId: string}> parentNeedsTranslation: boolean previewToken: string + edits: Edits } export type EntryEditor = ReturnType @@ -147,7 +194,7 @@ export type EntryEditor = ReturnType const showHistoryAtom = atom(false) export function createEntryEditor(entryData: EntryData) { - const {config, availablePhases} = entryData + const {config, availablePhases, edits} = entryData const activePhase = availablePhases[0] const activeVersion = entryData.phases[activePhase] const type = config.schema[activeVersion.type] @@ -157,11 +204,10 @@ export function createEntryEditor(entryData: EntryData) { createYDoc(type, version) ]) ) - const yDoc = docs[activePhase] - const hasChanges = 
createChangesAtom(yDoc.getMap(ROOT_KEY)) - const draftEntry = yAtom(yDoc.getMap(ROOT_KEY), getDraftEntry) + const yDoc = edits.doc + const hasChanges = edits.hasChanges + const draftEntry = yAtom(edits.doc.getMap(ROOT_KEY), getDraftEntry) const editMode = atom(EditMode.Editing) - const isSaving = atom(false) const view = Type.meta(type).view const previewRevision = atom( undefined as {ref: string; file: string} | undefined @@ -174,29 +220,7 @@ export function createEntryEditor(entryData: EntryData) { } ) - const isPublishing = atom(get => { - return false - const pending = get(pendingAtom) - return pending.some( - mutation => - mutation.type === MutationType.Publish && - mutation.entryId === activeVersion.entryId - ) - }) - - const isArchiving = atom(get => { - return false - const pending = get(pendingAtom) - return pending.some( - mutation => - mutation.type === MutationType.Archive && - mutation.entryId === activeVersion.entryId - ) - }) - - const yStateVector = Y.encodeStateVector( - createYDoc(type, entryData.sourceEntry) - ) + const transition = entryTransitionAtoms(activeVersion.entryId) const phaseInUrl = atom(get => { const {search} = get(locationAtom) @@ -218,23 +242,59 @@ export function createEntryEditor(entryData: EntryData) { ) } - const saveDraft = atom(null, (get, set) => { - const entry = {...getDraftEntry(), phase: EntryPhase.Draft} + let isTransacting = false + const transact = atom( + null, + ( + get, + set, + options: { + clearChanges?: boolean + transition: EntryTransition + errorMessage: string + action: () => Promise + } + ) => { + if (isTransacting) return Promise.resolve() + isTransacting = true + const timeout = setTimeout(() => { + if (options.clearChanges) set(hasChanges, false) + set(transition, options.transition) + }, 250) + const currentChanges = get(hasChanges) + return options + .action() + .then(() => { + if (options.clearChanges) set(hasChanges, false) + }) + .catch(error => { + if (options.clearChanges) set(hasChanges, currentChanges) + set(errorAtom, options.errorMessage, error) + }) + .finally(() => { + clearTimeout(timeout) + set(transition, undefined) + isTransacting = false + }) + } + ) + + const saveDraft = atom(null, async (get, set) => { + const update = base64.stringify(edits.getLocalUpdate()) + const entry = await getDraftEntry({phase: EntryPhase.Published}) const mutation: Mutation = { type: MutationType.Edit, previousFile: entryFile(activeVersion), file: entryFile(entry), entryId: activeVersion.entryId, - entry + entry, + update } - set(hasChanges, false) - return set(mutateAtom, mutation).catch(error => { - set(hasChanges, true) - set( - errorAtom, - 'Could not complete save action, please try again later', - error - ) + return set(transact, { + clearChanges: true, + transition: EntryTransition.SaveDraft, + action: () => set(mutateAtom, mutation), + errorMessage: 'Could not complete save action, please try again later' }) }) @@ -259,39 +319,37 @@ export function createEntryEditor(entryData: EntryData) { : undefined if (activeVersion.parent && !parentData) throw new Error('Parent not translated') + const parentPaths = parentData?.paths + ? parentData.paths.concat(parentData.path) + : [] const entryId = createId() - const entry = { - ...getDraftEntry(), - parent: parentData?.entryId ?? 
null, + const entry = await getDraftEntry({ entryId, - locale, - phase: EntryPhase.Published - } + phase: EntryPhase.Published, + parent: parentData?.entryId, + parentPaths, + locale + }) const mutation: Mutation = { type: MutationType.Create, - file: entryFile( - entry, - parentData?.paths ? parentData.paths.concat(parentData.path) : [] - ), + file: entryFile(entry, parentPaths), entryId, entry } - const res = set(mutateAtom, mutation) set(entryRevisionAtoms(activeVersion.entryId)) - set(hasChanges, false) - return res.catch(error => { - set(hasChanges, true) - set( - errorAtom, - 'Could not complete translate action, please try again later', - error - ) + return set(transact, { + clearChanges: true, + transition: EntryTransition.SaveTranslation, + action: () => set(mutateAtom, mutation), + errorMessage: + 'Could not complete translate action, please try again later' }) }) - const publishEdits = atom(null, (get, set) => { + const publishEdits = atom(null, async (get, set) => { const currentFile = entryFile(activeVersion) - const entry = {...getDraftEntry(), phase: EntryPhase.Published} + const update = base64.stringify(edits.getLocalUpdate()) + const entry = await getDraftEntry({phase: EntryPhase.Published}) const mutations: Array = [] const editedFile = entryFile(entry) mutations.push({ @@ -299,16 +357,14 @@ export function createEntryEditor(entryData: EntryData) { previousFile: currentFile, file: editedFile, entryId: activeVersion.entryId, - entry + entry, + update }) - set(hasChanges, false) - return set(mutateAtom, ...mutations).catch(error => { - set(hasChanges, true) - set( - errorAtom, - 'Could not complete publish action, please try again later', - error - ) + return set(transact, { + clearChanges: true, + transition: EntryTransition.PublishEdits, + action: () => set(mutateAtom, ...mutations), + errorMessage: 'Could not complete publish action, please try again later' }) }) @@ -316,40 +372,38 @@ export function createEntryEditor(entryData: EntryData) { const revision = get(previewRevision) if (!revision) return const data = await get(revisionData(revision)) - const entry: EntryRow = { - ...activeVersion, - phase: EntryPhase.Published, - data - } + const {edits} = entryData + edits.applyEntryData(type, data) + const update = base64.stringify(edits.getLocalUpdate()) + const entry = await getDraftEntry({phase: EntryPhase.Published}) const editedFile = entryFile(entry) - return set(mutateAtom, { + const mutation: Mutation = { type: MutationType.Edit, previousFile: editedFile, file: editedFile, entryId: activeVersion.entryId, - entry - }).catch(error => { - set(hasChanges, true) - set( - errorAtom, - 'Could not complete publish action, please try again later', - error - ) + entry, + update + } + return set(transact, { + clearChanges: true, + transition: EntryTransition.RestoreRevision, + action: () => set(mutateAtom, mutation), + errorMessage: 'Could not complete publish action, please try again later' }) }) const publishDraft = atom(null, (get, set) => { const mutation: Mutation = { type: MutationType.Publish, + phase: EntryPhase.Draft, entryId: activeVersion.entryId, file: entryFile(activeVersion) } - return set(mutateAtom, mutation).catch(error => { - set( - errorAtom, - 'Could not complete publish action, please try again later', - error - ) + return set(transact, { + transition: EntryTransition.PublishDraft, + action: () => set(mutateAtom, mutation), + errorMessage: 'Could not complete publish action, please try again later' }) }) @@ -359,12 +413,10 @@ export function 
createEntryEditor(entryData: EntryData) { entryId: activeVersion.entryId, file: entryFile(activeVersion) } - return set(mutateAtom, mutation).catch(error => { - set( - errorAtom, - 'Could not complete remove action, please try again later', - error - ) + return set(transact, { + transition: EntryTransition.DiscardDraft, + action: () => set(mutateAtom, mutation), + errorMessage: 'Could not complete discard action, please try again later' }) }) @@ -375,12 +427,10 @@ export function createEntryEditor(entryData: EntryData) { entryId: published.entryId, file: entryFile(published) } - return set(mutateAtom, mutation).catch(error => { - set( - errorAtom, - 'Could not complete archive action, please try again later', - error - ) + return set(transact, { + transition: EntryTransition.ArchivePublished, + action: () => set(mutateAtom, mutation), + errorMessage: 'Could not complete archive action, please try again later' }) }) @@ -388,15 +438,14 @@ export function createEntryEditor(entryData: EntryData) { const archived = entryData.phases[EntryPhase.Archived] const mutation: Mutation = { type: MutationType.Publish, + phase: EntryPhase.Archived, entryId: archived.entryId, file: entryFile(archived) } - return set(mutateAtom, mutation).catch(error => { - set( - errorAtom, - 'Could not complete publish action, please try again later', - error - ) + return set(transact, { + transition: EntryTransition.PublishArchived, + action: () => set(mutateAtom, mutation), + errorMessage: 'Could not complete publish action, please try again later' }) }) @@ -413,12 +462,10 @@ export function createEntryEditor(entryData: EntryData) { file: entryFile(published), replace: false } - return set(mutateAtom, mutation).catch(error => { - set( - errorAtom, - 'Could not complete delete action, please try again later', - error - ) + return set(transact, { + transition: EntryTransition.DeleteFile, + action: () => set(mutateAtom, mutation), + errorMessage: 'Could not complete delete action, please try again later' }) }) @@ -429,28 +476,52 @@ export function createEntryEditor(entryData: EntryData) { entryId: archived.entryId, file: entryFile(archived) } - return set(mutateAtom, mutation).catch(error => { - set( - errorAtom, - 'Could not complete delete action, please try again later', - error - ) + return set(transact, { + transition: EntryTransition.DeleteArchived, + action: () => set(mutateAtom, mutation), + errorMessage: 'Could not complete delete action, please try again later' }) }) - const discardEdits = atom(null, (get, set) => { - set(hasChanges, false) - set(entryRevisionAtoms(activeVersion.entryId)) - }) - - const activeTitle = yAtom( - yDoc.getMap(ROOT_KEY), - () => yDoc.getMap(ROOT_KEY).get('title') as string - ) - - function getDraftEntry(): EntryRow { - const entryData = parseYDoc(type, yDoc) - return {...activeVersion, ...entryData} + async function getDraftEntry( + meta: Partial & {entryId?: string; parent?: string} = {} + ): Promise { + const data = parseYDoc(type, yDoc) + const locale = meta.locale ?? activeVersion.locale + const path = meta.path ?? data.path + const phase = meta.phase ?? activeVersion.phase + const entryId = meta.entryId ?? activeVersion.entryId + const parent = meta.parent ?? activeVersion.parent + const parentPaths = meta.parentPaths ?? 
entryData.parents.map(p => p.path) + const draftEntry = { + ...activeVersion, + ...data, + entryId, + parent, + locale, + path, + phase + } + const filePath = entryFilepath(config, draftEntry, parentPaths) + const parentDir = paths.dirname(filePath) + const extension = paths.extname(filePath) + const fileName = paths.basename(filePath, extension) + const [entryPath] = entryInfo(fileName) + const childrenDir = paths.join(parentDir, entryPath) + const urlMeta: EntryUrlMeta = { + locale, + path, + phase, + parentPaths + } + const url = entryUrl(type, urlMeta) + return createEntryRow(config, { + ...draftEntry, + parentDir, + childrenDir, + filePath, + url + }) } const revisionsAtom = atom(async get => { @@ -484,8 +555,11 @@ export function createEntryEditor(entryData: EntryData) { const selectedState = atom(get => { const selected = get(selectedPhase) + if (selected === activePhase) return edits.doc return docs[selected] }) + const activeTitle = yAtom(edits.root, () => edits.root.get('title') as string) + const revisionState = atom(get => { const revision = get(previewRevision) return revision ? get(revisionDocState(revision)) : undefined @@ -496,38 +570,24 @@ export function createEntryEditor(entryData: EntryData) { }) const state = atom(get => { const doc = get(currentDoc) - return new InputState.YDocState(Type.shape(type), doc.getMap(ROOT_KEY), '') - }) - - function createPreviewUpdate(entry: EntryRow) { - const sourceDoc = createYDoc(type, entryData.sourceEntry) - applyEntryData(sourceDoc, type, entry) - return Y.encodeStateAsUpdateV2(sourceDoc, yStateVector) - } - - const docRevision = atomFamily((doc: Y.Doc) => { - let revision = 0 - return debounceAtom( - yAtom(doc.getMap(ROOT_KEY), () => revision++), - 250 - ) + return new InputState.YDocState({ + shape: Type.shape(type), + parentData: doc.getMap(ROOT_KEY), + key: '', + readOnly: doc !== edits.doc + }) }) // The debounce here prevents React warning us about a state change during // render for rich text fields. Some day that should be properly fixed. 
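// --- Editor's note: illustrative sketch, not part of the diff itself ---
// The debounced yUpdate defined just below reads edits.yUpdate, which returns
// Edits.getLocalUpdate(): an update that encodes only the edits made on top of
// the source entry by passing the stored source state vector to
// Y.encodeStateAsUpdateV2. The mechanism, reduced to plain yjs calls
// (document names are illustrative):
import * as Y from 'yjs'

const source = new Y.Doc()
source.getMap('root').set('title', 'Published title')
const sourceUpdate = Y.encodeStateAsUpdateV2(source)

const editor = new Y.Doc()
Y.applyUpdateV2(editor, sourceUpdate)
const sourceVector = Y.encodeStateVectorFromUpdateV2(sourceUpdate)

editor.getMap('root').set('title', 'Edited title')
// Encodes only the local edit, not the history the source already knows about
const localOnly = Y.encodeStateAsUpdateV2(editor, sourceVector)
console.log(localOnly.byteLength)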
- const yUpdate = debounceAtom( - atom(get => { - const doc = get(currentDoc) - get(docRevision(doc)) - const entryData = parseYDoc(type, doc) - const entry = {...activeVersion, ...entryData} - return createPreviewUpdate(entry) - }), - 10 - ) + const yUpdate = debounceAtom(edits.yUpdate, 10) + + const discardEdits = edits.resetChanges + const isLoading = edits.isLoading return { ...entryData, + transition, revisionId: createId(), activePhase, phaseInUrl, @@ -551,25 +611,11 @@ export function createEntryEditor(entryData: EntryData) { deleteArchived, saveTranslation, discardEdits, - isSaving, + isLoading, showHistory, - isPublishing, - isArchiving, revisionsAtom, previewRevision, state, view } } - -function createChangesAtom(yMap: Y.Map) { - const hasChanges = atom(false) - hasChanges.onMount = (setAtom: (value: boolean) => void) => { - const listener = (events: Array>, tx: Y.Transaction) => { - setAtom(true) - } - yMap.observeDeep(listener) - return () => yMap.unobserveDeep(listener) - } - return hasChanges -} diff --git a/src/dashboard/atoms/PendingAtoms.ts b/src/dashboard/atoms/PendingAtoms.ts deleted file mode 100644 index 5d5ca8eda..000000000 --- a/src/dashboard/atoms/PendingAtoms.ts +++ /dev/null @@ -1,90 +0,0 @@ -import {createId} from 'alinea/core' -import {Mutation, MutationProgress, PendingMutation} from 'alinea/core/Mutation' -import {Atom, atom} from 'jotai' -import {atomFamily, loadable} from 'jotai/utils' -import {IndexeddbPersistence} from 'y-indexeddb' -import * as Y from 'yjs' -import {dbModifiedAtom} from './DbAtoms.js' -import {yAtom} from './YAtom.js' - -const MAP_KEY = 'mutations' - -export const pendingDoc = new Y.Doc() -export const pendingMap = pendingDoc.getMap(MAP_KEY) - -// Todo: Set up provider here -// Todo: provide a unique name during dev (based on project root dir?) -const local = new IndexeddbPersistence('@alinea/mutations', pendingDoc) - -export const pendingAtom: Atom> = yAtom( - pendingMap, - () => [...pendingMap.values()] -) - -export function cleanupPending(modifiedAt: number) { - // Cleanup mutations which are committed - pendingDoc.transact(() => { - for (const mutation of pendingMap.values()) { - if (modifiedAt > mutation.createdAt) { - pendingMap.delete(mutation.mutationId) - } else { - // Cleanup mutations which are older than 20 minutes as these may - // have silently failed - const duration = Date.now() - mutation.createdAt - if (duration > 1000 * 60 * 20) pendingMap.delete(mutation.mutationId) - } - } - }) -} - -export function addPending(...mutations: Array) { - const res: Array = [] - pendingDoc.transact(() => { - for (const mutation of mutations) { - const mutationId = createId() - const pending = { - ...mutation, - mutationId, - createdAt: Date.now() - } - pendingMap.set(mutationId, pending) - res.push(pending) - } - }) - return res -} - -export function removePending(...mutationIds: Array) { - pendingDoc.transact(() => { - for (const mutationId of mutationIds) { - console.log(`Removing ${mutationId}`) - pendingMap.delete(mutationId) - } - }) -} - -export const pendingProgress = atomFamily((mutationId: string) => { - const finishedAtom = loadable( - atom(async get => { - const mutation = pendingMap.get(mutationId) - if (!mutation) return MutationProgress.Finished - const lastModification = await get(dbModifiedAtom) - return lastModification > mutation.createdAt - ? 
MutationProgress.Finished - : MutationProgress.Pending - }) - ) - return atom(get => { - const loader = get(finishedAtom) - switch (loader.state) { - case 'hasData': - return loader.data - ? MutationProgress.Finished - : MutationProgress.Pending - case 'hasError': - return MutationProgress.Error - default: - return MutationProgress.Pending - } - }) -}) diff --git a/src/dashboard/dev/DevDashboard.tsx b/src/dashboard/dev/DevDashboard.tsx index 2999ac920..6f901866f 100644 --- a/src/dashboard/dev/DevDashboard.tsx +++ b/src/dashboard/dev/DevDashboard.tsx @@ -57,8 +57,8 @@ export function DevDashboard({loadConfig}: DevDashboardOptions) { useEffect(() => { getConfig() return setupDevReload({ - refresh: () => getConfig().then(forceDbUpdate), - refetch: forceDbUpdate, + refresh: () => getConfig().then(() => forceDbUpdate(true)), + refetch: () => forceDbUpdate(true), open: () => setConnected(true), close: () => setConnected(false) }) @@ -69,7 +69,7 @@ export function DevDashboard({loadConfig}: DevDashboardOptions) { queryClient={queryClient} config={cms} client={client!} - dev={!process.env.ALINEA_CLOUD_URL} + dev={process.env.NODE_ENV === 'development'} /> ) } diff --git a/src/dashboard/hook/UseUploads.ts b/src/dashboard/hook/UseUploads.ts index 27f90abe6..82391c0c3 100644 --- a/src/dashboard/hook/UseUploads.ts +++ b/src/dashboard/hook/UseUploads.ts @@ -1,6 +1,5 @@ -import '@ungap/with-resolvers' import {Media} from 'alinea/backend/Media' -import {createContentHash} from 'alinea/backend/util/ContentHash' +import {createFileHash} from 'alinea/backend/util/ContentHash' import { Connection, Entry, @@ -14,6 +13,7 @@ import {createId} from 'alinea/core/Id' import {Mutation, MutationType} from 'alinea/core/Mutation' import {MediaFile} from 'alinea/core/media/MediaSchema' import {base64} from 'alinea/core/util/Encoding' +import {createEntryRow} from 'alinea/core/util/EntryRows' import {generateKeyBetween} from 'alinea/core/util/FractionalIndexing' import { basename, @@ -30,6 +30,7 @@ import smartcrop from 'smartcrop' import {rgbaToThumbHash, thumbHashToAverageRGBA} from 'thumbhash' import {useMutate} from '../atoms/DbAtoms.js' import {errorAtom} from '../atoms/ErrorAtoms.js' +import {withResolvers} from '../util/WithResolvers.js' import {useConfig} from './UseConfig.js' import {useGraph} from './UseGraph.js' import {useSession} from './UseSession.js' @@ -83,12 +84,8 @@ const tasker = { async function process( upload: Upload, - createEntry: (upload: Upload) => Promise<{ - file: string - entry: Media.File - }>, - client: Connection, - mutate: (...mutations: Array) => Promise + publishUpload: (upload: Upload) => Promise, + client: Connection ): Promise { switch (upload.status) { case UploadStatus.Queued: @@ -181,49 +178,7 @@ async function process( case UploadStatus.Uploaded: { const {replace} = upload const info = upload.info! 
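// --- Editor's note: illustrative sketch, not part of the diff itself ---
// createBatch (further below) now uses the withResolvers helper backed by the
// '@ungap/with-resolvers' polyfill, so every mutation queued in the same tick
// settles with the shared promise of one batched mutate() round trip. The
// primitive itself behaves like this (assumes a TS lib or polyfill typing
// that declares Promise.withResolvers):
import '@ungap/with-resolvers'

const {promise, resolve} = Promise.withResolvers<string>()
setTimeout(() => resolve('batch flushed'), 0)
promise.then(message => console.log(message))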
- const {file, entry} = await createEntry(upload) - if (replace) { - await mutate( - { - type: MutationType.Edit, - entryId: replace.entry.entryId, - file: replace.entryFile, - entry: { - ...replace.entry, - data: {...entry.data, title: replace.entry.title} - } - }, - { - type: MutationType.Upload, - entryId: entry.entryId, - url: info.previewUrl, - file: info.location - }, - { - type: MutationType.FileRemove, - entryId: replace.entry.entryId, - file: replace.entryFile, - workspace: replace.entry.workspace, - location: (replace.entry.data as MediaFile).location, - replace: true - } - ) - } else { - await mutate( - { - type: MutationType.Create, - entryId: entry.entryId, - file, - entry - }, - { - type: MutationType.Upload, - entryId: entry.entryId, - url: info.previewUrl, - file: info.location - } - ) - } + const entry = await publishUpload(upload) return {...upload, result: entry, status: UploadStatus.Done} } case UploadStatus.Done: @@ -232,7 +187,7 @@ async function process( } function createBatch(mutate: (...mutations: Array) => Promise) { - let trigger = Promise.withResolvers() + let trigger = withResolvers() let nextRun: any = undefined const batch = [] as Array async function run() { @@ -243,7 +198,7 @@ function createBatch(mutate: (...mutations: Array) => Promise) { } catch (error) { trigger.reject(error) } finally { - trigger = Promise.withResolvers() + trigger = withResolvers() } } return (...mutations: Array) => { @@ -278,10 +233,6 @@ export function useUploads(onSelect?: (entry: EntryRow) => void) { const entryId = upload.info?.entryId ?? createId() const {parentId} = upload.to const buffer = await upload.file.arrayBuffer() - const contentHash = await createContentHash( - EntryPhase.Published, - new Uint8Array(buffer) - ) const parent = await graph.preferPublished.maybeGet( Entry({entryId: parentId}).select({ level: Entry.level, @@ -317,7 +268,8 @@ export function useUploads(onSelect?: (entry: EntryRow) => void) { ? location.slice(prefix.length) : location - const entry: Media.File = { + const hash = await createFileHash(new Uint8Array(buffer)) + const entry = await createEntryRow(config, { ...entryLocation, parent: parent?.entryId ?? null, entryId: entryId, @@ -334,7 +286,6 @@ export function useUploads(onSelect?: (entry: EntryRow) => void) { parentDir: parentDir, filePath, childrenDir: filePath.slice(0, -'.json'.length), - contentHash, active: true, main: true, data: { @@ -342,7 +293,7 @@ export function useUploads(onSelect?: (entry: EntryRow) => void) { location: fileLocation, extension: extension, size: buffer.byteLength, - hash: contentHash, + hash, width: upload.width, height: upload.height, averageColor: upload.averageColor, @@ -350,7 +301,7 @@ export function useUploads(onSelect?: (entry: EntryRow) => void) { thumbHash: upload.thumbHash, preview: upload.preview } - } + }) const file = entryFileName( config, entry, @@ -371,7 +322,7 @@ export function useUploads(onSelect?: (entry: EntryRow) => void) { } while (true) { const next = await tasker[upload.status](() => - process(upload, createEntry, client, batchMutations) + process(upload, publishUpload, client) ).catch(error => { return {...upload, error, status: UploadStatus.Done} }) @@ -390,6 +341,56 @@ export function useUploads(onSelect?: (entry: EntryRow) => void) { } } + async function publishUpload(upload: Upload) { + const {replace} = upload + const info = upload.info! 
+ const {file, entry} = await createEntry(upload) + if (!replace) { + await batchMutations( + { + type: MutationType.Create, + entryId: entry.entryId, + file, + entry + }, + { + type: MutationType.Upload, + entryId: entry.entryId, + url: info.previewUrl, + file: info.location + } + ) + return entry + } + const newEntry = await createEntryRow(config, { + ...replace.entry, + data: {...entry.data, title: replace.entry.title} + }) + await batchMutations( + { + type: MutationType.Edit, + entryId: replace.entry.entryId, + file: replace.entryFile, + entry: newEntry + }, + { + type: MutationType.Upload, + entryId: entry.entryId, + url: info.previewUrl, + file: info.location + }, + { + type: MutationType.FileRemove, + entryId: replace.entry.entryId, + file: replace.entryFile, + workspace: replace.entry.workspace, + location: (replace.entry.data as MediaFile).location, + replace: true + } + ) + return newEntry + } + async function upload( files: Array, to: UploadDestination, diff --git a/src/dashboard/package.json b/src/dashboard/package.json index 89550d19d..245860644 100644 --- a/src/dashboard/package.json +++ b/src/dashboard/package.json @@ -11,9 +11,11 @@ "@headlessui/react": "^1.7.17", "@react-hook/size": "^2.1.2", "@react-hook/window-size": "^3.0.7", + "@types/debounce-promise": "^3.1.8", "@types/diff-match-patch": "^1.0.32", "color2k": "^1.2.4", "dataloader": "^2.1.0", + "debounce-promise": "^3.1.2", "diff-match-patch": "^1.0.5", "fflate": "^0.8.0", "jotai": "^2.4.2", diff --git a/src/dashboard/util/PersistentStore.ts b/src/dashboard/util/PersistentStore.ts index e696fdd21..0717e611e 100644 --- a/src/dashboard/util/PersistentStore.ts +++ b/src/dashboard/util/PersistentStore.ts @@ -1,5 +1,5 @@ +import sqlInit from '@alinea/sqlite-wasm' import {Store} from 'alinea/backend/Store' -import {assign} from 'alinea/core/util/Objects' import * as idb from 'lib0/indexeddb.js' import prettyMilliseconds from 'pretty-ms' import {DriverOptions} from 'rado' @@ -7,18 +7,18 @@ import {connect} from 'rado/driver/sql.js' const STORAGE_NAME = '@alinea/peristent.store' -export interface PersistentStore extends Store { +export interface PersistentStore { + store: Store flush(): Promise clone(): Store + clear(): Promise } export async function createPersistentStore(): Promise { const storagePromise = idb.openDB(STORAGE_NAME, db => idb.createStores(db, [[STORAGE_NAME, {autoIncrement: true}]]) ) - const sqlitePromise = import('@alinea/sqlite-wasm').then( - ({default: sqlInit}) => sqlInit() - ) + const sqlitePromise = sqlInit() const [storage, {Database}] = await Promise.all([ storagePromise, sqlitePromise @@ -27,7 +27,7 @@ export async function createPersistentStore(): Promise { const [store] = idb.transact(storage, [STORAGE_NAME], 'readonly') const buffer = await idb.get(store, 'db') const init = ArrayBuffer.isView(buffer) ? 
buffer : undefined - const db = new Database(init) + let db = new Database(init) const driverOptions: DriverOptions = { logQuery(stmt, duration) { if (!stmt.sql.startsWith('SELECT')) return @@ -57,7 +57,8 @@ export async function createPersistentStore(): Promise { } // Return an async connection so we can move the database to a worker later // without have to rewrite the dashboard - return assign(connect(db, driverOptions).toAsync(), { + const persistent = { + store: connect(db, driverOptions).toAsync(), async flush() { const [store] = idb.transact(storage, [STORAGE_NAME], 'readwrite') await idb.put(store, db.export(), 'db') @@ -65,8 +66,15 @@ export async function createPersistentStore(): Promise { clone() { const clone = new Database(db.export()) return connect(clone, driverOptions).toAsync() + }, + async clear() { + const [store] = idb.transact(storage, [STORAGE_NAME], 'readwrite') + await idb.del(store, 'db') + db = new Database() + persistent.store = connect(db, driverOptions).toAsync() } - }) + } + return persistent } interface QueryPlanItem { diff --git a/src/dashboard/util/WithResolvers.ts b/src/dashboard/util/WithResolvers.ts new file mode 100644 index 000000000..8f3151b6f --- /dev/null +++ b/src/dashboard/util/WithResolvers.ts @@ -0,0 +1,3 @@ +import '@ungap/with-resolvers' + +export const withResolvers = Promise.withResolvers.bind(Promise) diff --git a/src/dashboard/view/EntryEdit.tsx b/src/dashboard/view/EntryEdit.tsx index 6e1c8665b..81c82d084 100644 --- a/src/dashboard/view/EntryEdit.tsx +++ b/src/dashboard/view/EntryEdit.tsx @@ -4,13 +4,17 @@ import {InputForm} from 'alinea/editor' import {TabsHeader, TabsSection} from 'alinea/input/tabs/Tabs.browser' import {Button, HStack, Stack, VStack, fromModule} from 'alinea/ui' import {Main} from 'alinea/ui/Main' +import {Statusbar} from 'alinea/ui/Statusbar' import {Tabs} from 'alinea/ui/Tabs' +import {IcOutlineTableRows} from 'alinea/ui/icons/IcOutlineTableRows' +import {IcRoundInsertDriveFile} from 'alinea/ui/icons/IcRoundInsertDriveFile' import {IcRoundTranslate} from 'alinea/ui/icons/IcRoundTranslate' import {useAtomValue, useSetAtom} from 'jotai' import {useEffect, useRef} from 'react' import {EntryEditor} from '../atoms/EntryEditorAtoms.js' import {useRouteBlocker} from '../atoms/RouterAtoms.js' import {useConfig} from '../hook/UseConfig.js' +import {useDashboard} from '../hook/UseDashboard.js' import {useLocale} from '../hook/UseLocale.js' import {useNav} from '../hook/UseNav.js' import {SuspenseBoundary} from '../util/SuspenseBoundary.js' @@ -43,6 +47,7 @@ export interface EntryEditProps { } export function EntryEdit({editor}: EntryEditProps) { + const {alineaDev} = useDashboard() const locale = useLocale() const {preview, enableDrafts} = useConfig() const {isPreviewOpen} = useSidebar() @@ -100,11 +105,43 @@ export function EntryEdit({editor}: EntryEditProps) { hasRootTabs && (sections[0][Section.Data] as TabsSection) const visibleTypes = tabs && tabs.types.filter(type => !Type.meta(type).isHidden) - /*useEffect(() => { + useEffect(() => { if (isBlocking && !isNavigationChange) confirm?.() - }, [isBlocking, isNavigationChange, confirm])*/ + }, [isBlocking, isNavigationChange, confirm]) return ( <> + {alineaDev && ( + <> + + + File path: {editor.activeVersion.filePath} + + + + + Parent dir: {editor.activeVersion.parentDir} + + + + + Children dir: {editor.activeVersion.childrenDir} + + + + Url: {editor.activeVersion.url} + + + + {editor.activeVersion.fileHash} + + + + + {editor.activeVersion.rowHash} + + + + )} {isBlocking && 
isNavigationChange && ( cancel()}> @@ -203,11 +240,9 @@ export function EntryEdit({editor}: EntryEditProps) { })} ) : ( -
- - - -
+ + + )} diff --git a/src/dashboard/view/EntryTree.tsx b/src/dashboard/view/EntryTree.tsx index b6ee88bb8..22f5663ec 100644 --- a/src/dashboard/view/EntryTree.tsx +++ b/src/dashboard/view/EntryTree.tsx @@ -21,6 +21,7 @@ import { rootId, useEntryTreeProvider } from '../atoms/EntryAtoms.js' +import {entryLocationAtom} from '../atoms/NavigationAtoms.js' import {useConfig} from '../hook/UseConfig.js' import {useLocale} from '../hook/UseLocale.js' import {useNav} from '../hook/UseNav.js' @@ -40,6 +41,7 @@ interface EntryTreeItemProps { } function EntryTreeItem({item, data}: EntryTreeItemProps) { + const {entryId} = useAtomValue(entryLocationAtom) const locale = useLocale() const {schema} = useConfig() const currentData = useRef(data) @@ -52,13 +54,14 @@ function EntryTreeItem({item, data}: EntryTreeItemProps) { const isDraft = selected.phase === EntryPhase.Draft const isUntranslated = locale && selected.locale !== locale const isArchived = selected.phase === EntryPhase.Archived + const isSelected = entryId && itemData.id === entryId return (
- {item.isFolder() && ( + {item.isFolder() ? ( {item.isExpanded() ? ( @@ -78,9 +81,7 @@ function EntryTreeItem({item, data}: EntryTreeItemProps) { )} - )} - - {!item.isFolder() && ( + ) : ( )} - - {/*item.isLoading() && */}
) diff --git a/src/dashboard/view/diff/ChangeBox.tsx b/src/dashboard/view/diff/ChangeBox.tsx index 7b271e881..edba7522f 100644 --- a/src/dashboard/view/diff/ChangeBox.tsx +++ b/src/dashboard/view/diff/ChangeBox.tsx @@ -9,7 +9,7 @@ import css from './ChangeBox.module.scss' const styles = fromModule(css) const icons = { - unchanged: IcOutlineArrowCircleRight, + keep: IcOutlineArrowCircleRight, addition: IcRoundAddCircleOutline, removal: IcRoundRemoveCircleOutline } diff --git a/src/dashboard/view/diff/DiffUtils.ts b/src/dashboard/view/diff/DiffUtils.ts index 64afa4a18..9ef79ce05 100644 --- a/src/dashboard/view/diff/DiffUtils.ts +++ b/src/dashboard/view/diff/DiffUtils.ts @@ -48,7 +48,7 @@ export function computeLcs( export type Change = | {type: 'addition'; value: T} | {type: 'removal'; value: T} - | {type: 'unchanged'; old: T; value: T} + | {type: 'keep'; old: T; value: T} export function diffList( a: Array, @@ -68,7 +68,7 @@ export function diffList( results.push({type: 'removal', value: a[i - 1]}) i -= 1 } else if (equals(a[i - 1], b[j - 1])) { - results.push({type: 'unchanged', old: a[i - 1], value: b[j - 1]}) + results.push({type: 'keep', old: a[i - 1], value: b[j - 1]}) i -= 1 j -= 1 } else if (lcs[i - 1][j] <= lcs[i][j - 1]) { diff --git a/src/dashboard/view/diff/ListDiff.tsx b/src/dashboard/view/diff/ListDiff.tsx index ec29f16e0..ac03f526d 100644 --- a/src/dashboard/view/diff/ListDiff.tsx +++ b/src/dashboard/view/diff/ListDiff.tsx @@ -21,7 +21,7 @@ export function ListDiff({shape, valueA, valueB}: ListDiffProps) { const block = change.value const kind = shape.values[block.type] const compare = - change.type === 'unchanged' + change.type === 'keep' ? [change.old, change.value] : change.type === 'removal' ? [change.value, {}] diff --git a/src/dashboard/view/diff/RichTextDiff.tsx b/src/dashboard/view/diff/RichTextDiff.tsx index 5e0a5b44d..e70ad4b38 100644 --- a/src/dashboard/view/diff/RichTextDiff.tsx +++ b/src/dashboard/view/diff/RichTextDiff.tsx @@ -85,7 +85,7 @@ export function RichTextDiff({shape, valueA, valueB}: RichTextDiffProps) { const name = change.value.block.type const kind = shape.values?.[name] const compare = - change.type === 'unchanged' + change.type === 'keep' ? [ ('block' in change.old && change.old.block) || {}, change.value.block @@ -112,7 +112,7 @@ export function RichTextDiff({shape, valueA, valueB}: RichTextDiffProps) { } case 'text': { const compare = - change.type === 'unchanged' + change.type === 'keep' ? ['text' in change.old && change.old.text, change.value.text] : change.type === 'removal' ? 
[change.value.text, ''] diff --git a/src/dashboard/view/entry/EditModeToggle.module.scss b/src/dashboard/view/entry/EditModeToggle.module.scss index ccb85e7fe..12dc58a70 100644 --- a/src/dashboard/view/entry/EditModeToggle.module.scss +++ b/src/dashboard/view/entry/EditModeToggle.module.scss @@ -1,5 +1,4 @@ .root { - height: 32px; background: var(--alinea-background); border-radius: 16px; display: flex; diff --git a/src/dashboard/view/entry/EntryHeader.tsx b/src/dashboard/view/entry/EntryHeader.tsx index 71ecca562..99b8637cf 100644 --- a/src/dashboard/view/entry/EntryHeader.tsx +++ b/src/dashboard/view/entry/EntryHeader.tsx @@ -3,7 +3,7 @@ import {entryFile, workspaceMediaDir} from 'alinea/core/EntryFilenames' import {Button, HStack, Icon, Stack, fromModule, px} from 'alinea/ui' import {AppBar} from 'alinea/ui/AppBar' import {DropdownMenu} from 'alinea/ui/DropdownMenu' -import IcOutlineAvTimer from 'alinea/ui/icons/IcOutlineAvTimer' +import {IcOutlineAvTimer} from 'alinea/ui/icons/IcOutlineAvTimer' import {IcOutlineDrafts} from 'alinea/ui/icons/IcOutlineDrafts' import {IcOutlineKeyboardTab} from 'alinea/ui/icons/IcOutlineKeyboardTab' import {IcOutlineRemoveRedEye} from 'alinea/ui/icons/IcOutlineRemoveRedEye' @@ -19,7 +19,7 @@ import {IcRoundTranslate} from 'alinea/ui/icons/IcRoundTranslate' import {IcRoundUnfoldMore} from 'alinea/ui/icons/IcRoundUnfoldMore' import {useAtom, useAtomValue, useSetAtom} from 'jotai' import {useEffect, useState} from 'react' -import {EntryEditor} from '../../atoms/EntryEditorAtoms.js' +import {EntryEditor, EntryTransition} from '../../atoms/EntryEditorAtoms.js' import {useLocation, useNavigate} from '../../atoms/LocationAtoms.js' import {useConfig} from '../../hook/UseConfig.js' import {useLocale} from '../../hook/UseLocale.js' @@ -35,22 +35,32 @@ const variantDescription = { draft: 'Draft', editing: 'Editing', published: 'Published', - publishing: 'Publishing', archived: 'Archived', - archiving: 'Archiving', untranslated: 'Untranslated', revision: 'Revision' } +const transitions = { + [EntryTransition.SaveDraft]: 'Saving', + [EntryTransition.SaveTranslation]: 'Saving', + [EntryTransition.PublishEdits]: 'Publishing', + [EntryTransition.RestoreRevision]: 'Restoring', + [EntryTransition.PublishDraft]: 'Publishing', + [EntryTransition.DiscardDraft]: 'Discarding', + [EntryTransition.ArchivePublished]: 'Archiving', + [EntryTransition.PublishArchived]: 'Publishing', + [EntryTransition.DeleteFile]: 'Deleting', + [EntryTransition.DeleteArchived]: 'Deleting' +} + const variantIcon = { draft: IcOutlineDrafts, editing: IcRoundEdit, published: IcOutlineRemoveRedEye, - publishing: IcOutlineAvTimer, archived: IcRoundArchive, - archiving: IcOutlineAvTimer, untranslated: IcRoundTranslate, - revision: IcRoundPublishedWithChanges + revision: IcRoundPublishedWithChanges, + transition: IcOutlineAvTimer } export interface EntryHeaderProps { @@ -65,21 +75,18 @@ export function EntryHeader({editor, editable = true}: EntryHeaderProps) { const selectedPhase = useAtomValue(editor.selectedPhase) const previewRevision = useAtomValue(editor.previewRevision) const isActivePhase = editor.activePhase === selectedPhase - const isPublishing = useAtomValue(editor.isPublishing) - const isArchiving = useAtomValue(editor.isArchiving) const isMediaFile = editor.activeVersion.type === 'MediaFile' const hasChanges = useAtomValue(editor.hasChanges) + const currentTransition = useAtomValue(editor.transition) const untranslated = locale && locale !== editor.activeVersion.locale - const variant = 
previewRevision + const variant = currentTransition + ? 'transition' + : previewRevision ? 'revision' : untranslated ? 'untranslated' : hasChanges && !phaseInUrl ? 'editing' - : selectedPhase === EntryPhase.Published && isPublishing - ? 'publishing' - : selectedPhase === EntryPhase.Archived && isArchiving - ? 'archiving' : selectedPhase const saveDraft = useSetAtom(editor.saveDraft) const publishEdits = useSetAtom(editor.publishEdits) @@ -191,7 +198,11 @@ export function EntryHeader({editor, editable = true}: EntryHeaderProps) { - {variantDescription[variant]} + + {variant === 'transition' + ? transitions[currentTransition!] + : variantDescription[variant]} + {!previewRevision && editor.availablePhases.length > 1 && ( )} @@ -224,6 +235,7 @@ export function EntryHeader({editor, editable = true}: EntryHeaderProps) { {editable && + !currentTransition && !hasChanges && isActivePhase && !untranslated && @@ -236,7 +248,8 @@ export function EntryHeader({editor, editable = true}: EntryHeaderProps) { )} - {!hasChanges && + {!currentTransition && + !hasChanges && !isActivePhase && editor.availablePhases.includes(EntryPhase.Draft) && ( <> @@ -247,32 +260,37 @@ export function EntryHeader({editor, editable = true}: EntryHeaderProps) { )} - {untranslated && !editor.parentNeedsTranslation && !hasChanges && ( - <> - -
- - Translate from - locale)} - onChange={locale => { - navigate(pathname + `?from=` + locale) - }} - /> - -
- - )} + {!currentTransition && + untranslated && + !editor.parentNeedsTranslation && + !hasChanges && ( + <> + +
+ + Translate from + locale)} + onChange={locale => { + navigate(pathname + `?from=` + locale) + }} + /> + +
+ + )} - {untranslated && editor.parentNeedsTranslation && ( - <> - -
- Translate parent page first -
- - )} + {!currentTransition && + untranslated && + editor.parentNeedsTranslation && ( + <> + +
+ Translate parent page first +
+ + )} {variant === 'editing' && ( <> @@ -292,49 +310,54 @@ export function EntryHeader({editor, editable = true}: EntryHeaderProps) { - {untranslated && !editor.parentNeedsTranslation && ( - - )} - {config.enableDrafts && variant === 'editing' && ( - - )} - {!config.enableDrafts && variant === 'editing' && ( - - )} - {!untranslated && !hasChanges && selectedPhase === 'draft' && ( - - )} - {variant === 'revision' && ( - - )} - - - - - - - - {!isMediaFile && ( - setShowHistory(!showHistory)} - > - {showHistory ? 'Hide' : 'Show'} history - + {!currentTransition && ( + <> + {untranslated && !editor.parentNeedsTranslation && ( + + )} + {config.enableDrafts && variant === 'editing' && ( + + )} + {!config.enableDrafts && variant === 'editing' && ( + + )} + {!untranslated && + !hasChanges && + selectedPhase === 'draft' && ( + + )} + {variant === 'revision' && ( + )} - {options} - - + + + + + + + {!isMediaFile && ( + setShowHistory(!showHistory)} + > + {showHistory ? 'Hide' : 'Show'} history + + )} + {options} + + + + )} - - - )} + + {isCreating && } + {/*parent && */} + + + +
+ + Cancel + + +
) } diff --git a/src/editor/InputState.ts b/src/editor/InputState.ts index 54293c95c..b0b3d58ea 100644 --- a/src/editor/InputState.ts +++ b/src/editor/InputState.ts @@ -23,29 +23,39 @@ export namespace InputState { export type Text = readonly [TextDoc, RichTextMutator] export type Union = readonly [T, UnionMutator] + export interface YDocStateOptions { + shape: Shape + parentData: Y.Map + key: string | undefined + parent?: InputState + readOnly?: boolean + } + export class YDocState implements InputState { - constructor( - protected shape: Shape, - protected parentData: Y.Map, - protected key: string | undefined, - protected _parent?: InputState - ) {} + constructor(public options: YDocStateOptions) {} parent() { - return this._parent + return this.options.parent } child(field: string): InputState { - const {shape, parentData: data, key} = this + const {readOnly, shape, parentData: data, key} = this.options const child = key ? data.get(key) : data - return new YDocState(shape.typeOfChild(child, field), child, field, this) + return new YDocState({ + shape: shape.typeOfChild(child, field), + parentData: child, + key: field, + parent: this, + readOnly + }) } use() { - const value = this.key ? this.parentData.get(this.key) : this.parentData - const listener = this.shape.watch(this.parentData, this.key!) + const {shape, parentData, key, parent, readOnly} = this.options + const value = key ? parentData.get(key) : parentData + const listener = shape.watch(parentData, key!) const forceUpdate = useForceUpdate() useEffect(() => listener(forceUpdate), [this]) return [ - this.shape.fromY(value), - this.shape.mutator(this.parentData, this.key!) + shape.fromY(value), + shape.mutator(parentData, key!, Boolean(readOnly)) ] as const } } @@ -67,7 +77,7 @@ export namespace InputState { const current = record[field] const mutate = (state: V) => { if (typeof mutator !== 'function') - throw 'Cannot access child field of non-object' + throw new Error('Cannot access child field of non-object') mutator({...this.current, [field]: state}) } // We don't have any field information here so we can only assume diff --git a/src/editor/hook/UseField.tsx b/src/editor/hook/UseField.tsx index c424af007..653d885e3 100644 --- a/src/editor/hook/UseField.tsx +++ b/src/editor/hook/UseField.tsx @@ -37,7 +37,7 @@ export class FieldState implements InputState { const {key, root, shape, attach} = this.options const {current, mutator, observe} = useMemo(() => { const current = (): V => shape.fromY(root.get(key)) - const mutator = shape.mutator(root, key) as M + const mutator = shape.mutator(root, key, false) as M const observe = shape.watch(root, key) return {current, mutator, observe} }, []) diff --git a/src/editor/hook/UseForm.tsx b/src/editor/hook/UseForm.tsx index 480768e12..ec7b7abdb 100644 --- a/src/editor/hook/UseForm.tsx +++ b/src/editor/hook/UseForm.tsx @@ -41,7 +41,7 @@ export class FormState, M> const {root, shape, key, attach} = this.options const {current, mutator, observe} = useMemo(() => { const current = (): V => shape.fromY(root.get(key)) - const mutator = shape.mutator(root, key) as any + const mutator = shape.mutator(root, key, false) as any const observe = (fun: () => void) => { const record = root.get(key) record.observeDeep(fun) diff --git a/src/input/check/CheckField.browser.tsx b/src/input/check/CheckField.browser.tsx index 440ac321c..8366942a3 100644 --- a/src/input/check/CheckField.browser.tsx +++ b/src/input/check/CheckField.browser.tsx @@ -20,7 +20,7 @@ type CheckInputProps = { function CheckInput({state, 
field}: CheckInputProps) { const {label, options} = field[Field.Data] - const {readonly} = options + const {readOnly: readonly} = options const [value, setValue] = useInput(state) const [focus, setFocus] = useState(false) return ( @@ -46,7 +46,7 @@ function CheckInput({state, field}: CheckInputProps) { autoFocus={options.autoFocus} disabled={readonly} /> - + {value && ( diff --git a/src/input/code/CodeField.browser.tsx b/src/input/code/CodeField.browser.tsx index 51c01f5c7..0d35ce556 100644 --- a/src/input/code/CodeField.browser.tsx +++ b/src/input/code/CodeField.browser.tsx @@ -48,7 +48,7 @@ function CodeInput({state, field}: CodeInputProps) { onBlur={() => setFocus(false)} placeholder={placeholder} spellCheck="false" - disabled={options.readonly} + disabled={options.readOnly} />
diff --git a/src/input/code/CodeField.stories.tsx b/src/input/code/CodeField.stories.tsx index 795ff1d13..5d9b62cf3 100644 --- a/src/input/code/CodeField.stories.tsx +++ b/src/input/code/CodeField.stories.tsx @@ -8,7 +8,7 @@ export function CodeField() { const codeField = useField(code('Code')) const disabledCodeField = useField( code('Code (read-only)', { - readonly: true, + readOnly: true, initialValue: `console.log('Hello world!')` }) ) diff --git a/src/input/date/DateField.browser.tsx b/src/input/date/DateField.browser.tsx index 9aeebf850..3cbddf74e 100644 --- a/src/input/date/DateField.browser.tsx +++ b/src/input/date/DateField.browser.tsx @@ -27,7 +27,7 @@ function DateInput({state, field}: DateInputProps) { value={value || ''} onChange={e => setValue(e.currentTarget.value)} autoFocus={options.autoFocus} - disabled={options.readonly} + disabled={options.readOnly} /> ) diff --git a/src/input/date/DateField.stories.tsx b/src/input/date/DateField.stories.tsx index 5c35c4033..717ba08f1 100644 --- a/src/input/date/DateField.stories.tsx +++ b/src/input/date/DateField.stories.tsx @@ -8,7 +8,7 @@ export function DateField() { const dateField = useField(date('Date', {})) const focusedDateField = useField(date('Date', {autoFocus: true})) const readonlyDateField = useField( - date('Date (read-only)', {readonly: true, initialValue: '1900-01-01'}) + date('Date (read-only)', {readOnly: true, initialValue: '1900-01-01'}) ) return ( diff --git a/src/input/date/DateField.tsx b/src/input/date/DateField.tsx index 0c6c5d19f..f3c7eff3d 100644 --- a/src/input/date/DateField.tsx +++ b/src/input/date/DateField.tsx @@ -17,7 +17,7 @@ export interface DateOptions extends FieldOptions { /** Hide this date field */ hidden?: boolean /** Make this date field read-only*/ - readonly?: boolean + readOnly?: boolean } /** Internal representation of a date field */ diff --git a/src/input/json/JsonField.browser.tsx b/src/input/json/JsonField.browser.tsx index 45da2c13b..c18906b50 100644 --- a/src/input/json/JsonField.browser.tsx +++ b/src/input/json/JsonField.browser.tsx @@ -82,7 +82,7 @@ function JsonInput({state, field}: JsonInputProps) { }} placeholder={placeholder} autoFocus={autoFocus} - disabled={options.readonly} + disabled={options.readOnly} /> diff --git a/src/input/json/JsonField.stories.tsx b/src/input/json/JsonField.stories.tsx index d77ac77b3..2833703bb 100644 --- a/src/input/json/JsonField.stories.tsx +++ b/src/input/json/JsonField.stories.tsx @@ -7,7 +7,7 @@ import {UIStory} from 'alinea/ui/UIStory' export function JsonField() { const jsonField = useField(json('Json')) const focusedJsonField = useField(json('Json (autofocus)', {autoFocus: true})) - const readonlyJsonField = useField(json('Json (read-only)', {readonly: true})) + const readonlyJsonField = useField(json('Json (read-only)', {readOnly: true})) return ( diff --git a/src/input/link/LinkField.browser.tsx b/src/input/link/LinkField.browser.tsx index fc98412ce..94a1acda6 100644 --- a/src/input/link/LinkField.browser.tsx +++ b/src/input/link/LinkField.browser.tsx @@ -21,7 +21,7 @@ import { verticalListSortingStrategy } from '@dnd-kit/sortable' import {CSS, FirstArgument} from '@dnd-kit/utilities' -import {Field, Reference, Type} from 'alinea/core' +import {Field, ListRow, Reference, Type} from 'alinea/core' import {entries} from 'alinea/core/util/Objects' import {Create} from 'alinea/dashboard/view/Create' import {IconButton} from 'alinea/dashboard/view/IconButton' @@ -127,7 +127,7 @@ const layoutMeasuringConfig = { } interface LinksInputProps { - 
state: InputState> + state: InputState> field: LinksField } @@ -142,7 +142,7 @@ function LinksInput({ const [pickFrom, setPickFrom] = useState() const picker = pickFrom ? options.pickers[pickFrom] : undefined - function handleConfirm(links: Array) { + function handleConfirm(links: Array) { if (!pickFrom || !picker || !links) return const seen = new Set() for (const link of links) { @@ -232,7 +232,7 @@ function LinksInput({ fields={options.pickers[reference.type].fields} state={state.child(reference.id)} picker={options.pickers[reference.type]} - reference={reference as Row} + reference={reference as ListRow & Row} onRemove={() => list.remove(reference.id)} isSortable={options.max !== 1} /> diff --git a/src/input/number/NumberField.browser.tsx b/src/input/number/NumberField.browser.tsx index 55e37ad7b..0896100ef 100644 --- a/src/input/number/NumberField.browser.tsx +++ b/src/input/number/NumberField.browser.tsx @@ -18,8 +18,16 @@ interface NumberInputProps { function NumberInput({state, field}: NumberInputProps) { const {label, options} = field[Field.Data] - const {inline, help, optional, width, minValue, maxValue, readonly, step} = - options + const { + inline, + help, + optional, + width, + minValue, + maxValue, + readOnly: readonly, + step + } = options const [value, setValue] = useInput(state) return ( diff --git a/src/input/number/NumberField.stories.tsx b/src/input/number/NumberField.stories.tsx index 6a8c4189a..683305adc 100644 --- a/src/input/number/NumberField.stories.tsx +++ b/src/input/number/NumberField.stories.tsx @@ -7,7 +7,7 @@ import {UIStory} from 'alinea/ui/UIStory' export function NumberField() { const numberField = useField(number('Number')) const readonlyNumberField = useField( - number('Number (read-only)', {readonly: true, initialValue: 0}) + number('Number (read-only)', {readOnly: true, initialValue: 0}) ) return ( diff --git a/src/input/path/PathField.browser.tsx b/src/input/path/PathField.browser.tsx index f11a578ee..71aee8898 100644 --- a/src/input/path/PathField.browser.tsx +++ b/src/input/path/PathField.browser.tsx @@ -53,7 +53,7 @@ function PathInput({state, field}: PathInputProps) { onFocus={() => setFocus(true)} onBlur={() => setFocus(false)} placeholder={' '} - disabled={options.readonly} + disabled={options.readOnly} /> ) diff --git a/src/input/richtext/RichTextField.browser.tsx b/src/input/richtext/RichTextField.browser.tsx index c1a7afa0b..807b67e9c 100644 --- a/src/input/richtext/RichTextField.browser.tsx +++ b/src/input/richtext/RichTextField.browser.tsx @@ -166,7 +166,7 @@ function RichTextEditor({ const picker = usePickTextLink() const {optional, inline, help, width, schema} = options const [focus, setFocus] = useState(false) - const [value, {fragment, insert}] = useInput(state) + const [value, {readOnly, fragment, insert}] = useInput(state) const toolbarRef = useRef(null) const containerRef = useRef(null) const focusToggle = useCallback( @@ -223,7 +223,7 @@ function RichTextEditor({ onFocus, onBlur, extensions, - editable: !options.readonly + editable: !options.readOnly && !readOnly }, [] ) @@ -252,7 +252,7 @@ function RichTextEditor({ > diff --git a/src/input/richtext/RichTextField.module.scss b/src/input/richtext/RichTextField.module.scss index fb0957968..e6a28e868 100644 --- a/src/input/richtext/RichTextField.module.scss +++ b/src/input/richtext/RichTextField.module.scss @@ -16,6 +16,7 @@ resize: none; cursor: text; line-height: 1.5; + min-height: 40px; } &-editor.is-focus > * { diff --git a/src/input/richtext/RichTextField.tsx 
b/src/input/richtext/RichTextField.tsx index 28b84429d..b601ac26f 100644 --- a/src/input/richtext/RichTextField.tsx +++ b/src/input/richtext/RichTextField.tsx @@ -18,7 +18,7 @@ export interface RichTextOptions { /** Hide this rich text field */ hidden?: boolean /** Make this rich text field read-only */ - readonly?: boolean + readOnly?: boolean } /** Internal representation of a rich text field */ diff --git a/src/input/select/SelectField.browser.tsx b/src/input/select/SelectField.browser.tsx index 773314708..cd3ed2a4a 100644 --- a/src/input/select/SelectField.browser.tsx +++ b/src/input/select/SelectField.browser.tsx @@ -56,7 +56,7 @@ function SelectInput>({ return (
- + {({open}) => (
diff --git a/src/input/select/SelectField.tsx b/src/input/select/SelectField.tsx index fa9afabac..04137e146 100644 --- a/src/input/select/SelectField.tsx +++ b/src/input/select/SelectField.tsx @@ -21,7 +21,7 @@ export interface SelectConfig extends FieldOptions { /** Hide this select field */ hidden?: boolean /** Make this select field read-only */ - readonly?: boolean + readOnly?: boolean } export interface SelectOptions extends SelectConfig { diff --git a/src/input/text/TextField.browser.tsx b/src/input/text/TextField.browser.tsx index 803ecfbea..6ba354ac8 100644 --- a/src/input/text/TextField.browser.tsx +++ b/src/input/text/TextField.browser.tsx @@ -61,7 +61,7 @@ function TextInput({state, field}: TextInputProps) { onBlur={() => setFocus(false)} placeholder={placeholder} autoFocus={autoFocus} - disabled={options.readonly} + disabled={options.readOnly} /> {IconRight && } diff --git a/src/input/text/TextField.stories.tsx b/src/input/text/TextField.stories.tsx index 09f9ea138..b03ee6171 100644 --- a/src/input/text/TextField.stories.tsx +++ b/src/input/text/TextField.stories.tsx @@ -8,7 +8,7 @@ export function TextField() { const textField = useField(text('Text')) const focusedTextField = useField(text('Text (autofocus)', {autoFocus: true})) const readonlyTextField = useField( - text('Text (read-only)', {readonly: true, initialValue: 'Hello world'}) + text('Text (read-only)', {readOnly: true, initialValue: 'Hello world'}) ) return ( diff --git a/src/picker/entry/EntryPicker.stories.tsx b/src/picker/entry/EntryPicker.stories.tsx index 7ac5f9b47..8dd491413 100644 --- a/src/picker/entry/EntryPicker.stories.tsx +++ b/src/picker/entry/EntryPicker.stories.tsx @@ -1,4 +1,4 @@ -import {example} from 'alinea/backend/test/Example' +import {createExample} from 'alinea/backend/test/Example' import { queryClientAtom, useSetDashboardOptions @@ -9,6 +9,7 @@ import {useState} from 'react' import {QueryClientProvider} from 'react-query' import {EntryPickerModal} from './EntryPicker.browser.js' +const example = createExample() const client = await example.connection() export function ImagePicker() { diff --git a/src/picker/entry/EntryPicker.ts b/src/picker/entry/EntryPicker.ts index ea4e2f1e5..4ea5c579c 100644 --- a/src/picker/entry/EntryPicker.ts +++ b/src/picker/entry/EntryPicker.ts @@ -2,11 +2,12 @@ import {Entry} from 'alinea/core/Entry' import {Hint} from 'alinea/core/Hint' import {Label} from 'alinea/core/Label' import {Reference} from 'alinea/core/Reference' -import {Shape} from 'alinea/core/Shape' import {Type} from 'alinea/core/Type' import {MediaFile} from 'alinea/core/media/MediaSchema' import {Expr} from 'alinea/core/pages/Expr' import {Projection} from 'alinea/core/pages/Projection' +import {RecordShape} from 'alinea/core/shape/RecordShape' +import {ScalarShape} from 'alinea/core/shape/ScalarShape' import {assign} from 'alinea/core/util/Objects' import {Picker} from 'alinea/editor/Picker' import {EntryLinkReference} from './EntryReference.js' @@ -59,8 +60,8 @@ export function entryPicker( package: 'alinea/picker/entry' })*/ return { - shape: Shape.Record('Entry', { - entry: Shape.Scalar('Entry') + shape: new RecordShape('Entry', { + entry: new ScalarShape('Entry') }).concat(extra), hint: options.fields ? 
Hint.Intersection(options.hint, Type.hint(options.fields)) diff --git a/src/picker/url/UrlPicker.ts b/src/picker/url/UrlPicker.ts index 79d0e841f..fe3661607 100644 --- a/src/picker/url/UrlPicker.ts +++ b/src/picker/url/UrlPicker.ts @@ -1,7 +1,8 @@ import {Hint} from 'alinea/core/Hint' import {Reference} from 'alinea/core/Reference' -import {Shape} from 'alinea/core/Shape' import {Type} from 'alinea/core/Type' +import {RecordShape} from 'alinea/core/shape/RecordShape' +import {ScalarShape} from 'alinea/core/shape/ScalarShape' import {Picker} from 'alinea/editor/Picker' export interface UrlReference extends Reference { @@ -26,10 +27,10 @@ export function urlPicker( ): Picker> { const extra = options.fields && Type.shape(options.fields) return { - shape: Shape.Record('Url', { - url: Shape.Scalar('Url'), - description: Shape.Scalar('Description'), - target: Shape.Scalar('Target') + shape: new RecordShape('Url', { + url: new ScalarShape('Url'), + description: new ScalarShape('Description'), + target: new ScalarShape('Target') }).concat(extra), hint: Hint.Extern({name: 'UrlReference', package: 'alinea/picker/url'}), label: 'External website', diff --git a/src/preview/PreviewMessage.ts b/src/preview/PreviewMessage.ts index c79764332..ec1c8b7c0 100644 --- a/src/preview/PreviewMessage.ts +++ b/src/preview/PreviewMessage.ts @@ -1,4 +1,4 @@ -import type {PreviewUpdate} from 'alinea/backend/Resolver' +import type {PreviewUpdate} from 'alinea/core/Resolver' export enum PreviewAction { Ping = '[alinea-ping]', diff --git a/src/preview/RegisterPreview.ts b/src/preview/RegisterPreview.ts index b84d1101b..100c2d8e9 100644 --- a/src/preview/RegisterPreview.ts +++ b/src/preview/RegisterPreview.ts @@ -1,4 +1,4 @@ -import {PreviewUpdate} from 'alinea/backend/Resolver' +import type {PreviewUpdate} from 'alinea/core/Resolver' import {PreviewAction, PreviewMessage} from 'alinea/preview/PreviewMessage' export interface PreviewApi { diff --git a/src/ui/AppBar.module.scss b/src/ui/AppBar.module.scss index f5aa41259..ac4f0525f 100644 --- a/src/ui/AppBar.module.scss +++ b/src/ui/AppBar.module.scss @@ -36,8 +36,7 @@ background: var(--alinea-variant-disabled-background); } } - &.is-publishing, - &.is-archiving { + &.is-transition { color: var(--alinea-variant-progress-foreground); &::before { background: var(--alinea-variant-progress-background); diff --git a/src/ui/AppBar.tsx b/src/ui/AppBar.tsx index d6007377b..d368d0967 100644 --- a/src/ui/AppBar.tsx +++ b/src/ui/AppBar.tsx @@ -14,10 +14,9 @@ export namespace AppBar { | 'editing' | 'published' | 'archived' - | 'archiving' - | 'publishing' | 'untranslated' | 'revision' + | 'transition' } export function Root({variant, ...props}: RootProps) { diff --git a/src/ui/Loader.module.scss b/src/ui/Loader.module.scss index f863bbcb5..700ea2576 100644 --- a/src/ui/Loader.module.scss +++ b/src/ui/Loader.module.scss @@ -4,10 +4,10 @@ animation: fadein 0.25s; &-inner { - border-top: 0.15em solid rgba(255, 255, 255, 0.2); - border-right: 0.15em solid rgba(255, 255, 255, 0.2); - border-bottom: 0.15em solid rgba(255, 255, 255, 0.2); - border-left: 0.15em solid #ffffff; + border-top: 0.18em solid currentColor; + border-right: 0.18em solid transparent; + border-bottom: 0.18em solid transparent; + border-left: 0.18em solid transparent; transform: translateZ(0); animation: load8 1.1s infinite linear; &, diff --git a/src/ui/Loader.tsx b/src/ui/Loader.tsx index fd3740821..65fb175a6 100644 --- a/src/ui/Loader.tsx +++ b/src/ui/Loader.tsx @@ -16,14 +16,7 @@ export function Loader({light, 
absolute, size = 22, ...props}: LoaderProps) { style={{fontSize: size}} className={styles.loader.mergeProps(props)({absolute})} > -
+
) } diff --git a/src/ui/Main.module.scss b/src/ui/Main.module.scss index e0c9420f7..e31d4cf62 100644 --- a/src/ui/Main.module.scss +++ b/src/ui/Main.module.scss @@ -11,6 +11,13 @@ background: var(--alinea-content); min-width: 0; + &-loading { + position: absolute; + background: hsla(var(--alinea-negative), 0.05); + inset: 0; + z-index: 5; + } + &-inner { width: 100%; flex-grow: 1; diff --git a/src/ui/Main.tsx b/src/ui/Main.tsx index 96c08deba..846d900e5 100644 --- a/src/ui/Main.tsx +++ b/src/ui/Main.tsx @@ -1,5 +1,6 @@ import {assign} from 'alinea/core/util/Objects' import {HTMLProps, ReactNode, Ref, forwardRef} from 'react' +import {Loader} from './Loader.js' import css from './Main.module.scss' import {fromModule} from './util/Styler.js' @@ -9,14 +10,22 @@ export interface MainProps extends HTMLProps { head?: ReactNode scrollRef?: Ref scrollable?: boolean + isLoading?: boolean } function MainRoot( - {children, head, scrollRef, scrollable = true, ...props}: MainProps, + { + children, + head, + scrollRef, + isLoading, + scrollable = true, + ...props + }: MainProps, ref: Ref ) { return ( -
+
{head}
{children}
+ {isLoading && ( +
+ +
+ )}
) } diff --git a/src/ui/Statusbar.module.scss b/src/ui/Statusbar.module.scss index 8bcb46582..a3a7029c6 100644 --- a/src/ui/Statusbar.module.scss +++ b/src/ui/Statusbar.module.scss @@ -11,8 +11,9 @@ .status { display: flex; + align-items: center; height: 100%; font-size: 0.8em; - cursor: pointer; padding: 0 6px; + cursor: default; } diff --git a/src/ui/Statusbar.tsx b/src/ui/Statusbar.tsx index d066b5c58..fd5a68137 100644 --- a/src/ui/Statusbar.tsx +++ b/src/ui/Statusbar.tsx @@ -21,7 +21,7 @@ export namespace Statusbar { } export type StatusProps = PropsWithChildren<{ - icon: ComponentType + icon?: ComponentType }> export function Status({children, icon}: StatusProps) { diff --git a/src/ui/icons/IcOutlineAvTimer.tsx b/src/ui/icons/IcOutlineAvTimer.tsx index 199f49028..9b7792f31 100644 --- a/src/ui/icons/IcOutlineAvTimer.tsx +++ b/src/ui/icons/IcOutlineAvTimer.tsx @@ -1,8 +1,18 @@ -import React, { SVGProps } from 'react' +import {SVGProps} from 'react' export function IcOutlineAvTimer(props: SVGProps) { return ( - + + + ) } -export default IcOutlineAvTimer \ No newline at end of file diff --git a/src/ui/icons/IcOutlineTableRows.tsx b/src/ui/icons/IcOutlineTableRows.tsx new file mode 100644 index 000000000..ff6f3e81b --- /dev/null +++ b/src/ui/icons/IcOutlineTableRows.tsx @@ -0,0 +1,18 @@ +import {SVGProps} from 'react' + +export function IcOutlineTableRows(props: SVGProps) { + return ( + + + + ) +} diff --git a/src/ui/icons/MaterialSymbolsDatabase.tsx b/src/ui/icons/MaterialSymbolsDatabase.tsx new file mode 100644 index 000000000..19dfd5414 --- /dev/null +++ b/src/ui/icons/MaterialSymbolsDatabase.tsx @@ -0,0 +1,18 @@ +import {SVGProps} from 'react' + +export function MaterialSymbolsDatabase(props: SVGProps) { + return ( + + + + ) +} diff --git a/yarn.lock b/yarn.lock index 52ab04a97..b04d1a32a 100644 --- a/yarn.lock +++ b/yarn.lock @@ -29,7 +29,7 @@ __metadata: cito: ^0.2.0 dataloader: ^2.1.0 pretty-ms: ^8.0.0 - rado: ^0.4.3 + rado: ^0.4.4 regexparam: ^2.0.1 xxhash-wasm: ^1.0.2 languageName: unknown @@ -75,9 +75,11 @@ __metadata: "@headlessui/react": ^1.7.17 "@react-hook/size": ^2.1.2 "@react-hook/window-size": ^3.0.7 + "@types/debounce-promise": ^3.1.8 "@types/diff-match-patch": ^1.0.32 color2k: ^1.2.4 dataloader: ^2.1.0 + debounce-promise: ^3.1.2 diff-match-patch: ^1.0.5 fflate: ^0.8.0 jotai: ^2.4.2 @@ -2169,6 +2171,13 @@ __metadata: languageName: node linkType: hard +"@types/debounce-promise@npm:^3.1.8": + version: 3.1.8 + resolution: "@types/debounce-promise@npm:3.1.8" + checksum: 50ef1c9843ffef3f72fc7b176a90c231c8e2ece6ccc41bb7efaf9461f9111f337aae19fd55770357d0215d0292bd1c614ce59c8bf25a3d4d3f63be0f0f78b7d2 + languageName: node + linkType: hard + "@types/debug@npm:^4.0.0": version: 4.1.8 resolution: "@types/debug@npm:4.1.8" @@ -3663,6 +3672,13 @@ __metadata: languageName: node linkType: hard +"debounce-promise@npm:^3.1.2": + version: 3.1.2 + resolution: "debounce-promise@npm:3.1.2" + checksum: 29bac4524c423cc852319d7455363909ea3d933a3b9e3eb1149d963cffc34c475fe37219d0bafc61af566500b5d663cba579bbad7ee4023bef06f8394ed900ad + languageName: node + linkType: hard + "debug@npm:4, debug@npm:^4.1.0, debug@npm:^4.3.1": version: 4.3.2 resolution: "debug@npm:4.3.2" @@ -8739,10 +8755,10 @@ fsevents@~2.3.2: languageName: node linkType: hard -"rado@npm:^0.4.3": - version: 0.4.3 - resolution: "rado@npm:0.4.3" - checksum: 910cd98e4f5b0fcd2c0fe89eec364c59147bce478cb28400f4165daad14d7977383fca8fc9c5249bdbfc19580c921adadd14bcc4bcef37a5a6014065c62eb642 +"rado@npm:^0.4.4": + version: 0.4.4 + resolution: 
"rado@npm:0.4.4" + checksum: c20af5ed1fbd7d564c915399e94314ab2f91f32f2956aa57e136eb93b82f6357f6639bb118567a5d6834223bd677a24ca5f2644f840ca59c9f14d0e010d216bc languageName: node linkType: hard