} The updated accumulator.
+ *
+ * @example
+ * const links = page.story.reduce(extractPageLinks, new Map())
+ * // Map { 'some-page' => 'item-id-1', 'other-page' => 'item-id-3' }
+ */
+export const extractPageLinks = (collaborativeLinks, currentItem, currentIndex, array) => {
+ try {
+ const linkRe = /\[\[([^\]]+)\]\]/g
+ let match = undefined
+ while ((match = linkRe.exec(currentItem.text)) != null) {
+ if (!collaborativeLinks.has(asSlug(match[1]))) {
+ collaborativeLinks.set(asSlug(match[1]), currentItem.id)
+ }
+ }
+ if ('reference' == currentItem.type) {
+ if (!collaborativeLinks.has(currentItem.slug)) {
+ collaborativeLinks.set(currentItem.slug, currentItem.id)
+ }
+ }
+ } catch (err) {
+ console.log(`METADATA *** Error extracting links from ${currentIndex} of ${JSON.stringify(array)}`, err.message)
+ }
+ return collaborativeLinks
+}
diff --git a/package.json b/package.json
index d55e354..6a56919 100644
--- a/package.json
+++ b/package.json
@@ -51,8 +51,8 @@
"scripts": {
"prettier:format": "prettier --write './**/*.js'",
"prettier:check": "prettier --check ./**/*.js",
- "test": "cd test; node --test",
- "watch": "cd test; node --test --watch",
+ "test": "node --test",
+ "watch": "node --test --watch",
"update-authors": "node scripts/update-authors.js"
},
"devDependencies": {
diff --git a/start.js b/start.js
new file mode 100644
index 0000000..be23655
--- /dev/null
+++ b/start.js
@@ -0,0 +1,23 @@
+#!/usr/bin/env node
+
+import path from 'node:path'
+import { fileURLToPath } from 'node:url'
+import server from './lib/server.js'
+
+const __dirname = path.dirname(fileURLToPath(import.meta.url))
+
+const argv = {
+ root: __dirname,
+ port: parseInt(process.env.PORT || '3000', 10),
+ data: process.env.WIKI_DATA || undefined,
+ packageFile: path.join(__dirname, 'package.json'),
+}
+
+const app = await server(argv)
+
+const { port, host } = app.startOpts
+
+const srv = app.listen(port, host, () => {
+ console.log(`wiki listening on http://${host || 'localhost'}:${port}`)
+ app.emit('running-serv', srv)
+})
diff --git a/test/page.js b/test/page.js
index 590d8a4..77c8bf7 100644
--- a/test/page.js
+++ b/test/page.js
@@ -8,16 +8,17 @@ import { fileURLToPath } from 'node:url'
const __filename = fileURLToPath(import.meta.url)
const __dirname = path.dirname(__filename)
-// ESM module imports (assuming default exports)
import random from '../lib/random_id.js'
import defaultargs from '../lib/defaultargs.js'
import pageFactory from '../lib/page.js'
+import { PageNotFoundError } from '../lib/errors.js'
const testid = random()
const argv = defaultargs({
data: path.join('/tmp', 'sfwtests', testid),
root: path.join(__dirname, '..'),
packageDir: path.join(__dirname, '..', 'node_modules'),
+ packageFile: path.join(__dirname, 'package.json'),
security_legacy: true,
})
@@ -30,71 +31,42 @@ console.log('testid', testid)
describe('page', () => {
describe('#page.put()', () => {
it('should save a page', async () => {
- return new Promise(resolve => {
- page.put('asdf', testpage, e => {
- if (e) throw e
- resolve()
- })
- })
+ await page.put('asdf', testpage)
})
})
+
describe('#page.get()', () => {
it('should get a page if it exists', async () => {
- return new Promise(resolve => {
- page.get('asdf', (e, got) => {
- if (e) throw e
- assert.equal(got.title, 'Asdf')
- resolve()
- })
- })
+ const got = await page.get('asdf')
+ assert.equal(got.title, 'Asdf')
})
+
it('should copy a page from default if nonexistant in db', async () => {
- return new Promise(resolve => {
- page.get('welcome-visitors', (e, got) => {
- if (e) throw e
- assert.equal(got.title, 'Welcome Visitors')
- resolve()
- })
- })
+ const got = await page.get('welcome-visitors')
+ assert.equal(got.title, 'Welcome Visitors')
})
- // note: here we assume the wiki-plugin-activity repo has been cloned into an adjacent directory
+
it('should copy a page from plugins if nonexistant in db', async () => {
- return new Promise(resolve => {
- page.get('recent-changes', (e, got) => {
- if (e) throw e
- assert.equal(got.title, 'Recent Changes')
- resolve()
- })
- })
+ const got = await page.get('recent-changes')
+ assert.equal(got.title, 'Recent Changes')
})
- // note: here we assume the wiki-plugin-activity repo has been cloned into an adjacent directory
+
it('should mark a page from plugins with the plugin name', async () => {
- return new Promise(resolve => {
- page.get('recent-changes', (e, got) => {
- if (e) throw e
- assert.equal(got.plugin, 'activity')
- resolve()
- })
- })
+ const got = await page.get('recent-changes')
+ assert.equal(got.plugin, 'activity')
})
- it('should create a page if it exists nowhere', async () => {
- return new Promise(resolve => {
- page.get(random(), (e, got) => {
- if (e) throw e
- assert.equal(got, 'Page not found')
- resolve()
- })
- })
+
+ it('should throw PageNotFoundError if it exists nowhere', async () => {
+ await assert.rejects(
+ () => page.get(random()),
+ err => err instanceof PageNotFoundError,
+ )
})
+
it('should eventually write the page to disk', async () => {
- return new Promise(resolve => {
- page.get('asdf', (e, got) => {
- if (e) throw e
- const page = JSON.parse(fs.readFileSync(path.join(path.sep, 'tmp', 'sfwtests', testid, 'pages', 'asdf')))
- assert.equal(got.title, page.title)
- resolve()
- })
- })
+ const got = await page.get('asdf')
+ const ondisk = JSON.parse(fs.readFileSync(path.join('/tmp', 'sfwtests', testid, 'pages', 'asdf'), 'utf8'))
+ assert.equal(got.title, ondisk.title)
})
})
})
diff --git a/test/server.js b/test/server.js
index b6ed51d..69a0ced 100644
--- a/test/server.js
+++ b/test/server.js
@@ -9,10 +9,8 @@ import { fileURLToPath } from 'node:url'
const __filename = fileURLToPath(import.meta.url)
const __dirname = path.dirname(__filename)
-// CommonJS server module (.cjs)
const server = await import('../index.js')
-// ESM modules
import random from '../lib/random_id.js'
import defaultargs from '../lib/defaultargs.js'
@@ -20,26 +18,26 @@ const testid = random()
const argv = defaultargs({
data: path.join('/tmp', 'sfwtests', testid),
packageDir: path.join(__dirname, '..', 'node_modules'),
+ packageFile: path.join(__dirname, 'package.json'),
port: 55557,
security_legacy: true,
test: true,
})
describe('server', () => {
- var app = {}
+ let app = {}
let runningServer = null
- before(async done => {
- // as starting the server this was does not create a sitemap file, create an empty one
+
+ before(async () => {
const sitemapLoc = path.join('/tmp', 'sfwtests', testid, 'status', 'sitemap.json')
- fs.mkdirSync(path.join('/tmp', 'sfwtests', testid))
- fs.mkdirSync(path.join('/tmp', 'sfwtests', testid, 'status'))
+ fs.mkdirSync(path.join('/tmp', 'sfwtests', testid), { recursive: true })
+ fs.mkdirSync(path.join('/tmp', 'sfwtests', testid, 'status'), { recursive: true })
fs.writeFileSync(sitemapLoc, JSON.stringify([]))
- let x = await server.default(argv)
- app = x
- // app = server(argv)
- app.once('owner-set', async () => {
- runningServer = await app.listen(app.startOpts.port, app.startOpts.host, done)
+ app = await server.default(argv)
+
+ await new Promise(resolve => {
+ runningServer = app.listen(app.startOpts.port, app.startOpts.host, resolve)
})
})
@@ -48,27 +46,17 @@ describe('server', () => {
})
const request = supertest('http://localhost:55557')
-
- // location of the test page
const loc = path.join('/tmp', 'sfwtests', testid, 'pages', 'adsf-test-page')
it('factories should return a list of plugin', async () => {
- await request
- .get('/system/factories.json')
- .expect(200)
- .expect('Content-Type', /json/)
- .then(res => {
- assert.equal(res.body[1].name, 'Video')
- assert.equal(res.body[1].category, 'format')
- })
+ const res = await request.get('/system/factories.json').expect(200).expect('Content-Type', /json/)
+ assert.equal(res.body[1].name, 'Video')
+ assert.equal(res.body[1].category, 'format')
})
it('new site should have an empty list of pages', async () => {
- await request
- .get('/system/slugs.json')
- .expect(200)
- .expect('Content-Type', /json/)
- .then(res => assert.deepEqual(res.body, []))
+ const res = await request.get('/system/slugs.json').expect(200).expect('Content-Type', /json/)
+ assert.deepEqual(res.body, [])
})
it('should create a page', async () => {
@@ -92,27 +80,18 @@ describe('server', () => {
.expect(200)
})
- it('should move the paragraphs to the order given ', async () => {
+ it('should move the paragraphs to the order given', async () => {
const body = '{ "type": "move", "order": [ "a1", "a3", "a2", "a4"] }'
await request
.put('/page/adsf-test-page/action')
.send('action=' + body)
.expect(200)
- .then(
- () => {
- const page = JSON.parse(fs.readFileSync(loc))
- assert.equal(page.story[1].id, 'a3')
- assert.equal(page.story[2].id, 'a2')
- assert.equal(page.journal[1].type, 'move')
- },
- err => {
- throw err
- },
- )
- .catch(err => {
- throw err
- })
+
+ const page = JSON.parse(fs.readFileSync(loc, 'utf8'))
+ assert.equal(page.story[1].id, 'a3')
+ assert.equal(page.story[2].id, 'a2')
+ assert.equal(page.journal[1].type, 'move')
})
it('should add a paragraph', async () => {
@@ -126,15 +105,11 @@ describe('server', () => {
.put('/page/adsf-test-page/action')
.send('action=' + body)
.expect(200)
- .then(() => {
- const page = JSON.parse(fs.readFileSync(loc))
- assert.equal(page.story.length, 5)
- assert.equal(page.story[3].id, 'a5')
- assert.equal(page.journal[2].type, 'add')
- })
- .catch(err => {
- throw err
- })
+
+ const page = JSON.parse(fs.readFileSync(loc, 'utf8'))
+ assert.equal(page.story.length, 5)
+ assert.equal(page.story[3].id, 'a5')
+ assert.equal(page.journal[2].type, 'add')
})
it('should remove a paragraph with given id', async () => {
@@ -147,17 +122,13 @@ describe('server', () => {
.put('/page/adsf-test-page/action')
.send('action=' + body)
.expect(200)
- .then(() => {
- const page = JSON.parse(fs.readFileSync(loc))
- assert.equal(page.story.length, 4)
- assert.equal(page.story[1].id, 'a3')
- assert.notEqual(page.story[2].id, 'a2')
- assert.equal(page.story[2].id, 'a5')
- assert.equal(page.journal[3].type, 'remove')
- })
- .catch(err => {
- throw err
- })
+
+ const page = JSON.parse(fs.readFileSync(loc, 'utf8'))
+ assert.equal(page.story.length, 4)
+ assert.equal(page.story[1].id, 'a3')
+ assert.notEqual(page.story[2].id, 'a2')
+ assert.equal(page.story[2].id, 'a5')
+ assert.equal(page.journal[3].type, 'remove')
})
it('should edit a paragraph in place', async () => {
@@ -171,14 +142,10 @@ describe('server', () => {
.put('/page/adsf-test-page/action')
.send('action=' + body)
.expect(200)
- .then(() => {
- const page = JSON.parse(fs.readFileSync(loc))
- assert.equal(page.story[1].text, 'edited')
- assert.equal(page.journal[4].type, 'edit')
- })
- .catch(err => {
- throw err
- })
+
+ const page = JSON.parse(fs.readFileSync(loc, 'utf8'))
+ assert.equal(page.story[1].text, 'edited')
+ assert.equal(page.journal[4].type, 'edit')
})
it('should default to no change', async () => {
@@ -190,17 +157,13 @@ describe('server', () => {
.put('/page/adsf-test-page/action')
.send('action=' + body)
.expect(500)
- .then(() => {
- const page = JSON.parse(fs.readFileSync(loc))
- assert.equal(page.story.length, 4)
- assert.equal(page.journal.length, 5)
- assert.equal(page.story[0].id, 'a1')
- assert.equal(page.story[3].text, 'this is the fourth paragraph')
- assert.equal(page.journal[4].type, 'edit')
- })
- .catch(err => {
- throw err
- })
+
+ const page = JSON.parse(fs.readFileSync(loc, 'utf8'))
+ assert.equal(page.story.length, 4)
+ assert.equal(page.journal.length, 5)
+ assert.equal(page.story[0].id, 'a1')
+ assert.equal(page.story[3].text, 'this is the fourth paragraph')
+ assert.equal(page.journal[4].type, 'edit')
})
it('should refuse to create over a page', async () => {
@@ -214,44 +177,23 @@ describe('server', () => {
.put('/page/adsf-test-page/action')
.send('action=' + body)
.expect(409)
- .then(() => {
- const page = JSON.parse(fs.readFileSync(loc))
- assert.notEqual(page.title, 'Doh')
- })
- .catch(err => {
- throw err
- })
+
+ const page = JSON.parse(fs.readFileSync(loc, 'utf8'))
+ assert.notEqual(page.title, 'Doh')
})
it('site should now have one page', async () => {
- await request
- .get('/system/slugs.json')
- .expect(200)
- .expect('Content-Type', /json/)
- .then(res => {
- assert.equal(res.body.length, 1)
- assert.equal(res.body[0], 'adsf-test-page')
- })
- .catch(err => {
- throw err
- })
+ const res = await request.get('/system/slugs.json').expect(200).expect('Content-Type', /json/)
+ assert.equal(res.body.length, 1)
+ assert.equal(res.body[0], 'adsf-test-page')
})
- // Should be a version test, but doesn't seem it's supported in test mode yet.
it.skip('server should return a version', async () => {
- await request
- .get('/system/version.json')
- .expect(200)
- .expect('Content-Type', /json/)
- .then(res => {
- assert.equal(res.body.wiki, '0.1')
- assert.equal(res.body['wiki-server'], '0.2')
- assert.equal(res.body['wiki-client'], '0.3')
- assert.equal(res.body.plugins['wiki-plugin-activity'], '0.4')
- assert.equal(res.body.plugins['wiki-plugin-video'], '0.5')
- })
- .catch(err => {
- throw err
- })
+ const res = await request.get('/system/version.json').expect(200).expect('Content-Type', /json/)
+ assert.equal(res.body.wiki, '0.1')
+ assert.equal(res.body['wiki-server'], '0.2')
+ assert.equal(res.body['wiki-client'], '0.3')
+ assert.equal(res.body.plugins['wiki-plugin-activity'], '0.4')
+ assert.equal(res.body.plugins['wiki-plugin-video'], '0.5')
})
})
diff --git a/test/sitemap.js b/test/sitemap.js
index adf8504..002bf82 100644
--- a/test/sitemap.js
+++ b/test/sitemap.js
@@ -6,20 +6,19 @@ import fs from 'node:fs'
import path from 'node:path'
import { fileURLToPath } from 'node:url'
-// Emulate __dirname in ESM
const __filename = fileURLToPath(import.meta.url)
const __dirname = path.dirname(__filename)
-// Dynamic import of CommonJS module
const server = await import('../index.js')
-// ESM imports
import random from '../lib/random_id.js'
import defaultargs from '../lib/defaultargs.js'
const testid = random()
const argv = defaultargs({
data: path.join('/tmp', 'sfwtests', testid),
+ packageDir: path.join(__dirname, '..', 'node_modules'),
+ packageFile: path.join(__dirname, 'package.json'),
port: 55556,
security_legacy: true,
test: true,
@@ -29,13 +28,12 @@ describe('sitemap', () => {
let app = {}
let runningServer = null
- before(async done => {
- let x = await server.default(argv)
- app = x
+ before(async () => {
+ fs.mkdirSync(path.join('/tmp', 'sfwtests', testid, 'pages'), { recursive: true })
+ app = await server.default(argv)
- // app = server(argv)
- app.once('owner-set', () => {
- runningServer = app.listen(app.startOpts.port, app.startOpts.host, done)
+ await new Promise(resolve => {
+ runningServer = app.listen(app.startOpts.port, app.startOpts.host, resolve)
})
})
@@ -44,19 +42,13 @@ describe('sitemap', () => {
})
const request = supertest('http://localhost:55556')
- fs.mkdirSync(path.join('/tmp', 'sfwtests', testid, 'pages'), { recursive: true })
-
- // location of the sitemap
const sitemapLoc = path.join('/tmp', 'sfwtests', testid, 'status', 'sitemap.json')
+ const waitForSitemap = () => new Promise(resolve => app.sitemaphandler.once('finished', resolve))
+
it('new site should have an empty sitemap', async () => {
- await request
- .get('/system/sitemap.json')
- .expect(200)
- .expect('Content-Type', /json/)
- .then(res => {
- assert.equal(res.body.length, 0)
- })
+ const res = await request.get('/system/sitemap.json').expect(200).expect('Content-Type', /json/)
+ assert.equal(res.body.length, 0)
})
it('creating a page should add it to the sitemap', async () => {
@@ -78,19 +70,13 @@ describe('sitemap', () => {
.put('/page/adsf-test-page/action')
.send('action=' + body)
.expect(200)
- // sitemap update does not happen until after the put has returned, so wait for it to finish
- .then(() => new Promise(resolve => app.sitemaphandler.once('finished', () => resolve())))
- .then(
- () => {
- const sitemap = JSON.parse(fs.readFileSync(sitemapLoc))
- assert.equal(sitemap[0].slug, 'adsf-test-page')
- assert.equal(sitemap[0].synopsis, 'this is the first paragraph')
- assert.deepEqual(sitemap[0].links, { third: 'a3' })
- },
- err => {
- throw err
- },
- )
+
+ await waitForSitemap()
+
+ const sitemap = JSON.parse(fs.readFileSync(sitemapLoc, 'utf8'))
+ assert.equal(sitemap[0].slug, 'adsf-test-page')
+ assert.equal(sitemap[0].synopsis, 'this is the first paragraph')
+ assert.deepEqual(sitemap[0].links, { third: 'a3' })
})
it('synopsis should reflect edit to first paragraph', async () => {
@@ -104,23 +90,20 @@ describe('sitemap', () => {
.put('/page/adsf-test-page/action')
.send('action=' + body)
.expect(200)
- .then(() => new Promise(resolve => app.sitemaphandler.once('finished', () => resolve())))
- .then(() => {
- const sitemap = JSON.parse(fs.readFileSync(sitemapLoc))
- assert.equal(sitemap[0].slug, 'adsf-test-page')
- assert.equal(sitemap[0].synopsis, 'edited')
- })
+
+ await waitForSitemap()
+
+ const sitemap = JSON.parse(fs.readFileSync(sitemapLoc, 'utf8'))
+ assert.equal(sitemap[0].slug, 'adsf-test-page')
+ assert.equal(sitemap[0].synopsis, 'edited')
})
it('deleting a page should remove it from the sitemap', async () => {
- await request
- .delete('/adsf-test-page.json')
- .send()
- .expect(200)
- .then(() => new Promise(resolve => app.sitemaphandler.once('finished', () => resolve())))
- .then(() => {
- const sitemap = JSON.parse(fs.readFileSync(sitemapLoc))
- assert.deepEqual(sitemap, [])
- })
+ await request.delete('/adsf-test-page.json').send().expect(200)
+
+ await waitForSitemap()
+
+ const sitemap = JSON.parse(fs.readFileSync(sitemapLoc, 'utf8'))
+ assert.deepEqual(sitemap, [])
})
})
diff --git a/test/utils.js b/test/utils.js
new file mode 100644
index 0000000..ecd9d9e
--- /dev/null
+++ b/test/utils.js
@@ -0,0 +1,188 @@
+import { describe, it } from 'node:test'
+import assert from 'node:assert/strict'
+
+import { asSlug, lastEdit, extractPageLinks, synopsis } from '../lib/utils.js'
+import { resolveLinks, escape } from '../lib/render.js'
+
+describe('utils', () => {
+ describe('asSlug', () => {
+ it('should replace spaces with hyphens', () => {
+ assert.equal(asSlug('Hello World'), 'hello-world')
+ })
+ it('should lowercase the result', () => {
+ assert.equal(asSlug('FooBar'), 'foobar')
+ })
+ it('should strip non-alphanumeric non-hyphen characters', () => {
+ assert.equal(asSlug('Hello, World!'), 'hello-world')
+ })
+ it('should handle multiple consecutive spaces', () => {
+      assert.equal(asSlug('a  b   c'), 'a--b---c')
+ })
+ it('should return empty string for empty input', () => {
+ assert.equal(asSlug(''), '')
+ })
+ it('should handle tabs and newlines as spaces', () => {
+ assert.equal(asSlug('a\tb\nc'), 'a-b-c')
+ })
+ it('should preserve digits and hyphens', () => {
+ assert.equal(asSlug('page-123'), 'page-123')
+ })
+ it('should strip unicode characters', () => {
+ assert.equal(asSlug('café'), 'caf')
+ })
+ })
+
+ describe('lastEdit', () => {
+ it('should return undefined for undefined journal', () => {
+ assert.equal(lastEdit(undefined), undefined)
+ })
+ it('should return undefined for empty journal', () => {
+ assert.equal(lastEdit([]), undefined)
+ })
+ it('should return the date of the last non-fork entry', () => {
+ const journal = [
+ { type: 'create', date: 100 },
+ { type: 'edit', date: 200 },
+ { type: 'fork', date: 300 },
+ ]
+ assert.equal(lastEdit(journal), 200)
+ })
+ it('should return undefined if all entries are forks', () => {
+ const journal = [
+ { type: 'fork', date: 100 },
+ { type: 'fork', date: 200 },
+ ]
+ assert.equal(lastEdit(journal), undefined)
+ })
+ it('should skip entries without a date', () => {
+ const journal = [{ type: 'edit', date: 100 }, { type: 'edit' }]
+ assert.equal(lastEdit(journal), 100)
+ })
+ })
+
+ describe('extractPageLinks', () => {
+ it('should extract wiki-style links from text', () => {
+ const item = { id: 'i1', type: 'paragraph', text: 'see [[Some Page]] for details' }
+ const links = [item].reduce(extractPageLinks, new Map())
+ assert.equal(links.size, 1)
+ assert.equal(links.get('some-page'), 'i1')
+ })
+ it('should extract multiple links from one item', () => {
+ const item = { id: 'i1', type: 'paragraph', text: '[[Alpha]] and [[Beta]]' }
+ const links = [item].reduce(extractPageLinks, new Map())
+ assert.equal(links.size, 2)
+ assert.equal(links.get('alpha'), 'i1')
+ assert.equal(links.get('beta'), 'i1')
+ })
+ it('should not overwrite an existing slug with a later item id', () => {
+ const items = [
+ { id: 'i1', type: 'paragraph', text: '[[Target]]' },
+ { id: 'i2', type: 'paragraph', text: '[[Target]]' },
+ ]
+ const links = items.reduce(extractPageLinks, new Map())
+ assert.equal(links.get('target'), 'i1')
+ })
+ it('should extract slug from reference items', () => {
+ const item = { id: 'i1', type: 'reference', slug: 'ref-page', text: '', site: 'example.com' }
+ const links = [item].reduce(extractPageLinks, new Map())
+ assert.equal(links.get('ref-page'), 'i1')
+ })
+ it('should return empty map when no links present', () => {
+ const item = { id: 'i1', type: 'paragraph', text: 'no links here' }
+ const links = [item].reduce(extractPageLinks, new Map())
+ assert.equal(links.size, 0)
+ })
+ })
+
+ describe('synopsis', () => {
+ it('should use explicit synopsis field if present', () => {
+ const page = { synopsis: 'explicit', story: [{ type: 'paragraph', text: 'from story' }] }
+ assert.equal(synopsis(page), 'explicit')
+ })
+ it('should use first paragraph text', () => {
+ const page = { story: [{ type: 'paragraph', text: 'first para' }] }
+ assert.equal(synopsis(page), 'first para')
+ })
+ it('should fall back to second paragraph if first is not a paragraph', () => {
+ const page = {
+ story: [
+ { type: 'image', text: 'img' },
+ { type: 'paragraph', text: 'second para' },
+ ],
+ }
+ assert.equal(synopsis(page), 'second para')
+ })
+ it('should use first item text of any type if no paragraphs', () => {
+ const page = { story: [{ type: 'markdown', text: 'md text' }] }
+ assert.equal(synopsis(page), 'md text')
+ })
+ it('should report item count when no text available', () => {
+ const page = { story: [{ type: 'factory' }, { type: 'factory' }] }
+ assert.equal(synopsis(page), 'A page with 2 items.')
+ })
+ it('should handle page with no story', () => {
+ assert.equal(synopsis({}), 'A page with no story.')
+ })
+ it('should truncate at first line break', () => {
+ const page = { story: [{ type: 'paragraph', text: 'line one\nline two' }] }
+ assert.equal(synopsis(page), 'line one')
+ })
+ it('should cap output at 560 characters', () => {
+ const long = 'x'.repeat(600)
+ const page = { story: [{ type: 'paragraph', text: long }] }
+ assert.equal(synopsis(page).length, 560)
+ })
+ })
+
+ describe('escape', () => {
+ it('should escape ampersands', () => {
+      assert.equal(escape('a & b'), 'a &amp; b')
+ })
+ it('should escape angle brackets', () => {
+      assert.equal(escape('<div>'), '&lt;div&gt;')
+ })
+ it('should handle empty string', () => {
+ assert.equal(escape(''), '')
+ })
+ it('should handle undefined', () => {
+ assert.equal(escape(undefined), '')
+ })
+ })
+
+ describe('resolveLinks', () => {
+ it('should convert internal wiki links to anchor tags', () => {
+ const result = resolveLinks('see [[Hello World]] here')
+ assert.match(result, /class="internal"/)
+ assert.match(result, /href="\/hello-world\.html"/)
+ assert.match(result, /data-page-name="hello-world"/)
+ })
+ it('should convert external links to anchor tags', () => {
+ const result = resolveLinks('see [http://example.com Example] here')
+ assert.match(result, /class="external"/)
+ assert.match(result, /href="http:\/\/example\.com"/)
+ assert.match(result, /Example/)
+ })
+ it('should escape plain text', () => {
+ const result = resolveLinks('a < b & c > d')
+      assert.match(result, /&lt;/)
+      assert.match(result, /&amp;/)
+      assert.match(result, /&gt;/)
+ })
+ it('should pass resolution context into link titles', () => {
+ const result = resolveLinks('[[Test]]', undefined, ['page-a', 'page-b'])
+ assert.match(result, /title="page-a => page-b"/)
+ })
+ it('should handle empty string', () => {
+ assert.equal(resolveLinks(''), '')
+ })
+ it('should mark spaced internal links', () => {
+ const result = resolveLinks('[[ Hello ]]')
+ assert.match(result, /class="internal spaced"/)
+ })
+ it('should accept a custom sanitizer', () => {
+ const upper = s => s.toUpperCase()
+ const result = resolveLinks('plain text', upper)
+ assert.equal(result, 'PLAIN TEXT')
+ })
+ })
+})