diff --git a/build/engines.js b/build/engines.js
index f4f8c2a..b6e60af 100644
--- a/build/engines.js
+++ b/build/engines.js
@@ -4,11 +4,11 @@ const path = require('path');
const fs = require('fs-extra');
const log = require('fancy-log');
const { minify } = require('html-minifier-terser');
-const { resolve, readFile, ENGINE } = require('./resolve');
+const { resolve, readFile, ENGINE, TYPE } = require('./resolve');
-const handlebars = require('handlebars');
+const Handlebars = require('handlebars');
const HandlebarsKit = require('hbs-kit');
-HandlebarsKit.load(handlebars);
+HandlebarsKit.load(Handlebars);
const slugs = require('slugify');
const slugify = (s) => slugs(s, { remove: /[*+~.,()'"!?:@/\\]/g }).toLowerCase();
@@ -44,7 +44,7 @@ const markdownEngines = {
function markdown (mode, input, env) {
input = input.replace(/\{!\{([\s\S]*?)\}!\}/mg, (match, contents) => {
try {
- const result = handlebars.compile(contents)(env);
+ const result = Handlebars.compile(contents)(env);
return 'æææ' + result + 'æææ';
} catch (e) {
log.error(e);
@@ -66,6 +66,11 @@ function markdown (mode, input, env) {
return input ? markdownEngines[mode].render(input, env) : '';
}
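+// Renders a raw Handlebars template string against the given environment.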
+function handlebars (input, env) {
+ const template = Handlebars.compile(input);
+ return template(env);
+}
+
function stripIndent (input) {
const match = input.match(/^[^\S\n]*(?=\S)/gm);
const indent = match && Math.min(...match.map((el) => el.length));
@@ -88,47 +93,45 @@ const MINIFY_CONFIG = {
const HANDLEBARS_PARTIALS = {
layout: 'templates/layout.hbs',
+ page: 'templates/page.hbs',
+ post: 'templates/post.hbs',
};
module.exports = exports = async function (prod) {
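+ // compiled partial templates, kept by name so the page/post engines below can call them directly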
+ const templates = {};
for (const [ name, file ] of Object.entries(HANDLEBARS_PARTIALS)) {
try {
const contents = await readFile(file);
- const template = handlebars.compile(contents.toString('utf8'));
- handlebars.registerPartial(name, template);
+ const template = Handlebars.compile(contents.toString('utf8'));
+ templates[name] = template;
+ Handlebars.registerPartial(name, template);
} catch (e) {
log.error('Could not execute load partial ' + file, e);
}
}
- const pageTemplateRaw = await readFile('templates/page.hbs');
- if (!pageTemplateRaw) throw new Error('Post template was empty?');
- try {
- var pageTemplate = handlebars.compile(pageTemplateRaw.toString('utf8'));
- } catch (e) {
- log.error('Crash while loading page template', e);
- }
-
const revManifest = prod && await fs.readJson(resolve('rev-manifest.json')).catch(() => {}).then((r) => r || {});
const helpers = new Injectables(prod, revManifest);
- handlebars.registerHelper('import', helpers.import());
- handlebars.registerHelper('markdown', helpers.markdown());
- handlebars.registerHelper('icon', helpers.icon());
- handlebars.registerHelper('prod', helpers.production());
- handlebars.registerHelper('rev', helpers.rev());
+ Handlebars.registerHelper('import', helpers.import());
+ Handlebars.registerHelper('markdown', helpers.markdown());
+ Handlebars.registerHelper('icon', helpers.icon());
+ Handlebars.registerHelper('prod', helpers.production());
+ Handlebars.registerHelper('rev', helpers.rev());
const shrink = (input) => (prod ? minify(input, MINIFY_CONFIG) : input);
const result = {
- [ENGINE.HANDLEBARS]: (source, env) => {
- const template = handlebars.compile(source);
- return shrink(template(env));
- },
- [ENGINE.MARKDOWN]: (source, env) => shrink(pageTemplate({ ...env, contents: markdown('full', source, env) })),
- [ENGINE.OTHER]: (source) => shrink(source),
- MARKDOWN_CONTENT: (source, env) => markdown('full', source, env),
- MARKDOWN_PREVIEW: (source, env) => markdown('preview', source, env),
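+ // TYPE.* renderers emit raw content; ENGINE.* renderers produce final page output (PAGE/POST/HTML are minified in prod).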
+ [TYPE.HANDLEBARS]: handlebars,
+ [TYPE.MARKDOWN]: (source, env) => markdown('full', source, env),
+ [TYPE.OTHER]: (source) => source,
+
+ [ENGINE.PAGE]: (source, env) => shrink(templates.page({ ...env, contents: markdown('full', source, env) })),
+ [ENGINE.POST]: (source, env) => shrink(templates.post({ ...env, contents: markdown('full', source, env) })),
+ [ENGINE.HTML]: (source) => shrink(source),
+ [ENGINE.OTHER]: (source) => source,
+
+ preview: (source, env) => markdown('preview', source, env),
};
return result;
@@ -206,7 +209,7 @@ class Injectables {
contents = markdown('full', contents, data);
- return new handlebars.SafeString(contents);
+ return new Handlebars.SafeString(contents);
};
}
@@ -215,14 +218,14 @@ class Injectables {
return function (tpath, ...args) {
const { hash, data } = args.pop();
const value = args.shift();
- const context = handlebars.createFrame(value || data.root);
+ const context = Handlebars.createFrame(value || data.root);
Object.assign(context, hash || {});
tpath = self._parsePath(tpath, data.root.local, 'hbs');
try {
- const contents = self._template(tpath, handlebars.compile)(context);
- return new handlebars.SafeString(contents);
+ const contents = self._template(tpath, Handlebars.compile)(context);
+ return new Handlebars.SafeString(contents);
} catch (e) {
log.error('Could not execute import template ' + tpath, e);
return '';
@@ -238,10 +241,10 @@ class Injectables {
try {
const contents = self._template(tpath, (s) =>
- handlebars.compile(`${s}`),
+ Handlebars.compile(`${s}`),
)({ size: hash && hash.size });
- return new handlebars.SafeString(contents);
+ return new Handlebars.SafeString(contents);
} catch (e) {
log.error('Could not execute import template ' + tpath, e);
return '';
diff --git a/build/file.js b/build/file.js
index 498cfcd..046e215 100644
--- a/build/file.js
+++ b/build/file.js
@@ -19,22 +19,16 @@ module.exports = exports = class File {
}
const file = path.parse(filepath);
- let { base: basename, name } = file;
- this.preprocessed = false;
- if (name[0] === '_') {
- this.preprocessed = true;
- file.name = name = name.slice(1);
- file.basename = basename = basename.slice(1);
- }
+ this._basename(file);
this.kind = kind(filepath);
this.type = type(filepath);
this.input = filepath; // public/file.ext
this.cwd = file.dir;
this.ext = this.preprocessed ? file.ext : normalizedExt(file.ext);
- this.name = name; // index, fileA, fileB
- this.basename = basename; // index.ext, fileA.ext, fileB.ext
+ this.name = file.name; // index, fileA, fileB
+ this.basename = file.base; // index.ext, fileA.ext, fileB.ext
const dir = this._dir(file.dir);
if (dir) {
@@ -60,6 +54,15 @@ module.exports = exports = class File {
];
}
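+ // Detects the leading-underscore naming convention, strips it from the parsed
+ // name and base, and flags the file as preprocessed.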
+ _basename (file) {
+ this.preprocessed = false;
+ if (file.name[0] === '_') {
+ this.preprocessed = true;
+ file.name = file.name.slice(1);
+ file.base = file.base.slice(1);
+ }
+ }
+
_dir (dir) {
dir = dir.split('/');
if (dir[0] === 'public') dir.shift();
diff --git a/build/index.js b/build/index.js
index f118963..53c8dcc 100644
--- a/build/index.js
+++ b/build/index.js
@@ -8,6 +8,7 @@ const Promise = require('bluebird');
const fs = require('fs-extra');
const { sortBy } = require('lodash');
+const getEngines = require('./engines');
const primeTweets = require('./page-tweets');
const pageWriter = require('./page-writer');
const evaluate = require('./evaluate');
@@ -27,9 +28,10 @@ exports.everything = function (prod = false) {
async function fn () {
// load a directory scan of the public and post folders
- const [ PublicFiles, PostFiles ] = await Promise.all([
+ const [ PublicFiles, PostFiles, engines ] = await Promise.all([
loadPublicFiles(),
loadPostFiles(),
+ getEngines(prod),
]);
// load data for all the files in that folder
@@ -71,8 +73,8 @@ exports.everything = function (prod = false) {
await evaluate(tasks.flat(), cache);
await cache.save();
- posts = await pageWriter(pages, posts, prod);
- await writeIndex('dist/tweets/index.json', posts.filter(Boolean), true);
+ const postIndex = await pageWriter(engines, pages, posts, prod);
+ await fs.writeFile(resolve('dist/tweets/index.json'), prod ? JSON.stringify(postIndex) : JSON.stringify(postIndex, null, 2));
}
fn.displayName = prod ? 'buildForProd' : 'build';
diff --git a/build/page-writer.js b/build/page-writer.js
index a538391..1639567 100644
--- a/build/page-writer.js
+++ b/build/page-writer.js
@@ -1,58 +1,105 @@
const path = require('path');
const Promise = require('bluebird');
const fs = require('fs-extra');
-const getEngines = require('./engines');
-const { resolve, ROOT, ENGINE } = require('./resolve');
+const { map, uniq } = require('lodash');
+const { resolve, ROOT } = require('./resolve');
const { siteInfo } = require(resolve('package.json'));
-module.exports = exports = async function writePageContent (pages, posts, prod) {
- const engines = await getEngines(prod);
- const postIndex = await processPages(engines, posts, null, prod);
- await processPages(engines, pages, posts, prod);
+module.exports = exports = async function writePageContent (engines, pages, posts, prod) {
+ const postIndex = index(posts, engines);
+ await processPages(engines, [ ...posts, ...pages ], postIndex, prod);
return postIndex;
};
+function index (posts, engines) {
+ posts = posts.filter((p) => !p.draft);
+
+ siblings(posts);
+
+ // fill in post content
+ posts.forEach((p) => { p.content = engines[p.type](p.source, pageState(p)); });
+
+ const reducedPosts = posts.map(pageJSON);
+
+ const authors = uniq(map(reducedPosts, 'author').flat()).sort((a, b) => (a.toUpperCase() > b.toUpperCase() ? 1 : -1));
+
+ const tagMap = reducedPosts.reduce((o, p) => Object.assign(o, p.tags), {});
+ const tags = Object.keys(tagMap).sort().reduce((result, tagslug) => {
+ result[tagslug] = tagMap[tagslug];
+ return result;
+ }, {});
+
+ return {
+ posts: reducedPosts,
+ authors,
+ tags,
+ latest: posts[0],
+ };
+}
+
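+// Attaches first/prev/next/last sibling URLs to each post; slots that would point at the post itself stay false.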
+function siblings (posts) {
+ let first, prev, next, last;
+ for (let i = 0; i < posts.length; i++) {
+ const post = posts[i];
+ first = i > 0 && posts[0];
+ prev = posts[i - 1] || false;
+ next = posts[i + 1] || false;
+ last = i < posts.length - 1 && posts[posts.length - 1];
+
+ post.siblings = {
+ first: first && first.url,
+ prev: prev && prev.url,
+ next: next && next.url,
+ last: last && last.url,
+ };
+ }
+}
+
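+// Assembles the template rendering context for a page: merged meta, site info, local path details, and the post index.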
+function pageState (page, posts) {
+ return {
+ ...page,
+ meta: { ...page.meta, ...page },
+ page: {
+ domain: siteInfo.domain,
+ title: page.meta.title
+ ? (page.meta.title + (page.meta.subtitle ? ', ' + page.meta.subtitle : '') + ' :: ' + siteInfo.title)
+ : siteInfo.title,
+ description: page.meta.description || siteInfo.description,
+ },
+ site: siteInfo,
+ local: {
+ cwd: resolve(page.cwd),
+ root: ROOT,
+ basename: page.basename,
+ },
+ posts,
+ };
+}
+
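+// Reduces a page/post to the fields persisted in its JSON artifact and the post index.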
+function pageJSON (post) {
+ return {
+ url: post.url,
+ fullurl: post.fullurl,
+ json: '/' + post.json,
+ title: post.meta.title,
+ subtitle: post.meta.subtitle,
+ description: post.meta.description,
+ date: post.dateCreated,
+ titlecard: post.titlecard,
+ tags: post.meta.tags,
+ author: post.meta.author,
+ siblings: post.siblings,
+ };
+}
+
function processPages (engines, pages, posts, prod) {
return Promise.map(pages, async (page) => {
- // page = new Page(page);
- var data = {
- ...page,
- meta: { ...page.meta, ...page },
- page: {
- domain: siteInfo.domain,
- title: page.meta.title
- ? (page.meta.title + (page.meta.subtitle ? ', ' + page.meta.subtitle : '') + ' :: ' + siteInfo.title)
- : siteInfo.title,
- description: page.meta.description || siteInfo.description,
- },
- site: siteInfo,
- local: {
- cwd: resolve(page.cwd),
- root: ROOT,
- basename: page.basename,
- },
- posts,
- };
+ const state = pageState(page, posts);
+ const json = pageJSON(page);
+ const html = String(engines[page.engine](page.source, state));
- const json = {
- url: page.url,
- fullurl: page.fullurl,
- title: page.meta.title,
- subtitle: page.meta.subtitle,
- description: page.meta.description,
- date: page.dateCreated,
- titlecard: page.titlecard,
- tags: page.meta.tags,
- author: page.meta.author,
- };
-
- const html = String(engines[page.engine](data.source, data));
- if (page.engine === ENGINE.MARKDOWN) {
- json.preview = String(engines.MARKDOWN_PREVIEW(data.source, data));
- page.content = String(engines.MARKDOWN_CONTENT(data.source, data));
- json.content = page.content;
- }
+ json.content = page.content;
const output = resolve('dist', page.out);
await fs.ensureDir(path.dirname(output));
@@ -62,16 +109,5 @@ function processPages (engines, pages, posts, prod) {
prod ? JSON.stringify(json) : JSON.stringify(json, null, 2),
)),
]);
-
- return !page.draft && {
- url: page.url,
- json: page.json,
- title: page.meta.title,
- subtitle: page.meta.subtitle,
- description: page.meta.description,
- date: page.dateCreated,
- tags: page.meta.tags,
- author: page.meta.author,
- };
});
}
diff --git a/build/page.js b/build/page.js
index 51cb671..7b1f895 100644
--- a/build/page.js
+++ b/build/page.js
@@ -6,7 +6,7 @@ const log = require('fancy-log');
const File = require('./file');
const actions = require('./actions');
const { URL } = require('url');
-const { resolve, readFile, isCleanUrl, ENGINE } = require('./resolve');
+const { resolve, readFile, isCleanUrl, TYPE, ENGINE } = require('./resolve');
const { isObject, isString } = require('./lib/util');
const pkg = require(resolve('package.json'));
@@ -29,9 +29,21 @@ module.exports = exports = class Page extends File {
'dateModified',
'classes',
'flags',
+ 'siblings',
);
- this.engine = ENGINE[this.type] || ENGINE.COPY;
+ this.engine = this._engine();
+ }
+
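+ // Picks the renderer: handlebars sources render directly, markdown renders through the page layout, anything else passes through untouched.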
+ _engine () {
+ switch (this.type) {
+ case TYPE.HANDLEBARS:
+ return TYPE.HANDLEBARS;
+ case TYPE.MARKDOWN:
+ return ENGINE.PAGE;
+ default:
+ return ENGINE.OTHER;
+ }
}
_out () {
diff --git a/build/post.js b/build/post.js
index 50cbb80..52d1795 100644
--- a/build/post.js
+++ b/build/post.js
@@ -1,7 +1,7 @@
const path = require('path');
const { without } = require('lodash');
-const { resolve, isCleanUrl } = require('./resolve');
+const { resolve, isCleanUrl, TYPE, ENGINE } = require('./resolve');
const Page = require('./page');
const slugs = require('slugify');
const slugify = (s) => slugs(s, { remove: /[*+~.,()'"!?:@/\\]/g }).toLowerCase();
@@ -17,6 +17,17 @@ function arrayify (input) {
module.exports = exports = class Post extends Page {
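+ // Same selection as Page#_engine, except markdown posts render through the post layout.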
+ _engine () {
+ switch (this.type) {
+ case TYPE.HANDLEBARS:
+ return TYPE.HANDLEBARS;
+ case TYPE.MARKDOWN:
+ return ENGINE.POST;
+ default:
+ return ENGINE.OTHER;
+ }
+ }
+
_dir (dir) {
// if the file name matches the postmatch pattern, then this needs to be /p/ file
const match = this.name.match(postmatch);
@@ -61,6 +72,8 @@ module.exports = exports = class Post extends Page {
_parse (...args) {
super._parse(...args);
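+ // fall back to the default tweet titlecard when the post does not provide one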
+ if (!this.titlecard) this.titlecard = '/tweets/titlecard.png';
+
this.meta.tags = (this.meta.tags || []).reduce((result, tag) => {
result[slugify(tag)] = tag;
return result;
diff --git a/build/resolve.js b/build/resolve.js
index edf3943..91dc93b 100644
--- a/build/resolve.js
+++ b/build/resolve.js
@@ -78,13 +78,13 @@ exports.isCleanUrl = is(HBS, MD);
const TYPE = exports.TYPE = {
- IMAGE: 'IMAGE',
- VIDEO: 'VIDEO',
- HANDLEBARS: 'HANDLEBARS',
- MARKDOWN: 'MARKDOWN',
- SCRIPT: 'SCRIPT',
- STYLE: 'STYLE',
- OTHER: 'OTHER',
+ IMAGE: 'TYPE_IMAGE',
+ VIDEO: 'TYPE_VIDEO',
+ HANDLEBARS: 'TYPE_HANDLEBARS',
+ MARKDOWN: 'TYPE_MARKDOWN',
+ SCRIPT: 'TYPE_SCRIPT',
+ STYLE: 'TYPE_STYLE',
+ OTHER: 'TYPE_OTHER',
};
exports.type = dictMatch({
@@ -99,11 +99,11 @@ exports.type = dictMatch({
const KIND = exports.KIND = {
- PAGE: 'PAGE',
- POST: 'POST',
- ASSET: 'ASSET',
- ARTIFACT: 'ARTIFACT',
- OTHER: 'OTHER',
+ PAGE: 'KIND_PAGE',
+ POST: 'KIND_POST',
+ ASSET: 'KIND_ASSET',
+ ARTIFACT: 'KIND_ARTIFACT',
+ OTHER: 'KIND_OTHER',
};
exports.kind = dictMatch({
@@ -114,19 +114,13 @@ exports.kind = dictMatch({
-const ENGINE = exports.ENGINE = {
- HANDLEBARS: 'HANDLEBARS',
- MARKDOWN: 'MARKDOWN',
- COPY: 'COPY',
+exports.ENGINE = {
+ HTML: 'ENGINE_HTML',
+ PAGE: 'ENGINE_PAGE',
+ POST: 'ENGINE_POST',
+ OTHER: 'ENGINE_OTHER',
};
-exports.engine = dictMatch({
- [ENGINE.HANDLEBARS]: is(XML, HBS, HTML),
- [ENGINE.MARKDOWN]: is(MD),
-}, ENGINE.COPY);
-
-
-
exports.readFile = function readFile (fpath) {
fpath = exports.resolve(fpath);
return fs.readFile(fpath).catch((err) => {