Refining engine targeting

Jocelyn Badgley (Twipped) 2020-03-07 18:04:37 -08:00
parent 4e5c14123f
commit 67b168dba1
7 changed files with 188 additions and 125 deletions
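
In short: rendering used to be routed through a shared ENGINE lookup in the resolve module, with every Markdown file wrapped in templates/page.hbs. After this commit each Page and Post picks its own render target, and the engine map gains separate PAGE and POST wrappers alongside raw TYPE-keyed renderers. A minimal sketch of the new dispatch — the standalone helper below is hypothetical, but the constants and case logic are taken from the hunks that follow:

const { TYPE, ENGINE } = require('./resolve');

// Sketch only: how a file's detected TYPE now resolves to a render ENGINE.
// In the commit this logic lives in Page#_engine() and the Post#_engine() override.
function targetEngine (type, isPost) {
  switch (type) {
  case TYPE.HANDLEBARS:
    return TYPE.HANDLEBARS;                     // compiled and rendered directly by Handlebars
  case TYPE.MARKDOWN:
    return isPost ? ENGINE.POST : ENGINE.PAGE;  // wrapped in post.hbs or page.hbs
  default:
    return ENGINE.OTHER;                        // passed through as-is
  }
}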

View File

@@ -4,11 +4,11 @@ const path = require('path');
 const fs = require('fs-extra');
 const log = require('fancy-log');
 const { minify } = require('html-minifier-terser');
-const { resolve, readFile, ENGINE } = require('./resolve');
-const handlebars = require('handlebars');
+const { resolve, readFile, ENGINE, TYPE } = require('./resolve');
+const Handlebars = require('handlebars');
 const HandlebarsKit = require('hbs-kit');
-HandlebarsKit.load(handlebars);
+HandlebarsKit.load(Handlebars);
 const slugs = require('slugify');
 const slugify = (s) => slugs(s, { remove: /[*+~.,()'"!?:@/\\]/g }).toLowerCase();
@@ -44,7 +44,7 @@ const markdownEngines = {
 function markdown (mode, input, env) {
   input = input.replace(/\{!\{([\s\S]*?)\}!\}/mg, (match, contents) => {
     try {
-      const result = handlebars.compile(contents)(env);
+      const result = Handlebars.compile(contents)(env);
       return 'æææ' + result + 'æææ';
     } catch (e) {
       log.error(e);
@@ -66,6 +66,11 @@ function markdown (mode, input, env) {
   return input ? markdownEngines[mode].render(input, env) : '';
 }

+function handlebars (input, env) {
+  const template = Handlebars.compile(input);
+  return template(env);
+}
+
 function stripIndent (input) {
   const match = input.match(/^[^\S\n]*(?=\S)/gm);
   const indent = match && Math.min(...match.map((el) => el.length));
@@ -88,47 +93,45 @@ const MINIFY_CONFIG = {
 const HANDLEBARS_PARTIALS = {
   layout: 'templates/layout.hbs',
+  page: 'templates/page.hbs',
+  post: 'templates/post.hbs',
 };

 module.exports = exports = async function (prod) {
+  const templates = {};
   for (const [ name, file ] of Object.entries(HANDLEBARS_PARTIALS)) {
     try {
       const contents = await readFile(file);
-      const template = handlebars.compile(contents.toString('utf8'));
-      handlebars.registerPartial(name, template);
+      const template = Handlebars.compile(contents.toString('utf8'));
+      templates[name] = template;
+      Handlebars.registerPartial(name, template);
     } catch (e) {
       log.error('Could not execute load partial ' + file, e);
     }
   }

-  const pageTemplateRaw = await readFile('templates/page.hbs');
-  if (!pageTemplateRaw) throw new Error('Post template was empty?');
-  try {
-    var pageTemplate = handlebars.compile(pageTemplateRaw.toString('utf8'));
-  } catch (e) {
-    log.error('Crash while loading page template', e);
-  }
-
   const revManifest = prod && await fs.readJson(resolve('rev-manifest.json')).catch(() => {}).then((r) => r || {});

   const helpers = new Injectables(prod, revManifest);
-  handlebars.registerHelper('import', helpers.import());
-  handlebars.registerHelper('markdown', helpers.markdown());
-  handlebars.registerHelper('icon', helpers.icon());
-  handlebars.registerHelper('prod', helpers.production());
-  handlebars.registerHelper('rev', helpers.rev());
+  Handlebars.registerHelper('import', helpers.import());
+  Handlebars.registerHelper('markdown', helpers.markdown());
+  Handlebars.registerHelper('icon', helpers.icon());
+  Handlebars.registerHelper('prod', helpers.production());
+  Handlebars.registerHelper('rev', helpers.rev());

   const shrink = (input) => (prod ? minify(input, MINIFY_CONFIG) : input);

   const result = {
-    [ENGINE.HANDLEBARS]: (source, env) => {
-      const template = handlebars.compile(source);
-      return shrink(template(env));
-    },
-    [ENGINE.MARKDOWN]: (source, env) => shrink(pageTemplate({ ...env, contents: markdown('full', source, env) })),
-    [ENGINE.OTHER]: (source) => shrink(source),
-    MARKDOWN_CONTENT: (source, env) => markdown('full', source, env),
-    MARKDOWN_PREVIEW: (source, env) => markdown('preview', source, env),
+    [TYPE.HANDLEBARS]: handlebars,
+    [TYPE.MARKDOWN]: (source, env) => markdown('full', source, env),
+    [TYPE.OTHER]: (source) => source,
+    [ENGINE.PAGE]: (source, env) => shrink(templates.page({ ...env, contents: markdown('full', source, env) })),
+    [ENGINE.POST]: (source, env) => shrink(templates.post({ ...env, contents: markdown('full', source, env) })),
+    [ENGINE.HTML]: (source) => shrink(source),
+    [ENGINE.OTHER]: (source) => source,
+    preview: (source, env) => markdown('preview', source, env),
   };

   return result;
@@ -206,7 +209,7 @@ class Injectables {
       contents = markdown('full', contents, data);
-      return new handlebars.SafeString(contents);
+      return new Handlebars.SafeString(contents);
     };
   }
@@ -215,14 +218,14 @@ class Injectables {
     return function (tpath, ...args) {
       const { hash, data } = args.pop();
       const value = args.shift();
-      const context = handlebars.createFrame(value || data.root);
+      const context = Handlebars.createFrame(value || data.root);
       Object.assign(context, hash || {});
       tpath = self._parsePath(tpath, data.root.local, 'hbs');

       try {
-        const contents = self._template(tpath, handlebars.compile)(context);
-        return new handlebars.SafeString(contents);
+        const contents = self._template(tpath, Handlebars.compile)(context);
+        return new Handlebars.SafeString(contents);
       } catch (e) {
         log.error('Could not execute import template ' + tpath, e);
         return '';
@@ -238,10 +241,10 @@ class Injectables {
       try {
         const contents = self._template(tpath, (s) =>
-          handlebars.compile(`<span class="svg-icon" {{#if size}}style="width:{{size}}px;height:{{size}}px"{{/if}}>${s}</span>`),
+          Handlebars.compile(`<span class="svg-icon" {{#if size}}style="width:{{size}}px;height:{{size}}px"{{/if}}>${s}</span>`),
         )({ size: hash && hash.size });
-        return new handlebars.SafeString(contents);
+        return new Handlebars.SafeString(contents);
       } catch (e) {
         log.error('Could not execute import template ' + tpath, e);
         return '';
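
The exported map above is now keyed two ways: TYPE_* entries give raw renderers (bare Handlebars, bare Markdown, pass-through) and ENGINE_* entries give layout-wrapped output through the page/post partials, plus a preview shortcut. A rough consumption sketch, assuming an async build context and a page object shaped like the ones produced later in this commit:

const getEngines = require('./engines');

// Sketch, not part of the commit: render a single page with the new map.
async function renderOne (page, state, prod) {
  const engines = await getEngines(prod);
  const html = String(engines[page.engine](page.source, state)); // full, layout-wrapped output
  const preview = String(engines.preview(page.source, state));   // short markdown preview
  return { html, preview };
}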

View File

@@ -19,22 +19,16 @@ module.exports = exports = class File {
     }

     const file = path.parse(filepath);
-    let { base: basename, name } = file;

-    this.preprocessed = false;
-    if (name[0] === '_') {
-      this.preprocessed = true;
-      file.name = name = name.slice(1);
-      file.basename = basename = basename.slice(1);
-    }
+    this._basename();

     this.kind = kind(filepath);
     this.type = type(filepath);
     this.input = filepath; // public/file.ext
     this.cwd = file.dir;
     this.ext = this.preprocessed ? file.ext : normalizedExt(file.ext);
-    this.name = name; // index, fileA, fileB
-    this.basename = basename; // index.ext, fileA.ext, fileB.ext
+    this.name = file.name; // index, fileA, fileB
+    this.basename = file.basename; // index.ext, fileA.ext, fileB.ext

     const dir = this._dir(file.dir);
     if (dir) {
@@ -60,6 +54,15 @@ module.exports = exports = class File {
     ];
   }

+  _basename (file) {
+    this.preprocessed = false;
+    if (file.name[0] === '_') {
+      this.preprocessed = true;
+      file.name = file.name.slice(1);
+      file.basename = file.basename.slice(1);
+    }
+  }
+
   _dir (dir) {
     dir = dir.split('/');
     if (dir[0] === 'public') dir.shift();

View File

@@ -8,6 +8,7 @@ const Promise = require('bluebird');
 const fs = require('fs-extra');
 const { sortBy } = require('lodash');
+const getEngines = require('./engines');
 const primeTweets = require('./page-tweets');
 const pageWriter = require('./page-writer');
 const evaluate = require('./evaluate');
@@ -27,9 +28,10 @@ exports.everything = function (prod = false) {
   async function fn () {

     // load a directory scan of the public and post folders
-    const [ PublicFiles, PostFiles ] = await Promise.all([
+    const [ PublicFiles, PostFiles, engines ] = await Promise.all([
       loadPublicFiles(),
       loadPostFiles(),
+      getEngines(prod),
     ]);

     // load data for all the files in that folder
@@ -71,8 +73,8 @@ exports.everything = function (prod = false) {
     await evaluate(tasks.flat(), cache);
     await cache.save();

-    posts = await pageWriter(pages, posts, prod);
-    await writeIndex('dist/tweets/index.json', posts.filter(Boolean), true);
+    const postIndex = await pageWriter(engines, pages, posts, prod);
+    await fs.writeFile(resolve('dist/tweets/index.json'), prod ? JSON.stringify(postIndex) : JSON.stringify(postIndex, null, 2));
   }

   fn.displayName = prod ? 'buildForProd' : 'build';

View File

@@ -1,58 +1,105 @@
 const path = require('path');
 const Promise = require('bluebird');
 const fs = require('fs-extra');
-const getEngines = require('./engines');
-const { resolve, ROOT, ENGINE } = require('./resolve');
+const { map, uniq } = require('lodash');
+const { resolve, ROOT } = require('./resolve');
 const { siteInfo } = require(resolve('package.json'));

-module.exports = exports = async function writePageContent (pages, posts, prod) {
-  const engines = await getEngines(prod);
-  const postIndex = await processPages(engines, posts, null, prod);
-  await processPages(engines, pages, posts, prod);
+module.exports = exports = async function writePageContent (engines, pages, posts, prod) {
+  const postIndex = index(posts, engines);
+  await processPages(engines, [ ...posts, ...pages ], postIndex, prod);
   return postIndex;
 };
+function index (posts, engines) {
+  posts = posts.filter((p) => !p.draft);
+  siblings(posts);
+
+  // fill in post content
+  posts.forEach((p) => { p.content = engines[p.type](p.source, pageState(p)); });
+
+  const reducedPosts = posts.map(pageJSON);
+  const authors = uniq(map(reducedPosts, 'author').flat()).sort((a, b) => (a.toUpperCase() > b.toUpperCase() ? 1 : -1));
+
+  const tagMap = reducedPosts.reduce((o, p) => Object.assign(o, p.tags), {});
+  const tags = Object.keys(tagMap).sort().reduce((result, tagslug) => {
+    result[tagslug] = tagMap[tagslug];
+    return result;
+  }, {});
+
+  return {
+    posts: reducedPosts,
+    authors,
+    tags,
+    latest: posts[0],
+  };
+}
+
+function siblings (posts) {
+  let first, prev, next, last;
+  for (let i = 0; i < posts.length; i++) {
+    const post = posts[i];
+    first = i > 0 && posts[0];
+    prev = posts[i - 1] || false;
+    next = posts[i + 1] || false;
+    last = i < posts.length - 1 && posts[posts.length - 1];
+    post.siblings = {
+      first: first && first.url,
+      prev: prev && prev.url,
+      next: next && next.url,
+      last: last && last.url,
+    };
+  }
+}
+
+function pageState (page, posts) {
+  return {
+    ...page,
+    meta: { ...page.meta, ...page },
+    page: {
+      domain: siteInfo.domain,
+      title: page.meta.title
+        ? (page.meta.title + (page.meta.subtitle ? ', ' + page.meta.subtitle : '') + ' :: ' + siteInfo.title)
+        : siteInfo.title,
+      description: page.meta.description || siteInfo.description,
+    },
+    site: siteInfo,
+    local: {
+      cwd: resolve(page.cwd),
+      root: ROOT,
+      basename: page.basename,
+    },
+    posts,
+  };
+}
+
+function pageJSON (post) {
+  return {
+    url: post.url,
+    fullurl: post.fullurl,
+    json: '/' + post.json,
+    title: post.meta.title,
+    subtitle: post.meta.subtitle,
+    description: post.meta.description,
+    date: post.dateCreated,
+    titlecard: post.titlecard,
+    tags: post.meta.tags,
+    author: post.meta.author,
+    siblings: post.sibling,
+  };
+}
+
 function processPages (engines, pages, posts, prod) {
   return Promise.map(pages, async (page) => {
-    // page = new Page(page);
-    var data = {
-      ...page,
-      meta: { ...page.meta, ...page },
-      page: {
-        domain: siteInfo.domain,
-        title: page.meta.title
-          ? (page.meta.title + (page.meta.subtitle ? ', ' + page.meta.subtitle : '') + ' :: ' + siteInfo.title)
-          : siteInfo.title,
-        description: page.meta.description || siteInfo.description,
-      },
-      site: siteInfo,
-      local: {
-        cwd: resolve(page.cwd),
-        root: ROOT,
-        basename: page.basename,
-      },
-      posts,
-    };
-
-    const json = {
-      url: page.url,
-      fullurl: page.fullurl,
-      title: page.meta.title,
-      subtitle: page.meta.subtitle,
-      description: page.meta.description,
-      date: page.dateCreated,
-      titlecard: page.titlecard,
-      tags: page.meta.tags,
-      author: page.meta.author,
-    };
-
-    const html = String(engines[page.engine](data.source, data));
-    if (page.engine === ENGINE.MARKDOWN) {
-      json.preview = String(engines.MARKDOWN_PREVIEW(data.source, data));
-      page.content = String(engines.MARKDOWN_CONTENT(data.source, data));
-      json.content = page.content;
-    }
+    const state = pageState(page, posts);
+    const json = pageJSON(page);
+    const html = String(engines[page.engine](page.source, state));
+    json.content = page.content;

     const output = resolve('dist', page.out);
     await fs.ensureDir(path.dirname(output));
@@ -62,16 +109,5 @@ function processPages (engines, pages, posts, prod) {
         prod ? JSON.stringify(json) : JSON.stringify(json, null, 2),
       )),
     ]);
-
-    return !page.draft && {
-      url: page.url,
-      json: page.json,
-      title: page.meta.title,
-      subtitle: page.meta.subtitle,
-      description: page.meta.description,
-      date: page.dateCreated,
-      tags: page.meta.tags,
-      author: page.meta.author,
-    };
   });
 }
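
For reference, the object this file now returns to the build task (and which gets serialized to dist/tweets/index.json) looks roughly like the following. Field names come from index() and pageJSON() above; every value below is an invented placeholder:

// Illustrative shape only; all values are made up.
const postIndex = {
  posts: [
    {
      url: '/2020/03/example-post',
      fullurl: 'https://example.com/2020/03/example-post',
      json: '/2020/03/example-post.json',
      title: 'Example post',
      subtitle: '',
      description: '',
      date: '2020-03-07T00:00:00.000Z',
      titlecard: '/tweets/titlecard.png',
      tags: { 'example-tag': 'Example Tag' },
      author: 'Author Name',
      siblings: { first: false, prev: false, next: '/2020/02/older-post', last: '/2019/01/oldest-post' },
    },
  ],
  authors: [ 'Author Name' ],             // unique, sorted case-insensitively
  tags: { 'example-tag': 'Example Tag' }, // slug -> display label, sorted by slug
  latest: {},                             // the full, non-reduced most recent post
};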

View File

@@ -6,7 +6,7 @@ const log = require('fancy-log');
 const File = require('./file');
 const actions = require('./actions');
 const { URL } = require('url');
-const { resolve, readFile, isCleanUrl, ENGINE } = require('./resolve');
+const { resolve, readFile, isCleanUrl, TYPE, ENGINE } = require('./resolve');
 const { isObject, isString } = require('./lib/util');
 const pkg = require(resolve('package.json'));
@@ -29,9 +29,21 @@ module.exports = exports = class Page extends File {
       'dateModified',
       'classes',
       'flags',
+      'siblings',
     );

-    this.engine = ENGINE[this.type] || ENGINE.COPY;
+    this.engine = this._engine();
+  }
+
+  _engine () {
+    switch (this.type) {
+    case TYPE.HANDLEBARS:
+      return TYPE.HANDLEBARS;
+    case TYPE.MARKDOWN:
+      return ENGINE.PAGE;
+    default:
+      return ENGINE.OTHER;
+    }
   }

   _out () {
View File

@@ -1,7 +1,7 @@
 const path = require('path');
 const { without } = require('lodash');
-const { resolve, isCleanUrl } = require('./resolve');
+const { resolve, isCleanUrl, TYPE, ENGINE } = require('./resolve');
 const Page = require('./page');
 const slugs = require('slugify');
 const slugify = (s) => slugs(s, { remove: /[*+~.,()'"!?:@/\\]/g }).toLowerCase();
@@ -17,6 +17,17 @@ function arrayify (input) {
 module.exports = exports = class Post extends Page {

+  _engine () {
+    switch (this.type) {
+    case TYPE.HANDLEBARS:
+      return TYPE.HANDLEBARS;
+    case TYPE.MARKDOWN:
+      return ENGINE.POST;
+    default:
+      return ENGINE.OTHER;
+    }
+  }
+
   _dir (dir) {
     // if the file name matches the postmatch pattern, then this needs to be /p/ file
     const match = this.name.match(postmatch);
@@ -61,6 +72,8 @@ module.exports = exports = class Post extends Page {
   _parse (...args) {
     super._parse(...args);

+    if (!this.titlecard) this.titlecard = '/tweets/titlecard.png';
+
     this.meta.tags = (this.meta.tags || []).reduce((result, tag) => {
       result[slugify(tag)] = tag;
       return result;
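
Two things stand out in this hunk: posts now receive a default titlecard, and meta.tags is normalized into a slug-to-label map, which is the shape the tag index built in page-writer consumes. An illustrative example using the slugify config declared at the top of this file:

// Sketch only: how the reduce above reshapes meta.tags.
const tags = [ 'Pet Peeves', 'Node.js' ].reduce((result, tag) => {
  result[slugify(tag)] = tag;
  return result;
}, {});
// => { 'pet-peeves': 'Pet Peeves', 'nodejs': 'Node.js' }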

View File

@@ -78,13 +78,13 @@ exports.isCleanUrl = is(HBS, MD);
 const TYPE = exports.TYPE = {
-  IMAGE: 'IMAGE',
-  VIDEO: 'VIDEO',
-  HANDLEBARS: 'HANDLEBARS',
-  MARKDOWN: 'MARKDOWN',
-  SCRIPT: 'SCRIPT',
-  STYLE: 'STYLE',
-  OTHER: 'OTHER',
+  IMAGE: 'TYPE_IMAGE',
+  VIDEO: 'TYPE_VIDEO',
+  HANDLEBARS: 'TYPE_HANDLEBARS',
+  MARKDOWN: 'TYPE_MARKDOWN',
+  SCRIPT: 'TYPE_SCRIPT',
+  STYLE: 'TYPE_STYLE',
+  OTHER: 'TYPE_OTHER',
 };

 exports.type = dictMatch({
@@ -99,11 +99,11 @@ exports.type = dictMatch({
 const KIND = exports.KIND = {
-  PAGE: 'PAGE',
-  POST: 'POST',
-  ASSET: 'ASSET',
-  ARTIFACT: 'ARTIFACT',
-  OTHER: 'OTHER',
+  PAGE: 'KIND_PAGE',
+  POST: 'KIND_POST',
+  ASSET: 'KIND_ASSET',
+  ARTIFACT: 'KIND_ARTIFACT',
+  OTHER: 'KIND_OTHER',
 };

 exports.kind = dictMatch({
@@ -114,19 +114,13 @@ exports.kind = dictMatch({
-const ENGINE = exports.ENGINE = {
-  HANDLEBARS: 'HANDLEBARS',
-  MARKDOWN: 'MARKDOWN',
-  COPY: 'COPY',
+exports.ENGINE = {
+  HTML: 'ENGINE_HTML',
+  PAGE: 'ENGINE_PAGE',
+  POST: 'ENGINE_POST',
+  OTHER: 'ENGINE_OTHER',
 };

-exports.engine = dictMatch({
-  [ENGINE.HANDLEBARS]: is(XML, HBS, HTML),
-  [ENGINE.MARKDOWN]: is(MD),
-}, ENGINE.COPY);
-
 exports.readFile = function readFile (fpath) {
   fpath = exports.resolve(fpath);
   return fs.readFile(fpath).catch((err) => {