More burndown.

This commit is contained in:
Jocelyn Badgley (Twipped) 2020-02-27 18:57:39 -08:00
parent 2df7574697
commit e95f2cf3db
17 changed files with 736 additions and 528 deletions

View File

@ -1,72 +1,29 @@
const { pick } = require('lodash');
const actions = require('./actions');
const path = require('path');
const { pick } = require('lodash');
const actions = require('./actions');
const File = require('./file');
const { TYPE } = require('./resolve');
const getImageDimensions = require('../lib/dimensions');
const getVideoDimensions = require('get-video-dimensions');
const JPG = '.jpg';
const JPEG = '.jpeg';
const PNG = '.png';
const GIF = '.gif';
const MP4 = '.mp4';
const M4V = '.m4v';
const FILETYPE = {
[JPG]: 'jpeg',
[JPEG]: 'jpeg',
[PNG]: 'png',
[GIF]: 'gif',
[MP4]: 'mp4',
[M4V]: 'mp4',
};
const RESOLUTIONS = [ 2048, 1024, 768, 576, 300, 100 ];
module.exports = exports = class Asset {
module.exports = exports = class Asset extends File {
constructor (filepath) {
const file = path.parse(filepath);
let { base: basename, name } = file;
super(filepath);
this.preprocessed = false;
if (name[0] === '_') {
this.preprocessed = true;
file.name = name = name.slice(1);
file.basename = basename = basename.slice(1);
}
this.type = FILETYPE[file.ext] || file.ext.slice(1);
if ([ JPG, JPEG, PNG, GIF ].includes(file.ext)) {
this.kind = 'image';
} else if ([ MP4, M4V ].includes(file.ext)) {
this.kind = 'video';
} else {
this.kind = 'raw';
}
// remove the pages root and any _images segment from the dir
const dir = file.dir.split('/');
if (dir[0] === 'pages') dir.shift();
const i = dir.indexOf('_images');
if (i > -1) dir.splice(i, 1);
this.input = filepath; // pages/file.ext
this.base = path.join(...dir); // '', 'folder', 'folder/subfolder'
this.dir = path.join('/', ...dir); // /, /folder, /folder/subfolder
this.name = name; // index, fileA, fileB
this.basename = basename; // index.ext, fileA.ext, fileB.ext
this.ext = file.ext;
this.out = path.join(this.base, `${this.name}${this.preprocessed ? this.ext : '.' + this.type}`);
this.url = path.join(this.dir, `${this.name}${this.preprocessed ? this.ext : '.' + this.type}`);
this.serializable.push(
'dimensions',
'sizes',
);
}
load () {
switch (this.kind) {
case 'video': return this.loadVideo();
case 'image': return this.loadImage();
switch (this.type) {
case TYPE.VIDEO: return this.loadVideo();
case TYPE.IMAGE: return this.loadImage();
default:
}
}
@ -111,7 +68,7 @@ module.exports = exports = class Asset {
for (const w of RESOLUTIONS) {
if (w > width) continue;
const name = `${this.name}.${w}w.${this.type}`;
const name = `${this.name}.${w}w${this.ext}`;
this.sizes.push({
output: path.join(this.base, name),
url: path.join(this.dir, name),
@ -156,27 +113,12 @@ module.exports = exports = class Asset {
return this;
}
toJson () {
return pick(this, [
'preprocessed',
'type',
'kind',
'input',
'base',
'dir',
'name',
'basename',
'ext',
'dimensions',
]);
}
webready () {
const { kind, name } = this;
const { type, name, sizes } = this;
return {
kind,
type,
name,
sizes: this.sizes.map((s) => pick(s, [ 'url', 'width', 'height' ])),
sizes: sizes.map((s) => pick(s, [ 'url', 'width', 'height' ])),
};
}
@ -184,19 +126,10 @@ module.exports = exports = class Asset {
return this.sizes.map(({ output, width }) => ({
input: this.input,
output,
format: this.preprocessed ? undefined : this.type,
format: this.preprocessed ? undefined : this.ext.slice(1),
width: this.preprocessed ? undefined : width,
action: this.preprocessed ? actions.copy : actions.image,
}));
}
};
exports.JPG = JPG;
exports.JPEG = JPEG;
exports.PNG = PNG;
exports.GIF = GIF;
exports.MP4 = MP4;
exports.M4V = M4V;
exports.FILETYPE = FILETYPE;
exports.RESOLUTIONS = RESOLUTIONS;

View File

@ -1,45 +0,0 @@
const glob = require('../lib/glob');
const { keyBy, filter, get, set, memoize } = require('lodash');
const { relative, ROOT } = require('./resolve');
const Asset = require('./asset');
module.exports = exports = async function createAssetFinder () {
const files = await glob('pages/**/*.{jpeg,jpg,png,gif,mp4}', { cwd: ROOT });
const map = {};
const assets = (await Promise.all(files.map(async (filepath) => {
const asset = new Asset(relative(filepath));
await asset.load();
set(map, [ ...asset.base.split('/'), asset.name ], asset);
return asset;
}))).filter(Boolean);
Object.freeze(map);
function within (dir) {
const subset = filter(assets, { dir });
return {
get titlecard () {
return get(filter(subset, { name: 'titlecard' }), [ 0, 'url' ]);
},
get assets () {
return keyBy(subset.map((a) => a.webready()), 'name');
},
get all () {
return [ ...subset ];
},
};
}
return {
map,
for: memoize(within),
get tasks () {
return assets.map((a) => a.tasks()).flat(1);
},
get all () {
return [ ...assets ];
},
};
};
exports.Asset = Asset;

View File

@ -4,7 +4,7 @@ const path = require('path');
const fs = require('fs-extra');
const log = require('fancy-log');
const { minify } = require('html-minifier-terser');
const { resolve, readFile } = require('./resolve');
const { resolve, readFile, ENGINE } = require('./resolve');
const handlebars = require('handlebars');
const HandlebarsKit = require('hbs-kit');
@ -121,18 +121,15 @@ module.exports = exports = async function (prod) {
const shrink = (input) => (prod ? minify(input, MINIFY_CONFIG) : input);
const result = {
hbs: (source, env) => {
[ENGINE.HANDLEBARS]: (source, env) => {
const template = handlebars.compile(source);
return shrink(template(env));
},
md: (source, env) => shrink(pageTemplate({ ...env, contents: markdown('full', source, env) })),
raw: (source) => shrink(source),
preview: (source, env) => markdown('preview', source, env),
[ENGINE.MARKDOWN]: (source, env) => shrink(pageTemplate({ ...env, contents: markdown('full', source, env) })),
[ENGINE.OTHER]: (source) => shrink(source),
PREVIEW: (source, env) => markdown('preview', source, env),
};
// result.handlebars.engine = handlebars;
// result.markdown.engine = markdownEngines.full;
return result;
};
@ -153,11 +150,11 @@ class Injectables {
}
_template (tpath, make) {
if (!tpath) throw new Error('Received an empty template path: ' + tpath);
if (this.injections[tpath]) return this.injections[tpath];
if (!fs.existsSync(tpath)) {
log.error('Injectable does not exist: ' + tpath);
return '';
throw new Error('Injectable does not exist: ' + tpath);
}
let contents;
@ -226,7 +223,7 @@ class Injectables {
const contents = self._template(tpath, handlebars.compile)(context);
return new handlebars.SafeString(contents);
} catch (e) {
log.error('Could not execute import template ' + path.relative(ROOT, tpath), e);
log.error('Could not execute import template ' + tpath, e);
return '';
}
};
@ -245,7 +242,7 @@ class Injectables {
return new handlebars.SafeString(contents);
} catch (e) {
log.error('Could not execute import template ' + path.relative(ROOT, tpath), e);
log.error('Could not execute import template ' + tpath, e);
return '';
}
};

View File

@ -1,4 +1,4 @@
const { sortBy } = require('lodash');
const { sortBy, uniqBy } = require('lodash');
const { resolve } = require('./resolve');
const log = require('fancy-log');
const Promise = require('bluebird');
@ -16,7 +16,10 @@ const LOG = {
module.exports = exports = async function process (tasks, cache) {
const lastSeen = new Date();
await Promise.map(sortBy(tasks, [ 'input', 'output' ]), async (task) => {
tasks = uniqBy(tasks, 'output');
tasks = sortBy(tasks, [ 'input', 'output' ]);
await Promise.map(tasks, async (task) => {
let result;
let status = await cache.get(task);
const { input, output } = task;

91
gulp/content/file.js Normal file
View File

@ -0,0 +1,91 @@
const path = require('path');
const { pick } = require('lodash');
const {
normalizedExt,
kind,
type,
} = require('./resolve');
const actions = require('./actions');
module.exports = exports = class File {
constructor (filepath) {
if (filepath && typeof filepath === 'object') {
// we've been passed a json object, treat as serialized Page
Object.assign(this, filepath);
return this;
}
const file = path.parse(filepath);
let { base: basename, name } = file;
this.preprocessed = false;
if (name[0] === '_') {
this.preprocessed = true;
file.name = name = name.slice(1);
file.basename = basename = basename.slice(1);
}
// remove the public root and any _images segment from the dir
const dir = file.dir.split('/');
if (dir[0] === 'public') dir.shift();
const i = dir.indexOf('_images');
if (i > -1) dir.splice(i, 1);
this.kind = kind(filepath);
this.type = type(filepath);
this.cwd = file.dir;
this.ext = this.preprocessed ? file.ext : normalizedExt(file.ext);
this.input = filepath; // public/file.ext
this.base = path.join(...dir); // '', 'folder', 'folder/subfolder'
this.dir = path.join('/', ...dir); // /, /folder, /folder/subfolder
this.name = name; // index, fileA, fileB
this.basename = basename; // index.ext, fileA.ext, fileB.ext
this.ext = file.ext;
this.out = path.join(this.base, `${this.name}${this.ext}`);
this.url = path.join(this.dir, `${this.name}${this.ext}`);
this.serializable = [
'kind',
'type',
'ext',
'input',
'base',
'dir',
'name',
'basename',
'ext',
'out',
'url',
];
}
load () {}
tasks () {
return [ {
input: this.input,
output: this.out,
action: actions.copy,
} ];
}
toJson () {
return pick(this.serializable, [
'preprocessed',
'type',
'kind',
'input',
'base',
'dir',
'name',
'basename',
'ext',
'dimensions',
]);
}
};

View File

@ -1,69 +1,52 @@
const createAssetFinder = require('./assets');
const loadPublicFiles = require('./public');
const Cache = require('./cache');
const Promise = require('bluebird');
const fs = require('fs-extra');
const primeTweets = require('./page-tweets');
const pageWriter = require('./page-writer');
const evaluate = require('./evaluate');
const { resolve } = require('./resolve');
const pages = require('./pages');
const twitter = require('./twitter');
const favicon = require('./favicon');
const assets = () => createAssetFinder().then(({ tasks }) => tasks);
const svg = require('./svg');
exports.everything = function (prod = false) {
const fn = async () => {
const AssetFinder = await createAssetFinder();
// load a directory scan of the public folder
const PublicFiles = await loadPublicFiles();
await pages.parse(AssetFinder);
// load data for all the files in that folder
await Promise.map(PublicFiles.all, (p) => p.load(PublicFiles));
// prime tweet data for all pages
const pages = await primeTweets(PublicFiles.pages);
// compile all tasks to be completed
const tasks = await Promise.all([
AssetFinder.tasks,
twitter(prod),
PublicFiles.tasks,
svg(prod),
favicon(prod),
]);
if (!tasks.length) return;
async function crankTasks () {
if (!tasks.length) return;
const cache = new Cache({ prod });
await cache.load();
await evaluate(tasks.flat(), cache);
await cache.save();
}
const cache = new Cache({ prod });
await cache.load();
await evaluate(tasks.flat(), cache);
await cache.save();
await pages.write(prod);
await Promise.all([
fs.writeFile(resolve('pages.json'), JSON.stringify(pages.map((p) => p.toJson()), null, 2)),
pageWriter(pages, prod),
crankTasks(),
]);
};
const ret = () => fn().catch((err) => { console.log(err.trace || err); throw err; });
ret.displayName = prod ? 'generateEverythingForProd' : 'generateEverything';
return ret;
};
exports.task = function (action, prod = false) {
let fn;
if (action === 'parse') {
fn = () => pages.parse();
} else if (action === 'pages') {
fn = () => pages.write(prod);
} else {
fn = async () => {
const tasks = await {
twitter,
favicon,
assets,
}[action](prod);
if (!tasks.length) return;
const cache = new Cache({ prod });
await cache.load();
await evaluate(tasks, cache);
await cache.save();
};
}
const ret = () => fn().catch((err) => { console.log(err.trace || err); throw err; });
ret.displayName = prod ? action + 'ForProd' : action;
return ret;
};

View File

@ -0,0 +1,89 @@
const { chunk, uniq, difference } = require('lodash');
const fs = require('fs-extra');
const { resolve } = require('./resolve');
const log = require('fancy-log');
const tweetparse = require('../lib/tweetparse');
const Twitter = require('twitter-lite');
/**
 * Resolves the tweet ids referenced by pages into full tweet data.
 *
 * Reads three json files from the project root:
 *  - twitter-config.json: API credentials (optional; absent -> no fetching)
 *  - twitter-backup.json: raw tweets previously fetched, kept as a fallback
 *  - twitter-cache.json:  parsed tweets ready for rendering
 *
 * Mutates each page's `tweets` array in place, replacing the list of ids
 * with an { id: parsedTweet } dictionary, then persists the updated media,
 * cache and backup files. Returns the same `pages` array.
 */
module.exports = exports = async function tweets (pages) {
  const [ twitter, twitterBackup, twitterCache ] = await Promise.all([
    // getTwitterClient returns a stub yielding [] when the config is missing
    fs.readJson(resolve('twitter-config.json')).catch(() => null)
      .then(getTwitterClient),
    fs.readJson(resolve('twitter-backup.json')).catch(() => ({})),
    fs.readJson(resolve('twitter-cache.json')).catch(() => ({})),
  ]);

  // collect ids referenced by pages that are not already in the cache
  let tweetsNeeded = [];
  const tweetsPresent = Object.keys(twitterCache);
  for (const page of pages) {
    if (!page.tweets || !page.tweets.length) continue;
    const missing = difference(page.tweets, tweetsPresent);
    tweetsNeeded.push(...missing);
  }
  tweetsNeeded = uniq(tweetsNeeded);

  /* Load Missing Tweets **************************************************/

  if (tweetsNeeded.length) {
    log('Fetching tweets: ' + tweetsNeeded.join(', '));
    // chunked to 99 ids per request — presumably the lookup endpoint's
    // batch limit; confirm against the statuses/lookup API docs
    const arriving = await Promise.all(chunk(tweetsNeeded, 99).map(twitter));

    const loaded = [];
    for (const tweet of arriving.flat(1)) {
      // keep the raw tweet in the backup before caching the parsed form
      if (!twitterBackup[tweet.id_str]) twitterBackup[tweet.id_str] = tweet;
      twitterCache[tweet.id_str] = tweetparse(tweet);
      loaded.push(tweet.id_str);
    }

    // anything the API did not return: fall back to the local backup file
    const absent = difference(tweetsNeeded, loaded);
    for (const id of absent) {
      if (twitterBackup[id]) {
        log('Pulled tweet from backup ' + id);
        twitterCache[id] = tweetparse(twitterBackup[id]);
        continue;
      }
      log.error('Could not find tweet ' + id);
    }
  }

  /* Apply Tweets to Pages **************************************************/

  const twitterMedia = [];
  // now loop through pages and substitute the tweet data for the ids
  for (const page of pages) {
    if (!page.tweets || !page.tweets.length) continue;
    page.tweets = page.tweets.reduce((dict, tweetid) => {
      const tweet = twitterCache[tweetid];
      if (!tweet) {
        // missing from cache AND backup — drop the id rather than fail the build
        log.error(`Tweet ${tweetid} is missing from the cache.`);
        return dict;
      }
      dict[tweetid] = tweet;
      twitterMedia.push( ...tweet.media );
      return dict;
    }, {});
  }

  // persist everything gathered this run for the next build
  await Promise.all([
    fs.writeFile(resolve('twitter-media.json'), JSON.stringify(twitterMedia, null, 2)),
    fs.writeFile(resolve('twitter-cache.json'), JSON.stringify(twitterCache, null, 2)),
    fs.writeFile(resolve('twitter-backup.json'), JSON.stringify(twitterBackup, null, 2)),
  ]);

  return pages;
};
/* Utility Functions **************************************************/
/**
 * Builds a batch tweet fetcher from a twitter-config credentials object.
 * When no config is available, returns a stub that fetches nothing, so the
 * build can proceed offline.
 * @param {object|null} config - credentials for twitter-lite
 * @returns {function(string[]): Promise<Array>} fetcher for an id batch
 */
function getTwitterClient (config) {
  if (!config) return () => [];
  const client = new Twitter(config);
  return async (tweetids) => {
    try {
      return await client.get('statuses/lookup', {
        id: tweetids.join(','),
        tweet_mode: 'extended',
      });
    } catch (err) {
      // a failed batch is logged and treated as empty, never fatal
      log.error(err);
      return [];
    }
  };
}

View File

@ -0,0 +1,56 @@
const path = require('path');
const Promise = require('bluebird');
const fs = require('fs-extra');
const getEngines = require('./engines');
const { resolve, ROOT } = require('./resolve');
const { siteInfo } = require(resolve('package.json'));
/**
 * Renders every page through its template engine and writes the resulting
 * html (and optional json metadata sidecar) into the dist folder.
 * @param {Array} pages - page records (engine, source, meta, output paths)
 * @param {boolean} prod - production flag, forwarded to the engine factory
 */
module.exports = exports = async function writePageContent (pages, prod) {
  const engines = await getEngines(prod);
  // concurrency 1: pages are rendered and written one at a time, in order
  await Promise.map(pages, async (page) => {
    // page = new Page(page);
    // assemble the template context: page fields, merged meta, site info,
    // and the full page list (for index-style templates)
    var data = {
      ...page,
      meta: { ...page.meta, ...page },
      page: {
        domain: siteInfo.domain,
        title: page.meta.title
          ? (page.meta.title + (page.meta.subtitle ? ', ' + page.meta.subtitle : '') + ' :: ' + siteInfo.title)
          : siteInfo.title,
        description: page.meta.description || siteInfo.description,
      },
      site: siteInfo,
      local: {
        cwd: resolve(page.cwd),
        root: ROOT,
        basename: page.basename,
      },
      pages,
    };
    const html = String(engines[page.engine](data.source, data));
    const json = page.json && {
      url: page.fullurl,
      title: page.meta.title,
      subtitle: page.meta.subtitle,
      description: page.meta.description,
      tweets: page.tweets,
      images: page.images,
      dateCreated: page.dateCreated,
      dateModified: page.dateModified,
      titlecard: page.titlecard,
      // NOTE(review): elsewhere in this commit page.engine holds ENGINE.*
      // constants (e.g. 'MARKDOWN') and the engines table exposes `PREVIEW`,
      // so this 'md' check and `engines.preview` look stale and would make
      // preview always false — confirm against ./engines and ./resolve.
      preview: page.engine === 'md' && String(engines.preview(data.source, data)),
    };
    const output = resolve('dist', page.output);
    await fs.ensureDir(path.dirname(output));
    await Promise.all([
      fs.writeFile(output, Buffer.from(html)),
      // json sidecar is pretty-printed in dev for easier inspection
      json && fs.writeFile(resolve('dist', page.json), Buffer.from(
        prod ? JSON.stringify(json) : JSON.stringify(json, null, 2),
      )),
    ]);
  }, { concurrency: 1 });
};

View File

@ -3,74 +3,42 @@ const path = require('path');
const Promise = require('bluebird');
const fs = require('fs-extra');
const log = require('fancy-log');
const frontmatter = require('front-matter');
const File = require('./file');
const actions = require('./actions');
const { URL } = require('url');
const { pick, omit } = require('lodash');
const { resolve, readFile } = require('./resolve');
const { resolve, readFile, isCleanUrl, ENGINE } = require('./resolve');
const { isObject } = require('../lib/util');
const pkg = require(resolve('package.json'));
const frontmatter = require('front-matter');
/* Utility Functions **************************************************/
const MD = '.md';
const HBS = '.hbs';
const HTML = '.html';
const XML = '.xml';
const tweeturl = /https?:\/\/twitter\.com\/(?:#!\/)?(?:\w+)\/status(?:es)?\/(\d+)/i;
const tweetidcheck = /^\d+$/;
function parseTweetId (tweetid) {
// we can't trust an id that isn't a string
if (typeof tweetid !== 'string') return false;
const match = tweetid.match(tweeturl);
if (match) return match[1];
if (tweetid.match(tweetidcheck)) return tweetid;
return false;
}
module.exports = exports = class Page {
module.exports = exports = class Page extends File {
constructor (filepath) {
if (filepath && typeof filepath === 'object') {
// we've been passed a json object, treat as serialized Page
Object.assign(this, filepath);
return this;
}
super(filepath);
const file = path.parse(filepath);
const { base: basename, name, ext } = file;
this.serializable.push(
'fullurl',
'engine',
'source',
'meta',
'images',
'titlecard',
'tweets',
'dateCreated',
'dateModified',
'classes',
'flags',
);
// this file is an include, skip it.
if (name[0] === '_') return false;
var isIndexPage = (this.name === 'index');
var isClean = isCleanUrl(this.ext);
// this is not a page file
if (![ MD, HBS, HTML, XML ].includes(ext)) return false;
// remove the pages root and any _images segment from the dir
const dir = file.dir.split('/');
if (dir[0] === 'pages') dir.shift();
const i = dir.indexOf('_images');
if (i > -1) dir.splice(i, 1);
this.input = filepath; // /local/path/to/pages/file.ext
this.cwd = file.dir; // /local/path/to/pages/, pages/folder, pages/folder/subfolder
this.base = path.join(...dir); // '', 'folder', 'folder/subfolder'
this.dir = path.join('/', ...dir); // /, /folder, /folder/subfolder
this.name = name; // index, fileA, fileB
this.basename = basename; // index.ext, fileA.ext, fileB.ext
this.ext = file.ext;
var isIndexPage = (name === 'index');
var isCleanUrl = [ HBS, MD ].includes(ext);
if (isCleanUrl && isIndexPage) {
if (isClean && isIndexPage) {
this.output = path.join(this.base, 'index.html');
this.json = path.join(this.base, 'index.json');
this.url = this.dir;
} else if (isCleanUrl) {
} else if (isClean) {
this.output = path.join(this.base, this.name, 'index.html');
this.json = path.join(this.base, this.name + '.json');
this.url = path.join(this.dir, this.name);
@ -88,23 +56,16 @@ module.exports = exports = class Page {
url.pathname = this.url;
this.fullurl = url.href;
if ([ HBS, HTML, XML ].includes(ext)) {
this.engine = 'hbs';
} else if (ext === MD) {
this.engine = 'md';
} else {
this.engine = 'raw';
}
this.engine = ENGINE[this.type] || ENGINE.COPY;
}
async load ({ Assets }) {
async load (PublicFiles) {
const [ raw, { ctime, mtime } ] = await Promise.all([
readFile(this.input).catch(() => null),
fs.stat(this.input).catch(() => ({})),
]);
const { titlecard, assets } = Assets.for(this.dir);
const { titlecard, assets } = PublicFiles.for(this.dir);
// empty file
if (!raw || !ctime) {
@ -137,36 +98,27 @@ module.exports = exports = class Page {
return this;
}
toJson () {
const j = pick(this, [
'input',
'output',
'json',
'dateCreated',
'dateModified',
'cwd',
'base',
'dir',
'name',
'ext',
'basename',
'dest',
'out',
'url',
'fullurl',
'engine',
'source',
'images',
'assets',
'titlecard',
'tweets',
'classes',
'flags',
]);
tasks () {
if (!isObject(this.tweets)) return [];
j.meta = omit(this.meta, [ 'date', 'classes', 'tweets' ]);
return j;
return Object.values(this.tweets)
.map((t) => t.media)
.flat()
.map((m) => ({ ...m, action: actions.fetch, output: m.output }));
}
};
/* Utility Functions **************************************************/

const tweeturl = /https?:\/\/twitter\.com\/(?:#!\/)?(?:\w+)\/status(?:es)?\/(\d+)/i;
const tweetidcheck = /^\d+$/;

/**
 * Normalizes a tweet reference down to a bare status id string.
 * Accepts either a twitter.com status URL or a plain numeric id string.
 * @param {*} tweetid - candidate id or URL
 * @returns {string|false} the extracted id, or false when unparseable
 */
function parseTweetId (tweetid) {
  // we can't trust an id that isn't a string
  if (typeof tweetid !== 'string') return false;
  const fromUrl = tweeturl.exec(tweetid);
  if (fromUrl) return fromUrl[1];
  return tweetidcheck.test(tweetid) ? tweetid : false;
}

View File

@ -1,161 +0,0 @@
const path = require('path');
const glob = require('../lib/glob');
const { chunk, uniq, difference } = require('lodash');
const Promise = require('bluebird');
const fs = require('fs-extra');
const log = require('fancy-log');
const tweetparse = require('../lib/tweetparse');
const getEngines = require('./renderers');
const Twitter = require('twitter-lite');
const Page = require('./page');
const createAssetFinder = require('./assets');
const { resolve, ROOT } = require('./resolve');
exports.parse = async function parsePageContent (assetFinder) {
const [ files, twitter, twitterBackup, twitterCache, Assets ] = await Promise.all([
glob('pages/**/*.{md,hbs,html,xml}', { cwd: ROOT }),
fs.readJson(resolve('twitter-config.json')).catch(() => null)
.then(getTwitterClient),
fs.readJson(resolve('twitter-backup.json')).catch(() => ({})),
fs.readJson(resolve('twitter-cache.json')).catch(() => ({})),
assetFinder || createAssetFinder(),
]);
let tweetsNeeded = [];
const tweetsPresent = Object.keys(twitterCache);
let pages = await Promise.map(files, async (filepath) => {
const page = new Page(filepath);
if (!page.input) return;
await page.load({ Assets });
if (page.tweets.length) {
const missing = difference(page.tweets, tweetsPresent);
tweetsNeeded.push(...missing);
}
return page;
});
pages = pages.filter(Boolean);
tweetsNeeded = uniq(tweetsNeeded);
/* Load Missing Tweets **************************************************/
if (tweetsNeeded.length) {
log('Fetching tweets: ' + tweetsNeeded.join(', '));
const arriving = await Promise.all(chunk(tweetsNeeded, 99).map(twitter));
const loaded = [];
for (const tweet of arriving.flat(1)) {
if (!twitterBackup[tweet.id_str]) twitterBackup[tweet.id_str] = tweet;
twitterCache[tweet.id_str] = tweetparse(tweet);
loaded.push(tweet.id_str);
}
const absent = difference(tweetsNeeded, loaded);
for (const id of absent) {
if (twitterBackup[id]) {
log('Pulled tweet from backup ' + id);
twitterCache[id] = tweetparse(twitterBackup[id]);
continue;
}
log.error('Could not find tweet ' + id);
}
}
/* Apply Tweets to Pages **************************************************/
const twitterMedia = [];
// now loop through pages and substitute the tweet data for the ids
for (const page of pages) {
if (!page.tweets || !page.tweets.length) continue;
page.tweets = page.tweets.reduce((dict, tweetid) => {
const tweet = twitterCache[tweetid];
if (!tweet) {
log.error(`Tweet ${tweetid} is missing from the cache.`);
return dict;
}
dict[tweetid] = tweet;
twitterMedia.push( ...tweet.media );
return dict;
}, {});
}
await Promise.all([
fs.writeFile(resolve('pages.json'), JSON.stringify(pages.map((p) => p.toJson()), null, 2)),
fs.writeFile(resolve('twitter-media.json'), JSON.stringify(twitterMedia, null, 2)),
fs.writeFile(resolve('twitter-cache.json'), JSON.stringify(twitterCache, null, 2)),
fs.writeFile(resolve('twitter-backup.json'), JSON.stringify(twitterBackup, null, 2)),
]);
return pages;
};
exports.write = async function writePageContent (prod) {
const [ pages, { siteInfo }, engines ] = await Promise.all([
fs.readJson(resolve('pages.json')),
fs.readJson(resolve('package.json')),
getEngines(prod),
]);
await Promise.map(pages, async (page) => {
// page = new Page(page);
var data = {
...page,
meta: { ...page.meta, ...page },
page: {
domain: siteInfo.domain,
title: page.meta.title
? (page.meta.title + (page.meta.subtitle ? ', ' + page.meta.subtitle : '') + ' :: ' + siteInfo.title)
: siteInfo.title,
description: page.meta.description || siteInfo.description,
},
site: siteInfo,
local: {
cwd: page.cwd,
root: ROOT,
basename: page.basename,
},
pages,
};
const html = String(engines[page.engine](data.source, data));
const json = page.json && {
url: page.fullurl,
title: page.meta.title,
subtitle: page.meta.subtitle,
description: page.meta.description,
tweets: page.tweets,
images: page.images,
dateCreated: page.dateCreated,
dateModified: page.dateModified,
titlecard: page.titlecard,
preview: page.engine === 'md' && String(engines.preview(data.source, data)),
};
const output = resolve('dist', page.output);
await fs.ensureDir(path.dirname(output));
await Promise.all([
fs.writeFile(output, Buffer.from(html)),
json && fs.writeFile(resolve('dist', page.json), Buffer.from(
prod ? JSON.stringify(json) : JSON.stringify(json, null, 2),
)),
]);
});
};
/* Utility Functions **************************************************/
function getTwitterClient (config) {
if (!config) return () => [];
const client = new Twitter(config);
return (tweetids) => client
.get('statuses/lookup', { id: tweetids.join(','), tweet_mode: 'extended' })
.catch((e) => { log.error(e); return []; });
}

62
gulp/content/public.js Normal file
View File

@ -0,0 +1,62 @@
const glob = require('../lib/glob');
const { groupBy, keyBy, filter, find, get, memoize } = require('lodash');
const { ROOT, kind, KIND } = require('./resolve');
const File = require('./file');
const Asset = require('./asset');
const Page = require('./page');
const Promise = require('bluebird');
const KIND_MAP = {
[KIND.PAGE]: Page,
[KIND.ASSET]: Asset,
[KIND.OTHER]: File,
};
module.exports = exports = async function loadPublicFiles () {
const files = await Promise.map(glob('public/**/*', { cwd: ROOT, nodir: true }), (filepath) => {
const k = kind(filepath);
const F = KIND_MAP[k];
const f = new F(filepath);
if (f.kind === KIND.PAGE && f.preprocessed) return false;
return f;
}).filter(Boolean);
const {
[KIND.PAGE]: pages,
[KIND.ASSET]: assets,
} = groupBy(files, 'kind');
function within (dir) {
const subset = filter(files, { dir });
const getTitlecard = memoize(() =>
get(find(files, { name: 'titlecard' }), [ 0, 'url' ]),
);
const {
[KIND.PAGE]: subpages,
[KIND.ASSET]: subassets,
} = groupBy(subset, 'kind');
return {
all: subset,
get titlecard () { return getTitlecard; },
get pages () {
return subpages;
},
get assets () {
return keyBy(subassets, 'name');
},
};
}
return {
all: files,
pages,
assets,
for: memoize(within),
get tasks () {
return files.map((a) => a.tasks()).flat(1);
},
};
};

View File

@ -2,6 +2,129 @@
const path = require('path');
const ROOT = path.resolve(__dirname, '../..');
const fs = require('fs-extra');
const { is: _is, re } = require('../lib/util');
/**
 * Wraps lib/util's `_is` matcher so that candidate extensions are passed
 * through normalizedExt before comparison (e.g. '.jpg' matches '.jpeg').
 * The original argument list is kept on `.matching` for introspection,
 * which lets `is` matchers be composed (see isPage/isAsset below).
 */
function is (...args) {
  const fn = _is(...args);
  const ret = (ext) => fn(normalizedExt(ext));
  ret.matching = args;
  return ret;
}
/**
 * Builds a classifier from a { key: predicate } dictionary.
 * The returned function yields the key of the first predicate that accepts
 * the token, or `def` when none match. Entry order follows the dictionary.
 * @param {Object<string, function>} dict - keys mapped to predicates
 * @param {*} def - fallback result when no predicate matches
 * @returns {function(*): *}
 */
function dictMatch (dict, def) {
  const entries = Object.entries(dict);
  return (tok) => {
    const hit = entries.find(([ , fn ]) => fn(tok));
    return hit ? hit[0] : def;
  };
}
const EXT = exports.EXT = {
JPG: '.jpg',
JPEG: '.jpeg',
PNG: '.png',
GIF: '.gif',
MP4: '.mp4',
M4V: '.m4v',
MD: '.md',
HBS: '.hbs',
HTML: '.html',
XML: '.xml',
};
const {
JPG,
JPEG,
PNG,
GIF,
MP4,
M4V,
MD,
HBS,
HTML,
XML,
} = EXT;
exports.RE = {
JPG: re(/.jpg$/),
JPEG: re(/.jpeg$/),
PNG: re(/.png$/),
GIF: re(/.gif$/),
MP4: re(/.mp4$/),
M4V: re(/.m4v$/),
MD: re(/.md$/),
HBS: re(/.hbs$/),
HTML: re(/.html$/),
XML: re(/.xml$/),
};
const NORMALIZE_EXT = {
[JPG]: JPEG,
[M4V]: MP4,
[HBS]: HTML,
};
const normalizedExt = exports.normalizedExt = (ext) => {
if (ext[0] !== '.') ext = '.' + ext.split('.').pop();
return NORMALIZE_EXT[ext] || ext;
};
const isVideo = exports.isVideo = is(MP4, M4V);
const isImage = exports.isImage = is(JPG, JPEG, PNG, GIF);
const isHandlebars = exports.isHandlebars = is(XML, HBS, HTML);
const isMarkdown = exports.isMarkdown = is(MD);
const isPage = exports.isPage = is(isHandlebars, isMarkdown);
const isAsset = exports.isAsset = is(isImage, isVideo);
exports.isCleanUrl = is(HBS, HTML, MD);
const TYPE = exports.TYPE = {
IMAGE: 'IMAGE',
VIDEO: 'VIDEO',
HANDLEBARS: 'HANDLEBARS',
MARKDOWN: 'MARKDOWN',
OTHER: 'OTHER',
};
exports.type = dictMatch({
[TYPE.IMAGE]: isImage,
[TYPE.HANDLEBARS]: isHandlebars,
[TYPE.MARKDOWN]: isMarkdown,
[TYPE.VIDEO]: isVideo,
}, TYPE.OTHER);
const KIND = exports.KIND = {
PAGE: 'PAGE',
ASSET: 'ASSET',
OTHER: 'OTHER',
};
exports.kind = dictMatch({
[KIND.ASSET]: isAsset,
[KIND.PAGE]: isPage,
}, KIND.OTHER);
const ENGINE = exports.ENGINE = {
HANDLEBARS: 'HANDLEBARS',
MARKDOWN: 'MARKDOWN',
COPY: 'COPY',
};
exports.engine = dictMatch({
[ENGINE.HANDLEBARS]: is(XML, HBS, HTML),
[ENGINE.MARKDOWN]: is(MD),
}, ENGINE.COPY);
exports.readFile = function readFile (fpath) {
fpath = exports.resolve(fpath);
@ -15,7 +138,7 @@ exports.resolve = function resolve (...args) {
let fpath = args.shift();
if (!fpath) return ROOT;
if (fpath[0] === '/') throw new Error('Did you mean to resolve this? ' + fpath);
if (fpath[0] === '/') fpath = fpath.slice(1);
// if (fpath[0] === '/') fpath = fpath.slice(1);
return path.resolve(ROOT, fpath, ...args);
};

15
gulp/content/svg.js Normal file
View File

@ -0,0 +1,15 @@
const glob = require('../lib/glob');
const { ROOT } = require('./resolve');
const actions = require('./actions');
module.exports = exports = async function svgIcons () {
const files = await glob('svg/**/*.svg', { cwd: ROOT });
const tasks = files.map((f) => ({
input: f,
output: 'images/' + f,
action: actions.copy,
}));
return tasks;
};

View File

@ -1,48 +0,0 @@
const path = require('path');
const { src, dest } = require('gulp');
const rev = require('gulp-rev');
const asyncthrough = require('./lib/through');
const changed = require('gulp-changed');
const merge = require('merge-stream');
const ROOT = path.dirname(__dirname);
const DEST = 'dist';
module.exports = exports = function fileCopy () {
const pageFiles = src([ 'pages/**/*', '!pages/**/*.{md,hbs,xml,html,jpeg,jpg,png,gif,mp4}' ])
.pipe(changed(DEST))
.pipe(dest(DEST))
;
const svgs = src('svg/**/*.svg')
// .pipe(changed(DEST))
.pipe(dest(path.join(DEST, 'images/svg')))
.pipe(asyncthrough(async (stream, file) => {
file.base = path.resolve(file.base, '../..');
stream.push(file);
}))
;
return merge(pageFiles, svgs);
};
exports.prod = function fileCopyForProd () {
return exports()
.pipe(rev())
.pipe(dest(DEST))
.pipe(asyncthrough(async (stream, file) => {
// Change rev's original base path back to the public root so that it uses the full
// path as the original file name key in the manifest
var base = path.resolve(ROOT, DEST);
file.revOrigBase = base;
file.base = base;
stream.push(file);
}))
.pipe(rev.manifest({
merge: true, // Merge with the existing manifest if one exists
}))
.pipe(dest('.'))
;
};

View File

@ -5,22 +5,10 @@ const { series, parallel, watch } = require('gulp');
var content = require('./content');
const parse = exports.parse = content.task('parse');
const pages = exports.pages = content.task('pages');
exports.twitter = content.task('twitter');
exports.favicon = content.task('favicon');
exports.assets = content.task('assets');
exports.content = series(parse, pages);
const everything = content.everything();
everything.prod = content.everything(true);
const filesTask = require('./files');
exports.files = filesTask;
exports['files-prod'] = filesTask.prod;
exports.everything = everything;
var scssTask = require('./scss');
exports.scss = scssTask;
@ -42,14 +30,12 @@ exports.cloudfront = cloudfront;
var prodBuildTask = parallel(
scssTask.prod,
jsTask.prod,
filesTask.prod,
everything.prod,
);
var devBuildTask = parallel(
scssTask,
jsTask,
filesTask,
everything,
);
@ -68,11 +54,9 @@ exports.testpush = pushToProd.dryrun;
function watcher () {
watch([
'pages/**/*.{md,hbs,html}',
'public/**/*',
'templates/*.{md,hbs,html}',
], series(exports.parse, exports.twitter, exports.pages));
watch('page/**/*.{jpeg,jpg,png,gif}', series(exports.assets, exports.parse, exports.pages));
], everything);
watch('scss/*.scss', scssTask);
watch('js/*.js', jsTask);
@ -92,7 +76,7 @@ function server () {
}
exports.watch = series(exports.parse, exports.pages, watcher);
exports.watch = series(everything, watcher);
exports.uat = series(cleanTask, prodBuildTask, server);
/** **************************************************************************************************************** **/

View File

@ -22,6 +22,6 @@ module.exports = exports = function (iteratees) {
stream.push(file);
await sleep(100);
}
}
},
);
};

View File

@ -31,6 +31,35 @@
Object.defineProperty(exports, '__esModule', { value: true });
function equals (value) {
  // Builds a case-insensitive equality predicate: both the target value and
  // each tested token are passed through uc() before comparison.
  const target = uc(value);
  return (tok) => uc(tok) === target;
}
function re (pattern) {
  // Builds a regex-match predicate. String patterns are compiled once, up
  // front; RegExp instances are used as-is. Tokens are stringified first.
  const rx = isString(pattern) ? new RegExp(pattern) : pattern;
  return (tok) => Boolean(String(tok).match(rx));
}
function anyOf (...args) {
  // Predicate factory: true if the token matches ANY of the given values or
  // predicate functions. Bare values compare case-insensitively (via uc).
  let checks = args.flat().map(uc);
  if (!anyBy(checks, isFunction)) {
    // Only plain values were supplied, so direct comparison suffices.
    if (checks.length === 1) {
      const target = checks[0];
      return (tok) => uc(tok) === target;
    }
    return (tok) => checks.includes(uc(tok));
  }
  // Mixed values and functions: wrap each bare value in an equals() check.
  checks = checks.map((a) => (isFunction(a) ? a : equals(a)));
  if (checks.length === 1) return (tok) => checks[0](tok);
  return (tok) => anyBy(checks, (check) => check(tok));
}
function allOf (...args) {
  // Predicate factory: true only if the token satisfies EVERY given value or
  // predicate function (bare values become equals() checks).
  const checks = args.flat().map((a) => (isFunction(a) ? a : equals(a)));
  if (checks.length === 1) return (tok) => checks[0](tok);
  return (tok) => allBy(checks, (check) => check(tok));
}
// Basic typeof guards. NOTE(review): isNumber uses the coercing global isNaN,
// but the preceding typeof check guarantees a number, so it only rejects NaN.
function isNumber (input) { return typeof input === 'number' && !isNaN(input); }
function isString (input) { return typeof input === 'string'; }
function isBoolean (input) { return typeof input === 'boolean'; }
@ -38,6 +67,12 @@ function isFunction (input) { return typeof input === 'function'; }
function isUndefined (input) { return typeof input === 'undefined'; }
// instanceof guards for common built-ins.
function isMap (input) { return input instanceof Map; }
function isSet (input) { return input instanceof Set; }
function isDate (input) { return input instanceof Date; }
function isRegExp (input) { return input instanceof RegExp; }
// Coercion guards: isFalsey deliberately matches 0, '', null, undefined, NaN.
function isTruthy (input) { return !!input; }
function isFalsey (input) { return !input; }
function isNull (input) { return input === null; }
const isArray = Array.isArray;
function isPrimitive (input) {
switch (typeof input) {
@ -50,8 +85,6 @@ function isPrimitive (input) {
}
}
function isNull (input) { return input === null; }
function isObject (input) {
if (!input) return false;
if (typeof input !== 'object') return false;
@ -61,13 +94,72 @@ function isObject (input) {
return true;
}
const isArray = Array.isArray;
// Maps constructors and sentinel values to their matching predicate so that
// is()/isAll() can accept shorthand like is(String, Number) or is(true).
const IS_LOOKUP = new Map([
  [ Array, isArray ],
  [ Number, isNumber ],
  [ String, isString ],
  [ Boolean, isBoolean ],
  [ Map, isMap ],
  [ Set, isSet ],
  [ Function, isFunction ],
  [ Date, isDate ],
  [ undefined, isUndefined ],
  [ true, isTruthy ],
  [ false, isFalsey ],
]);
function is (...args) {
  // Predicate factory: token matches ANY argument. Each argument resolves to
  // a check via IS_LOOKUP (constructors/sentinels), pass-through (functions),
  // re() (RegExp), or equals() (everything else).
  const checks = args.flat().map((a) => {
    const known = IS_LOOKUP.get(a);
    if (known) return known;
    if (isFunction(a)) return a;
    return isRegExp(a) ? re(a) : equals(a);
  });
  if (checks.length === 1) return (tok) => checks[0](tok);
  return (tok) => anyBy(checks, (check) => check(tok));
}
function isAll (...args) {
  // Like is(), but conjunctive: the token must satisfy EVERY resolved check.
  const checks = args.flat().map((a) => {
    if (IS_LOOKUP.has(a)) return IS_LOOKUP.get(a);
    if (isFunction(a)) return a;
    if (isRegExp(a)) return re(a);
    return equals(a);
  });
  if (checks.length === 1) return (tok) => checks[0](tok);
  return (tok) => allBy(checks, (check) => check(tok));
}
function isArrayOf (...args) {
  // Every element of an array token must satisfy is(...args); non-array
  // tokens are tested directly against the predicate.
  const predicate = is(...args);
  return (tok) => {
    if (!isArray(tok)) return predicate(tok);
    return allBy(tok, predicate);
  };
}
// Whole-collection guards: true only if EVERY element passes the base guard
// (delegates to allBy, so non-array collections follow allBy's semantics).
// NOTE(review): 'Primatives' misspelling is preserved — it is the exported name.
function isArrayOfStrings (input) { return allBy(input, isString); }
function isArrayOfNumbers (input) { return allBy(input, isNumber); }
function isArrayOfBooleans (input) { return allBy(input, isBoolean); }
function isArrayOfObjects (input) { return allBy(input, isObject); }
function isArrayOfMappables (input) { return allBy(input, isMappable); }
function isArrayOfPrimatives (input) { return allBy(input, isPrimitive); }
function isArrayOfFunctions (input) { return allBy(input, isFunction); }
function isArrayOfRegEx (input) { return allBy(input, isRegExp); }
function isArrayOfTruthy (input) { return allBy(input, isTruthy); }
function isArrayOfFalsey (input) { return allBy(input, isFalsey); }
function contains (...args) {
  // True if ANY element of an array token satisfies is(...args); non-array
  // tokens are tested directly.
  const predicate = is(...args);
  return (tok) => {
    if (isArray(tok)) return anyBy(tok, predicate);
    return predicate(tok);
  };
}
// Membership guards: true if ANY element passes the base guard (delegates to
// anyBy). NOTE(review): 'Primatives' misspelling is the exported API name.
function containsStrings (input) { return anyBy(input, isString); }
function containsNumbers (input) { return anyBy(input, isNumber); }
function containsBooleans (input) { return anyBy(input, isBoolean); }
function containsObjects (input) { return anyBy(input, isObject); }
function containsMappables (input) { return anyBy(input, isMappable); }
function containsPrimatives (input) { return anyBy(input, isPrimitive); }
function containsFunctions (input) { return anyBy(input, isFunction); }
function containsRegEx (input) { return anyBy(input, isRegExp); }
function containsTruthy (input) { return anyBy(input, isTruthy); }
function containsFalsey (input) { return anyBy(input, isFalsey); }
function truthy (value) {
if (isMappable(value)) return !!sizeOf(value);
@ -78,6 +170,14 @@ function hasOwn (obj, key) {
return Object.prototype.hasOwnProperty.call(obj, key);
}
function lc (str) {
  // Lowercase a string; non-string values pass through unchanged.
  // BUG FIX: this previously tested `isString(uc)` — uc is a function, so the
  // guard was always false and lc() returned EVERY input unmodified. The
  // string check is inlined here (identical to isString's typeof test).
  return typeof str === 'string' ? str.toLowerCase() : str;
}
function uc (str) {
  // Uppercase a string; non-string values pass through unchanged.
  if (!isString(str)) return str;
  return str.toUpperCase();
}
function ucfirst (input) {
input = String(input);
return input.charAt(0).toUpperCase() + input.slice(1);
@ -215,6 +315,31 @@ function arrayify (input) {
return [ input ];
}
function first (input, count = 1) {
  // Returns the first element (count === 1) or the first `count` elements of
  // an array, string, Set, Map, or plain object; undefined otherwise.
  if (count === 1) {
    if (isArray(input) || isString(input)) return input[0];
    if (isSet(input)) { for (const v of input) return v; return; }
    // BUG FIX: the old combined `isSet(input) || isObject(input)` branch ran
    // for...of on plain objects, which are not iterable and throw a
    // TypeError. (Assumes isObject matches plain, non-iterable objects.)
    if (isObject(input)) return Object.values(input)[0];
    if (isMap(input)) { for (const [ , v ] of input) return v; return; }
    return;
  }
  if (isArray(input) || isString(input)) return input.slice(0, count);
  if (isSet(input)) return Array.from(input).slice(0, count);
  if (isObject(input)) return Object.values(input).slice(0, count);
  if (isMap(input)) return Array.from(input.values()).slice(0, count);
}
function last (input, count = 1) {
  // Returns the last element (count === 1) or the last `count` elements of
  // an array, string, Set, Map, or plain object.
  if (count === 1) {
    if (isArray(input) || isString(input)) return input[input.length - 1];
    // BUG FIX: mirror first() — previously count === 1 fell through to the
    // slice branches below for Set/Object/Map, yielding a one-element ARRAY
    // instead of the element itself (inconsistent with the array/string path).
    if (isSet(input)) return Array.from(input).pop();
    if (isObject(input)) return Object.values(input).pop();
    if (isMap(input)) return Array.from(input.values()).pop();
    return;
  }
  if (isArray(input) || isString(input)) return input.slice(-count);
  if (isSet(input)) return Array.from(input).slice(-count);
  if (isObject(input)) return Object.values(input).slice(-count);
  if (isMap(input)) return Array.from(input.values()).slice(-count);
}
function all (...args) {
let input;
if (args.length > 1) {
@ -238,7 +363,7 @@ function allBy (collection, predicate = null) {
if (!collection) return false;
if (predicate === null) {
predicate = (v) => v;
} else {
} else if (!isFunction(predicate)) {
predicate = iteratee(predicate);
}
@ -300,7 +425,7 @@ function anyBy (collection, predicate = null) {
if (!collection) return false;
if (predicate === null) {
predicate = (v) => v;
} else {
} else if (!isFunction(iteratee)) {
predicate = iteratee(predicate);
}
@ -352,8 +477,7 @@ function iteratee (match) {
if (isObject(o)) return o[match];
if (isMap(o)) return o.get(match);
if (isSet(o)) return o.has(match);
if (isString(o)) return o === match;
if (isNumber(o)) return String(o) === match;
if (isPrimitive(o)) return o[match];
return o === match;
};
}
@ -548,6 +672,22 @@ function uniq (collection, predicate = null) {
return collection;
}
function keyBy (collection, predicate) {
  // Re-index a collection into an object keyed by the predicate's result for
  // each entry; later entries with the same key overwrite earlier ones.
  const keyFor = iteratee(predicate);
  return mapReduce(collection, (value, key, index) => [ keyFor(value, key, index), value ]);
}
function groupBy (collection, predicate) {
  // Bucket collection values by the predicate's result; each bucket is an
  // array of values in source order.
  const keyFor = iteratee(predicate);
  return reduce(collection, (acc, value, key, index) => {
    const bucket = keyFor(value, key, index);
    if (!acc[bucket]) acc[bucket] = [];
    acc[bucket].push(value);
    return acc;
  }, {});
}
function filter (collection, predicate) {
predicate = iteratee(predicate);
@ -722,19 +862,21 @@ function mapReduce (collection, cb) {
return result;
}
function reduce (collection, cb, init) {
if (isArray(collection)) return collection.reduce(cb, init);
function reduce (collection, predicate, init) {
if (!isFunction(predicate)) throw new TypeError('Predicate must be a function');
if (isArray(collection)) return collection.reduce((r, v, i) => predicate(r, v, i, i), init);
if (isSet(collection)) {
return Array.from(collection).reduce(cb, init);
return Array.from(collection).reduce((r, v, i) => predicate(r, v, i, i), init);
}
if (isMap(collection)) {
return Array.from(collection.entries()).reduce((prev, [ key, value ], i) => cb(prev, value, key, i), init);
return Array.from(collection.entries()).reduce((prev, [ key, value ], i) => predicate(prev, value, key, i), init);
}
if (isObject(collection)) {
return Object.entries(collection).reduce((prev, [ key, value ], i) => cb(prev, value, key, i), init);
return Object.entries(collection).reduce((prev, [ key, value ], i) => predicate(prev, value, key, i), init);
}
}
@ -1428,24 +1570,49 @@ function slugify (input, delimiter = '-', separators = false) {
// Public API surface, exported alphabetically. NOTE(review): the misspelled
// 'Primatives' names are part of the published interface — do not "fix" them.
exports.all = all;
exports.allBy = allBy;
exports.allOf = allOf;
exports.any = any;
exports.anyBy = anyBy;
exports.anyOf = anyOf;
exports.arrayify = arrayify;
exports.contains = contains;
exports.containsBooleans = containsBooleans;
exports.containsFalsey = containsFalsey;
exports.containsFunctions = containsFunctions;
exports.containsMappables = containsMappables;
exports.containsNumbers = containsNumbers;
exports.containsObjects = containsObjects;
exports.containsPrimatives = containsPrimatives;
exports.containsRegEx = containsRegEx;
exports.containsStrings = containsStrings;
exports.containsTruthy = containsTruthy;
exports.deepPick = deepPick;
exports.equals = equals;
exports.filter = filter;
exports.first = first;
exports.flatten = flatten;
exports.fromPairs = fromPairs;
exports.get = get;
exports.groupBy = groupBy;
exports.has = has;
exports.hasOwn = hasOwn;
exports.is = is;
exports.isAll = isAll;
exports.isArray = isArray;
exports.isArrayOf = isArrayOf;
exports.isArrayOfBooleans = isArrayOfBooleans;
exports.isArrayOfFalsey = isArrayOfFalsey;
exports.isArrayOfFunctions = isArrayOfFunctions;
exports.isArrayOfMappables = isArrayOfMappables;
exports.isArrayOfNumbers = isArrayOfNumbers;
exports.isArrayOfObjects = isArrayOfObjects;
exports.isArrayOfPrimatives = isArrayOfPrimatives;
exports.isArrayOfRegEx = isArrayOfRegEx;
exports.isArrayOfStrings = isArrayOfStrings;
exports.isArrayOfTruthy = isArrayOfTruthy;
exports.isBoolean = isBoolean;
exports.isDate = isDate;
exports.isFalsey = isFalsey;
exports.isFunction = isFunction;
exports.isMap = isMap;
exports.isMappable = isMappable;
@ -1453,17 +1620,23 @@ exports.isNull = isNull;
// Public API surface (alphabetical export list).
exports.isNumber = isNumber;
exports.isObject = isObject;
exports.isPrimitive = isPrimitive;
exports.isRegExp = isRegExp;
exports.isSet = isSet;
exports.isString = isString;
exports.isTruthy = isTruthy;
exports.isUndefined = isUndefined;
exports.iteratee = iteratee;
exports.keyBy = keyBy;
exports.keys = keys;
exports.last = last;
exports.lc = lc;
exports.map = map;
exports.mapReduce = mapReduce;
exports.merge = merge;
exports.omit = omit;
exports.pathinate = pathinate;
exports.pick = pick;
exports.re = re;
exports.reduce = reduce;
exports.set = set;
exports.sizeOf = sizeOf;
@ -1473,6 +1646,7 @@ exports.sort = sort;
// Public API surface (alphabetical export list).
exports.sorter = sorter;
exports.toPairs = toPairs;
exports.truthy = truthy;
exports.uc = uc;
exports.ucfirst = ucfirst;
exports.ucsentence = ucsentence;
exports.ucwords = ucwords;