const path = require('path');
const glob = require('../lib/glob');
const { chunk, uniq, difference } = require('lodash');
const Promise = require('bluebird');
const fs = require('fs-extra');
const log = require('fancy-log');
const tweetparse = require('../lib/tweetparse');
const getEngines = require('./renderers');
const Twitter = require('twitter-lite');
const Page = require('./page');
const createAssetLoader = require('./files');
const ROOT = path.resolve(__dirname, '../..');
exports.parse = async function parsePageContent () {
const [ files, twitter, twitterBackup, twitterCache, Assets ] = await Promise.all([
2020-02-21 20:05:52 -08:00
glob('pages/**/*.{md,hbs,html,xml}', { cwd: ROOT }),
fs.readJson(resolve('twitter-config.json')).catch(() => null)
.then(getTwitterClient),
2020-02-22 20:32:51 -08:00
fs.readJson(resolve('twitter-backup.json')).catch(() => ({})),
fs.readJson(resolve('twitter-cache.json')).catch(() => ({})),
createAssetLoader(),
2020-02-21 20:05:52 -08:00
]);
2020-02-22 20:32:51 -08:00
let tweetsNeeded = [];
const tweetsPresent = Object.keys(twitterCache);
2020-02-21 20:05:52 -08:00
2020-02-22 20:32:51 -08:00
let pages = await Promise.map(files, async (filepath) => {
const page = new Page(filepath);
if (!page.input) return;
await page.load({ Assets });
2020-02-21 20:05:52 -08:00
2020-02-22 20:32:51 -08:00
if (page.tweets.length) {
const missing = difference(page.tweets, tweetsPresent);
tweetsNeeded.push(...missing);
2020-02-21 20:05:52 -08:00
}
2020-02-22 20:32:51 -08:00
return page;
2020-02-21 20:05:52 -08:00
});
pages = pages.filter(Boolean);
2020-02-22 20:32:51 -08:00
tweetsNeeded = uniq(tweetsNeeded);
2020-02-21 20:05:52 -08:00
/* Load Missing Tweets **************************************************/
if (tweetsNeeded.length) {
log('Fetching tweets: ' + tweetsNeeded.join(', '));
2020-02-22 20:32:51 -08:00
const arriving = await Promise.all(chunk(tweetsNeeded, 99).map(twitter));
2020-02-21 20:05:52 -08:00
const loaded = [];
for (const tweet of arriving.flat(1)) {
if (!twitterBackup[tweet.id_str]) twitterBackup[tweet.id_str] = tweet;
twitterCache[tweet.id_str] = tweetparse(tweet);
loaded.push(tweet.id_str);
}
const absent = difference(tweetsNeeded, loaded);
for (const id of absent) {
if (twitterBackup[id]) {
log('Pulled tweet from backup ' + id);
twitterCache[id] = tweetparse(twitterBackup[id]);
continue;
}
log.error('Could not find tweet ' + id);
}
}
/* Apply Tweets to Pages **************************************************/
const twitterMedia = [];
// now loop through pages and substitute the tweet data for the ids
for (const page of pages) {
if (!page.tweets || !page.tweets.length) continue;
page.tweets = page.tweets.reduce((dict, tweetid) => {
const tweet = twitterCache[tweetid];
if (!tweet) {
log.error(`Tweet ${tweetid} is missing from the cache.`);
return dict;
}
dict[tweetid] = tweet;
twitterMedia.push( ...tweet.media );
return dict;
}, {});
}
await Promise.all([
2020-02-22 20:32:51 -08:00
fs.writeFile(path.join(ROOT, 'pages.json'), JSON.stringify(pages.map((p) => p.toJson()), null, 2)),
2020-02-21 20:05:52 -08:00
fs.writeFile(path.join(ROOT, 'twitter-media.json'), JSON.stringify(twitterMedia, null, 2)),
fs.writeFile(path.join(ROOT, 'twitter-cache.json'), JSON.stringify(twitterCache, null, 2)),
fs.writeFile(path.join(ROOT, 'twitter-backup.json'), JSON.stringify(twitterBackup, null, 2)),
]);
return pages;
};
exports.write = async function writePageContent ({ prod }) {
const [ pages, { siteInfo }, engines ] = await Promise.all([
fs.readJson(resolve('pages.json')),
fs.readJson(resolve('package.json')),
getEngines(prod),
]);
await Promise.map(pages, async (page) => {
2020-02-22 20:32:51 -08:00
// page = new Page(page);
2020-02-21 20:05:52 -08:00
var data = {
...page,
2020-02-22 20:32:51 -08:00
meta: { ...page.meta, ...page },
2020-02-21 20:05:52 -08:00
page: {
domain: siteInfo.domain,
2020-02-22 20:32:51 -08:00
title: page.meta.title
? (page.meta.title + (page.meta.subtitle ? ', ' + page.meta.subtitle : '') + ' :: ' + siteInfo.title)
2020-02-21 20:05:52 -08:00
: siteInfo.title,
2020-02-22 20:32:51 -08:00
description: page.meta.description || siteInfo.description,
2020-02-21 20:05:52 -08:00
},
2020-02-23 20:13:20 -08:00
site: siteInfo,
2020-02-21 20:05:52 -08:00
local: {
cwd: page.cwd,
root: ROOT,
2020-02-22 20:32:51 -08:00
basename: page.basename,
2020-02-21 20:05:52 -08:00
},
pages,
};
2020-02-22 20:32:51 -08:00
const html = String(engines[page.engine](data.source, data));
2020-02-21 20:05:52 -08:00
const json = page.json && {
url: page.fullurl,
2020-02-22 20:32:51 -08:00
title: page.meta.title,
subtitle: page.meta.subtitle,
description: page.meta.description,
2020-02-21 20:05:52 -08:00
tweets: page.tweets,
images: page.images,
dateCreated: page.dateCreated,
dateModified: page.dateModified,
titlecard: page.titlecard,
2020-02-22 20:32:51 -08:00
preview: page.engine === 'md' && String(engines.preview(data.source, data)),
2020-02-21 20:05:52 -08:00
};
await fs.ensureDir(path.dirname(page.output));
await Promise.all([
fs.writeFile(page.output, Buffer.from(html)),
json && fs.writeFile(page.json, Buffer.from(prod ? JSON.stringify(json) : JSON.stringify(json, null, 2))),
]);
});
};
// Convenience wrapper: runs write() with production output enabled.
exports.write.prod = function writePageContentForProduction () {
  return exports.write({ prod: true });
};
/* Utility Functions **************************************************/
/**
 * Resolves a path against the project root. A leading slash is treated as
 * root-relative (stripped) rather than filesystem-absolute.
 * @param {string} fpath - path relative to the project root
 * @param {...string} args - further segments passed through to path.resolve
 * @returns {string} absolute path
 */
function resolve (fpath, ...args) {
  const relative = fpath.startsWith('/') ? fpath.slice(1) : fpath;
  return path.resolve(ROOT, relative, ...args);
}
/**
 * Builds a batch tweet fetcher from a twitter-lite config object. With no
 * config, returns a no-op fetcher yielding an empty array, so callers can
 * invoke the result unconditionally.
 * @param {object|null} config - twitter-lite credentials, or null/undefined
 * @returns {function(string[]): (Array|Promise<Array>)} fetcher taking a
 *   batch of tweet id strings; API errors are logged and yield [].
 */
function getTwitterClient (config) {
  if (!config) return () => [];

  const client = new Twitter(config);
  return (tweetids) => {
    const ids = tweetids.join(',');
    return client
      .get('statuses/lookup', { id: ids, tweet_mode: 'extended' })
      .catch((err) => { log.error(err); return []; });
  };
}