1
0
mirror of https://github.com/GenderDysphoria/GenderDysphoria.fyi.git synced 2025-11-26 04:52:43 +00:00

Burn it down and rise from the ashes

This commit is contained in:
Jocelyn Badgley (Twipped)
2020-02-25 19:37:10 -08:00
parent ba8ac1c8e7
commit 97398e6df4
19 changed files with 705 additions and 589 deletions

155
gulp/content/actions.js Normal file
View File

@@ -0,0 +1,155 @@
const path = require('path');
const fs = require('fs-extra');
const gm = require('gm');
const Promise = require('bluebird');
const fetch = require('make-fetch-happen');
const ico = require('png-to-ico');
const { resolve, readFile } = require('./resolve');
const actions = {
async copy ({ input, output }) {
await fs.copy(resolve(input), resolve(output));
return readFile(input);
},
async transcode ({ input, output }) {
const result = await actions.image({
input,
output,
format: 'jpeg',
});
return result;
},
async fetch ({ input, output }) {
const res = await fetch(input);
if (res.status !== 200) {
throw new Error(`File could not be fetched (${res.status}): "${input}"`);
}
const body = await res.buffer();
output = resolve(output);
await fs.ensureDir(path.dirname(output));
await fs.writeFile(output, body);
return body;
},
// Resize and/or transcode a single image with GraphicsMagick and write the
// result to options.output (project-relative). Returns the encoded Buffer.
// NOTE: mutates `options` in place (width/height/sharpen are normalized below).
// Recognized options: input, output, width, height, upscale, crop, fill
// ('crop'|'cover'|'box'|'contain'|default exact), gravity, bgColor, format,
// quality, samplingFactor, sharpen, flatten, interlace, background, noProfile.
async image (options) {
const output = resolve(options.output);
const contents = await readFile(options.input);
let gmfile = gm(contents, resolve(options.input));
// probe source dimensions; gm is callback-based, bluebird adapts it
const size = await Promise.fromCallback((cb) => gmfile.size(cb));
if (options.height || options.width) {
// if upscale is not requested, restrict size
if (!options.upscale) {
if (!isNaN(options.width)) {
options.width = Math.min(options.width, size.width);
}
if (!isNaN(options.height)) {
options.height = Math.min(options.height, size.height);
}
}
// if one dimension is not set - we fill it proportionally
if (!options.height) {
if (options.crop) {
options.height = size.height;
} else {
options.height = Math.ceil((options.width / size.width) * size.height);
}
}
if (!options.width) {
if (options.crop) {
options.width = size.width;
} else {
options.width = Math.ceil((options.height / size.height) * size.width);
}
}
if (options.fill === 'crop') {
// fill the target box, cropping overflow; pad with a border first when the
// source is smaller than the requested box
if (size.height < options.height || size.width < options.width) {
gmfile = gmfile
.geometry(options.width, options.height, '^')
.borderColor(options.bgColor || '#FFFFFF')
.border(options.width, options.height)
.gravity(options.gravity)
.crop(options.width, options.height);
} else {
gmfile = gmfile
.geometry(options.width, options.height, '^')
.gravity(options.gravity)
.crop(options.width, options.height);
}
} else if (options.fill === 'cover') {
// scale so the image covers the box; no crop, result may exceed one dimension
gmfile = gmfile
.geometry(options.width, options.height, '^');
} else if (options.fill === 'box') {
// fit inside the box, pad with border color, then crop to exact size
gmfile = gmfile
.geometry(options.width, options.height)
.borderColor(options.bgColor || '#FFFFFF')
.border(options.width, options.height)
.gravity(options.gravity)
.crop(options.width, options.height);
} else if (options.fill === 'contain') {
// fit entirely inside the box, preserving aspect ratio
gmfile = gmfile
.geometry(options.width, options.height);
} else {
// default: force exact dimensions, ignoring aspect ratio
gmfile = gmfile
.geometry(options.width, options.height, '!');
}
} else if (options.percentage) {
// no explicit box: scale by percentage instead
gmfile = gmfile
.geometry(options.percentage, null, '%');
}
if (options.format) {
// ico output is generated from an intermediate png (converted after render)
gmfile = gmfile
.setFormat(options.format === 'ico' ? 'png' : options.format);
}
// default encode quality is 95 when not specified
if (options.quality) {
gmfile = gmfile.quality(Math.floor(options.quality));
} else {
gmfile = gmfile.quality(Math.floor(95));
}
if (options.samplingFactor) {
// chroma subsampling, e.g. [2, 2] — TODO confirm expected tuple shape
gmfile = gmfile
.samplingFactor(options.samplingFactor[0], options.samplingFactor[1]);
}
if (options.sharpen) {
// sharpen: true selects a default unsharp mask; a string passes through as-is
options.sharpen = (typeof options.sharpen === 'string') ? options.sharpen : '1.5x1+0.7+0.02';
gmfile = gmfile.unsharp(options.sharpen);
}
if (options.flatten) {
gmfile = gmfile.flatten();
}
if (options.interlace) {
// progressive/interlaced encoding
gmfile = gmfile.interlace('Line');
}
if (options.background) {
gmfile = gmfile.background(options.background);
}
if (options.noProfile) {
// strip embedded color/EXIF profiles
gmfile = gmfile.noProfile();
}
await fs.ensureDir(path.dirname(output));
let result = await Promise.fromCallback((cb) => gmfile.toBuffer(cb));
// ico requested: wrap the rendered png into an icon container
if (options.format === 'ico') result = await ico(result);
await fs.writeFile(output, result);
return result;
},
};
module.exports = exports = actions;

View File

@@ -1,60 +1,10 @@
const { pick } = require('lodash');
const actions = require('./actions');
const path = require('path');
const glob = require('../lib/glob');
const getImageDimensions = require('../lib/dimensions');
const getVideoDimensions = require('get-video-dimensions');
const { keyBy, pick, filter, get, set, memoize } = require('lodash');
const actions = require('../imgflow/actions');
const ROOT = path.resolve(__dirname, '../..');
function resolve (...args) {
args = args.filter(Boolean);
let fpath = args.shift();
if (!fpath) return ROOT;
if (fpath[0] === '/') fpath = fpath.slice(1);
return path.resolve(ROOT, fpath, ...args);
}
module.exports = exports = async function findAssets () {
const files = await glob('pages/**/*.{jpeg,jpg,png,gif,mp4}', { cwd: ROOT });
const map = {};
const assets = (await Promise.all(files.map(async (filepath) => {
const asset = new Asset(path.relative(ROOT, filepath));
await asset.load();
set(map, [ ...asset.base.split('/'), asset.name ], asset);
return asset;
}))).filter(Boolean);
Object.freeze(map);
function within (dir) {
const subset = filter(assets, { dir });
return {
get titlecard () {
return get(filter(subset, { name: 'titlecard' }), [ 0, 'url' ]);
},
get assets () {
return keyBy(subset.map((a) => a.webready()), 'name');
},
get all () {
return [ ...subset ];
},
};
}
return {
map,
for: memoize(within),
get tasks () {
return assets.map((a) => a.tasks()).flat(1);
},
get all () {
return [ ...assets ];
},
};
};
const JPG = '.jpg';
const JPEG = '.jpeg';
@@ -74,8 +24,7 @@ const FILETYPE = {
const RESOLUTIONS = [ 2048, 1024, 768, 576, 300, 100 ];
class Asset {
module.exports = exports = class Asset {
constructor (filepath) {
const file = path.parse(filepath);
@@ -103,16 +52,14 @@ class Asset {
const i = dir.indexOf('_images');
if (i > -1) dir.splice(i, 1);
this.input = resolve(filepath); // /local/path/to/pages/file.ext
this.cwd = resolve(file.dir); // /local/path/to/pages/, pages/folder, pages/folder/subfolder
this.input = filepath; // pages/file.ext
this.base = path.join(...dir); // '', 'folder', 'folder/subfolder'
this.dir = path.join('/', ...dir); // /, /folder, /folder/subfolder
this.name = name; // index, fileA, fileB
this.basename = basename; // index.ext, fileA.ext, fileB.ext
this.dest = path.join('dist/', ...dir); // dist/, dist/folder, dist/folder/subfolder
this.ext = file.ext;
this.out = path.join(this.dest, `${this.name}${this.preprocessed ? this.ext : '.' + this.type}`);
this.out = path.join(this.base, `${this.name}${this.preprocessed ? this.ext : '.' + this.type}`);
this.url = path.join(this.dir, `${this.name}${this.preprocessed ? this.ext : '.' + this.type}`);
}
@@ -147,7 +94,7 @@ class Asset {
if (this.preprocessed) {
this.sizes = [ {
output: resolve(this.out),
output: this.out,
url: this.url,
width,
height,
@@ -155,7 +102,7 @@ class Asset {
} else {
this.sizes = [
{
output: resolve(this.out),
output: this.out,
url: this.url,
width,
height,
@@ -164,9 +111,10 @@ class Asset {
for (const w of RESOLUTIONS) {
if (w > width) continue;
const name = `${this.name}.${w}w.${this.type}`;
this.sizes.push({
output: resolve(this.dest, `${this.name}.${w}w.${this.type}`),
url: path.join(this.dir, `${this.name}.${w}w.${this.type}`),
output: path.join(this.base, name),
url: path.join(this.dir, name),
width: w,
height: Math.ceil((w / width) * height),
});
@@ -199,8 +147,8 @@ class Asset {
};
this.sizes = [ {
output: resolve(this.dest, this.basename),
url: path.join(this.dir, this.basename),
output: path.join(this.base, this.basename),
url: path.join(this.dir, this.basename),
width,
height,
} ];
@@ -214,12 +162,10 @@ class Asset {
'type',
'kind',
'input',
'cwd',
'base',
'dir',
'name',
'basename',
'dest',
'ext',
'dimensions',
]);
@@ -244,6 +190,13 @@ class Asset {
}));
}
}
};
exports.Asset = Asset;
exports.JPG = JPG;
exports.JPEG = JPEG;
exports.PNG = PNG;
exports.GIF = GIF;
exports.MP4 = MP4;
exports.M4V = M4V;
exports.FILETYPE = FILETYPE;
exports.RESOLUTIONS = RESOLUTIONS;

45
gulp/content/assets.js Normal file
View File

@@ -0,0 +1,45 @@
const glob = require('../lib/glob');
const { keyBy, filter, get, set, memoize } = require('lodash');
const { relative, ROOT } = require('./resolve');
const Asset = require('./asset');
module.exports = exports = async function createAssetFinder () {
const files = await glob('pages/**/*.{jpeg,jpg,png,gif,mp4}', { cwd: ROOT });
const map = {};
const assets = (await Promise.all(files.map(async (filepath) => {
const asset = new Asset(relative(filepath));
await asset.load();
set(map, [ ...asset.base.split('/'), asset.name ], asset);
return asset;
}))).filter(Boolean);
Object.freeze(map);
function within (dir) {
const subset = filter(assets, { dir });
return {
get titlecard () {
return get(filter(subset, { name: 'titlecard' }), [ 0, 'url' ]);
},
get assets () {
return keyBy(subset.map((a) => a.webready()), 'name');
},
get all () {
return [ ...subset ];
},
};
}
return {
map,
for: memoize(within),
get tasks () {
return assets.map((a) => a.tasks()).flat(1);
},
get all () {
return [ ...assets ];
},
};
};
exports.Asset = Asset;

214
gulp/content/cache.js Normal file
View File

@@ -0,0 +1,214 @@
const path = require('path');
const Promise = require('bluebird');
const fs = require('fs-extra');
const { memoize: memoizeSync } = require('lodash');
const memoizeAsync = require('memoizepromise');
const { resolve, readFile } = require('./resolve');
const { hasOwn, isFunction } = require('../lib/util');
const revHash = require('rev-hash');
const revPath = require('rev-path');
const CACHE = 'if-cache';
const MANIFEST = 'if-cache.json';
const REV_MANIFEST = 'rev-manifest.json';
// Build-cache manifest. For every (action, input, output) task it records when
// the input was last seen plus content hashes, so unchanged work can be skipped
// or replayed from the on-disk `if-cache` directory. Also accumulates a
// rev-manifest (output path -> revisioned path) for production builds.
module.exports = exports = class Manifest {
constructor ({ time = true, inputRev = true, prod = false, writeCount = 100, writeInterval = 10000 }) {
// which change-detection strategies are enabled (mtime and/or content hash)
this.compareBy = { time, inputRev };
this.manifest = {};
// memoized content-hash of a string (used on paths, not file contents)
this.rev = memoizeSync(revHash);
// mtime of a project file in whole seconds, or null when it doesn't exist
this.stat = memoizeAsync((f) =>
fs.stat(resolve(f))
.catch(() => null)
.then((stats) => (stats && Math.floor(stats.mtimeMs / 1000))),
);
// content hash of a project file, or null when unreadable
this.revFile = memoizeAsync((f) =>
readFile(f)
.then(revHash)
.catch(() => null),
);
this.isProd = prod;
// throttling state for interim manifest writes (see writeManifest)
this.writeCounter = 0;
this.lastWriteTime = 0;
this.writeCountThreshold = writeCount;
this.writeTimeThreshold = writeInterval;
this.revManifest = {};
}
// Read the persisted manifest (empty on first run) and ensure the cache dir exists.
async load () {
const [ manifest ] = await Promise.all([
fs.readJson(resolve(MANIFEST)).catch(() => ({})),
fs.ensureDir(resolve(CACHE)),
]);
this.manifest = manifest;
}
// Compute the manifest key for a task: action name + hashed input/output paths.
hash ({ action, input, output, ...task }) {
if (!isFunction(action)) throw new Error('Task action is not a task action (function).');
const name = action.name;
const hash = [
name,
this.rev(input),
this.rev(output),
];
// if this is an image operation, include the format and width in the hash
// (the array segment joins as "width,format" inside the dotted key)
if (name === 'image') hash.splice(1, 0, [ task.width, task.format ]);
return hash.join('.');
}
// True when this task has an entry in the loaded manifest.
has (task) {
const hash = this.hash(task);
return hasOwn(this.manifest, hash);
}
// Evaluate a task against the manifest and filesystem. Returns a status record
// whose `mode` is one of: 'skip' (output already exists), 'update' (input
// content changed), 'rebuild' (no usable cached artifact), 'cached' (artifact
// replayable from if-cache, attached as `.cache`), or 'new'.
async get (task) {
const hash = this.hash(task);
const { input, output } = task;
const ext = path.extname(task.output);
// inputs containing '://' are remote URLs and cannot be stat'ed/hashed locally
const local = !task.input.includes('://');
const cached = path.join(CACHE, hash + ext);
const result = {
iTime: 0,
iRev: null,
oRev: null,
...this.manifest[hash],
hash,
action: task.action.name,
input,
output,
mode: 'new',
};
const [ iTime, oTime, cTime, iRev ] = await Promise.all([
local && this.stat(input),
this.stat(output),
this.stat(cached),
local && this.compareBy.inputRev && this.revFile(input),
]);
if (local && !iTime) throw new Error('Input file does not exist: ' + input);
result.outputExists = !!oTime;
if (oTime) {
// output exists, we can move on
result.mode = 'skip';
return result;
}
if (this.compareBy.time && iTime <= result.iTime) {
// mtime unchanged since last seen; fall through to the cache-replay check
result.inputDiffers = false;
} else if (local && this.compareBy.inputRev && iRev !== result.iRev) {
// either we aren't checking time, or the time has changed
// check if the contents changed
result.inputDiffers = true;
result.iRev = iRev;
result.mode = 'update';
return result;
}
if (!cTime || cTime < iTime) {
// output does not exist in the cache or the cached file predates input, we need to remake.
result.inputDiffers = true;
result.oRev = null;
result.mode = 'rebuild';
return result;
}
result.mode = 'cached';
result.cache = await readFile(cached);
return result;
}
// Refresh a task's manifest entry without re-running it (used for 'skip'/'cached').
async touch (task, lastSeen = new Date()) {
const hash = this.hash(task);
const { input, output } = task;
const local = !task.input.includes('://');
const [ iTime, iRev ] = await Promise.all([
local && this.stat(input),
local && this.compareBy.inputRev && this.revFile(input),
]);
const record = {
...this.manifest[hash],
action: task.action.name,
hash,
input,
iTime,
iRev,
output,
// Date coerces to epoch ms here; stored as whole seconds
oTime: Math.floor(lastSeen / 1000),
lastSeen,
};
if (record.revPath) this.revManifest[output] = record.revPath;
this.manifest[hash] = record;
await this.writeManifest();
return { ...record };
}
// Record a freshly produced result: store the artifact in if-cache, hash it,
// compute its revisioned path, and update the manifest.
async set (task, result, lastSeen = new Date()) {
const hash = this.hash(task);
const { input, output } = task;
const ext = path.extname(task.output);
const local = !task.input.includes('://');
const cached = path.join(CACHE, hash + ext);
const oRev = revHash(result);
const [ iTime, iRev ] = await Promise.all([
local && this.stat(input),
local && this.compareBy.inputRev && this.revFile(input),
result && fs.writeFile(resolve(cached), result),
]);
const record = {
action: task.action.name,
hash,
input,
iTime,
iRev,
output,
oTime: Math.floor(lastSeen / 1000),
oRev,
lastSeen,
revPath: revPath(output, oRev),
};
this.revManifest[output] = record.revPath;
this.manifest[hash] = record;
await this.writeManifest();
return { ...record };
}
// Persist the manifest to disk. Unless forced, writes are throttled by call
// count and elapsed time, and suppressed entirely during prod builds.
async writeManifest (force) {
if (!force && this.isProd) return; // disable interim writes during prod builds.
if (!force && ++this.writeCounter % this.writeCountThreshold) return;
const now = Date.now();
if (!force && now - this.lastWriteTime < this.writeTimeThreshold) return;
this.lastWriteTime = now;
await fs.writeFile(resolve(MANIFEST), JSON.stringify(this.manifest, null, 2));
}
// Final flush: in prod, merge the accumulated rev-manifest over the previous
// one on disk; always force-write the task manifest.
async save () {
const revManifest = this.isProd && await fs.readJson(resolve(REV_MANIFEST))
.catch(() => ({}))
.then((old) => ({ ...old, ...this.revManifest }));
await Promise.all([
revManifest && fs.writeFile(resolve(REV_MANIFEST), JSON.stringify(revManifest, null, 2)),
this.writeManifest(true),
]);
}
};

57
gulp/content/evaluate.js Normal file
View File

@@ -0,0 +1,57 @@
const { sortBy } = require('lodash');
const { resolve } = require('./resolve');
const log = require('fancy-log');
const Promise = require('bluebird');
const fs = require('fs-extra');
// Which cache-evaluation modes get logged; quiet modes are false.
const LOG = {
new: true,
update: true,
skip: true,
rebuild: true,
cached: false,
copy: false,
};
module.exports = exports = async function process (tasks, cache) {
const lastSeen = new Date();
await Promise.map(sortBy(tasks, [ 'input', 'output' ]), async (task) => {
let result;
let status = await cache.get(task);
const { input, output } = task;
const taskLog = [ status.mode, status.input, status.output ];
if (status.mode === 'skip') {
await cache.touch(task, lastSeen);
if (taskLog && LOG[taskLog[0]]) log.info(...taskLog);
return status;
}
if (status.mode === 'cached') {
result = status.cache;
await fs.writeFile(resolve('dist', output), result);
await cache.touch(task, lastSeen);
} else {
try {
result = await task.action({
...task,
input,
output: 'dist/' + output,
});
} catch (err) {
log.error(`Task (${task.action.name}) failed for file ${output}.\n`, err);
return false;
}
status = await cache.set(task, result, lastSeen);
}
if (taskLog && LOG[taskLog[0]]) log.info(...taskLog);
if (cache.isProd) {
fs.writeFile(resolve('dist', status.revPath), result);
}
}, { concurrency: 1 });
};

45
gulp/content/favicon.js Normal file
View File

@@ -0,0 +1,45 @@
const fs = require('fs-extra');
const actions = require('./actions');
const { resolve } = require('./resolve');
// Return the first truthy element of an iterable, or false when none exists.
function any (input) {
  const hit = Array.from(input).find(Boolean);
  return hit === undefined ? false : hit;
}
// Candidate favicon source files at the project root, in order of preference.
const MATCHES = [
'favicon.png',
'favicon.gif',
'favicon.jpeg',
'favicon.jpg',
];
module.exports = exports = async function favicon () {
const input = any(await Promise.all(
MATCHES.map((f) =>
fs.exists(resolve(f)).then((y) => y && f),
),
));
if (!input) return [];
// input = resolve(input);
const tasks = [ 32, 57, 64, 76, 96, 114, 120, 128, 144, 152, 180, 192, 196, 228 ].map((width) => ({
input,
output: `favicon${width}.png`,
format: 'png',
width,
action: actions.image,
}));
tasks.push({
input,
output: 'favicon.ico',
format: 'ico',
action: actions.image,
});
return tasks;
};

View File

@@ -1,167 +1,69 @@
const path = require('path');
const glob = require('../lib/glob');
const { chunk, uniq, difference } = require('lodash');
const Promise = require('bluebird');
const fs = require('fs-extra');
const log = require('fancy-log');
const tweetparse = require('../lib/tweetparse');
const getEngines = require('./renderers');
const Twitter = require('twitter-lite');
const Page = require('./page');
const createAssetLoader = require('./files');
const ROOT = path.resolve(__dirname, '../..');
const createAssetFinder = require('./assets');
const Cache = require('./cache');
exports.parse = async function parsePageContent () {
const [ files, twitter, twitterBackup, twitterCache, Assets ] = await Promise.all([
glob('pages/**/*.{md,hbs,html,xml}', { cwd: ROOT }),
fs.readJson(resolve('twitter-config.json')).catch(() => null)
.then(getTwitterClient),
fs.readJson(resolve('twitter-backup.json')).catch(() => ({})),
fs.readJson(resolve('twitter-cache.json')).catch(() => ({})),
createAssetLoader(),
]);
const evaluate = require('./evaluate');
const pages = require('./pages');
let tweetsNeeded = [];
const tweetsPresent = Object.keys(twitterCache);
const twitter = require('./twitter');
const favicon = require('./favicon');
const assets = () => createAssetFinder().then(({ tasks }) => tasks);
let pages = await Promise.map(files, async (filepath) => {
const page = new Page(filepath);
if (!page.input) return;
await page.load({ Assets });
exports.everything = function (prod = false) {
const fn = async () => {
if (page.tweets.length) {
const missing = difference(page.tweets, tweetsPresent);
tweetsNeeded.push(...missing);
}
const AssetFinder = await createAssetFinder();
return page;
});
await pages.parse(AssetFinder);
pages = pages.filter(Boolean);
tweetsNeeded = uniq(tweetsNeeded);
/* Load Missing Tweets **************************************************/
if (tweetsNeeded.length) {
log('Fetching tweets: ' + tweetsNeeded.join(', '));
const arriving = await Promise.all(chunk(tweetsNeeded, 99).map(twitter));
const loaded = [];
for (const tweet of arriving.flat(1)) {
if (!twitterBackup[tweet.id_str]) twitterBackup[tweet.id_str] = tweet;
twitterCache[tweet.id_str] = tweetparse(tweet);
loaded.push(tweet.id_str);
}
const absent = difference(tweetsNeeded, loaded);
for (const id of absent) {
if (twitterBackup[id]) {
log('Pulled tweet from backup ' + id);
twitterCache[id] = tweetparse(twitterBackup[id]);
continue;
}
log.error('Could not find tweet ' + id);
}
}
/* Apply Tweets to Pages **************************************************/
const twitterMedia = [];
// now loop through pages and substitute the tweet data for the ids
for (const page of pages) {
if (!page.tweets || !page.tweets.length) continue;
page.tweets = page.tweets.reduce((dict, tweetid) => {
const tweet = twitterCache[tweetid];
if (!tweet) {
log.error(`Tweet ${tweetid} is missing from the cache.`);
return dict;
}
dict[tweetid] = tweet;
twitterMedia.push( ...tweet.media );
return dict;
}, {});
}
await Promise.all([
fs.writeFile(path.join(ROOT, 'pages.json'), JSON.stringify(pages.map((p) => p.toJson()), null, 2)),
fs.writeFile(path.join(ROOT, 'twitter-media.json'), JSON.stringify(twitterMedia, null, 2)),
fs.writeFile(path.join(ROOT, 'twitter-cache.json'), JSON.stringify(twitterCache, null, 2)),
fs.writeFile(path.join(ROOT, 'twitter-backup.json'), JSON.stringify(twitterBackup, null, 2)),
]);
return pages;
};
exports.write = async function writePageContent ({ prod }) {
const [ pages, { siteInfo }, engines ] = await Promise.all([
fs.readJson(resolve('pages.json')),
fs.readJson(resolve('package.json')),
getEngines(prod),
]);
await Promise.map(pages, async (page) => {
// page = new Page(page);
var data = {
...page,
meta: { ...page.meta, ...page },
page: {
domain: siteInfo.domain,
title: page.meta.title
? (page.meta.title + (page.meta.subtitle ? ', ' + page.meta.subtitle : '') + ' :: ' + siteInfo.title)
: siteInfo.title,
description: page.meta.description || siteInfo.description,
},
site: siteInfo,
local: {
cwd: page.cwd,
root: ROOT,
basename: page.basename,
},
pages,
};
const html = String(engines[page.engine](data.source, data));
const json = page.json && {
url: page.fullurl,
title: page.meta.title,
subtitle: page.meta.subtitle,
description: page.meta.description,
tweets: page.tweets,
images: page.images,
dateCreated: page.dateCreated,
dateModified: page.dateModified,
titlecard: page.titlecard,
preview: page.engine === 'md' && String(engines.preview(data.source, data)),
};
await fs.ensureDir(path.dirname(page.output));
await Promise.all([
fs.writeFile(page.output, Buffer.from(html)),
json && fs.writeFile(page.json, Buffer.from(prod ? JSON.stringify(json) : JSON.stringify(json, null, 2))),
const tasks = await Promise.all([
AssetFinder.tasks,
twitter(prod),
favicon(prod),
]);
});
if (!tasks.length) return;
const cache = new Cache({ prod });
await cache.load();
await evaluate(tasks.flat(), cache);
await cache.save();
await pages.write(prod);
};
const ret = () => fn().catch((err) => { console.log(err.trace || err); throw err; });
ret.displayName = prod ? 'generateEverythingForProd' : 'generateEverything';
return ret;
};
exports.write.prod = function writePageContentForProduction () { return exports.write({ prod: true }); };
exports.task = function (action, prod = false) {
let fn;
if (action === 'parse') {
fn = () => pages.parse();
} else if (action === 'pages') {
fn = () => pages.write(prod);
} else {
fn = async () => {
const tasks = await {
twitter,
favicon,
assets,
}[action](prod);
/* Utility Functions **************************************************/
if (!tasks.length) return;
function resolve (fpath, ...args) {
if (fpath[0] === '/') fpath = fpath.slice(1);
return path.resolve(ROOT, fpath, ...args);
}
const cache = new Cache({ prod });
await cache.load();
await evaluate(tasks, cache);
await cache.save();
};
}
function getTwitterClient (config) {
if (!config) return () => [];
const client = new Twitter(config);
return (tweetids) => client
.get('statuses/lookup', { id: tweetids.join(','), tweet_mode: 'extended' })
.catch((e) => { log.error(e); return []; });
}
const ret = () => fn().catch((err) => { console.log(err.trace || err); throw err; });
ret.displayName = prod ? action + 'ForProd' : action;
return ret;
};

View File

@@ -6,8 +6,8 @@ const log = require('fancy-log');
const frontmatter = require('front-matter');
const { URL } = require('url');
const { pick, omit } = require('lodash');
const { resolve, readFile } = require('./resolve');
const ROOT = path.resolve(__dirname, '../..');
const pkg = require(resolve('package.json'));
@@ -30,14 +30,6 @@ function parseTweetId (tweetid) {
return false;
}
function resolve (...args) {
args = args.filter(Boolean);
let fpath = args.shift();
if (!fpath) return ROOT;
if (fpath[0] === '/') fpath = fpath.slice(1);
return path.resolve(ROOT, fpath, ...args);
}
module.exports = exports = class Page {
@@ -63,38 +55,35 @@ module.exports = exports = class Page {
const i = dir.indexOf('_images');
if (i > -1) dir.splice(i, 1);
this.input = resolve(filepath); // /local/path/to/pages/file.ext
this.cwd = resolve(file.dir); // /local/path/to/pages/, pages/folder, pages/folder/subfolder
this.input = filepath; // /local/path/to/pages/file.ext
this.cwd = file.dir; // /local/path/to/pages/, pages/folder, pages/folder/subfolder
this.base = path.join(...dir); // '', 'folder', 'folder/subfolder'
this.dir = path.join('/', ...dir); // /, /folder, /folder/subfolder
this.name = name; // index, fileA, fileB
this.basename = basename; // index.ext, fileA.ext, fileB.ext
this.dest = path.join('dist/', ...dir); // dist/, dist/folder, dist/folder/subfolder
this.ext = file.ext;
var isIndexPage = (name === 'index');
var isCleanUrl = [ HBS, MD ].includes(ext);
if (isCleanUrl && isIndexPage) {
this.out = path.join(this.dest, 'index.html');
this.json = path.join(this.dest, 'index.json');
this.url = this.dir;
this.output = path.join(this.base, 'index.html');
this.json = path.join(this.base, 'index.json');
this.url = this.dir;
} else if (isCleanUrl) {
this.out = path.join(this.dest, this.name, 'index.html');
this.json = path.join(this.dest, this.name + '.json');
this.url = path.join(this.dir, this.name);
this.output = path.join(this.base, this.name, 'index.html');
this.json = path.join(this.base, this.name + '.json');
this.url = path.join(this.dir, this.name);
} else if (isIndexPage) {
this.out = path.join(this.dest, 'index.html');
this.json = path.join(this.dest, this.name + '.json');
this.url = this.dir;
this.output = path.join(this.base, 'index.html');
this.json = path.join(this.base, this.name + '.json');
this.url = this.dir;
} else {
this.out = path.join(this.dest, this.basename);
this.json = path.join(this.dest, this.basename + '.json');
this.url = path.join(this.dir, this.basename);
this.output = path.join(this.base, this.basename);
this.json = path.join(this.base, this.basename + '.json');
this.url = path.join(this.dir, this.basename);
}
this.output = resolve(this.out);
const url = new URL(pkg.siteInfo.siteUrl);
url.pathname = this.url;
this.fullurl = url.href;
@@ -111,8 +100,8 @@ module.exports = exports = class Page {
async load ({ Assets }) {
const [ raw, { ctime, mtime } ] = await Promise.all([
fs.readFile(this.input).catch(() => null),
fs.stat(this.input).catch(() => {}),
readFile(this.input).catch(() => null),
fs.stat(this.input).catch(() => ({})),
]);
const { titlecard, assets } = Assets.for(this.dir);

161
gulp/content/pages.js Normal file
View File

@@ -0,0 +1,161 @@
const path = require('path');
const glob = require('../lib/glob');
const { chunk, uniq, difference } = require('lodash');
const Promise = require('bluebird');
const fs = require('fs-extra');
const log = require('fancy-log');
const tweetparse = require('../lib/tweetparse');
const getEngines = require('./renderers');
const Twitter = require('twitter-lite');
const Page = require('./page');
const createAssetFinder = require('./assets');
const { resolve, ROOT } = require('./resolve');
// Parse every page source under pages/, resolve the tweets each page embeds
// (from cache, backup, or the Twitter API), and persist pages.json plus the
// twitter media/cache/backup manifests. Accepts an optional pre-built asset
// finder; builds one otherwise. Returns the loaded Page instances.
exports.parse = async function parsePageContent (assetFinder) {
const [ files, twitter, twitterBackup, twitterCache, Assets ] = await Promise.all([
glob('pages/**/*.{md,hbs,html,xml}', { cwd: ROOT }),
// missing twitter-config.json yields a no-op client (see getTwitterClient)
fs.readJson(resolve('twitter-config.json')).catch(() => null)
.then(getTwitterClient),
fs.readJson(resolve('twitter-backup.json')).catch(() => ({})),
fs.readJson(resolve('twitter-cache.json')).catch(() => ({})),
assetFinder || createAssetFinder(),
]);
let tweetsNeeded = [];
const tweetsPresent = Object.keys(twitterCache);
let pages = await Promise.map(files, async (filepath) => {
const page = new Page(filepath);
if (!page.input) return;
await page.load({ Assets });
// collect tweet ids referenced by the page that are not yet cached
if (page.tweets.length) {
const missing = difference(page.tweets, tweetsPresent);
tweetsNeeded.push(...missing);
}
return page;
});
pages = pages.filter(Boolean);
tweetsNeeded = uniq(tweetsNeeded);
/* Load Missing Tweets **************************************************/
if (tweetsNeeded.length) {
log('Fetching tweets: ' + tweetsNeeded.join(', '));
// statuses/lookup accepts batches; chunk below the API's 100-id limit
const arriving = await Promise.all(chunk(tweetsNeeded, 99).map(twitter));
const loaded = [];
for (const tweet of arriving.flat(1)) {
if (!twitterBackup[tweet.id_str]) twitterBackup[tweet.id_str] = tweet;
twitterCache[tweet.id_str] = tweetparse(tweet);
loaded.push(tweet.id_str);
}
// anything the API didn't return (deleted/private) falls back to the backup file
const absent = difference(tweetsNeeded, loaded);
for (const id of absent) {
if (twitterBackup[id]) {
log('Pulled tweet from backup ' + id);
twitterCache[id] = tweetparse(twitterBackup[id]);
continue;
}
log.error('Could not find tweet ' + id);
}
}
/* Apply Tweets to Pages **************************************************/
const twitterMedia = [];
// now loop through pages and substitute the tweet data for the ids
for (const page of pages) {
if (!page.tweets || !page.tweets.length) continue;
page.tweets = page.tweets.reduce((dict, tweetid) => {
const tweet = twitterCache[tweetid];
if (!tweet) {
log.error(`Tweet ${tweetid} is missing from the cache.`);
return dict;
}
dict[tweetid] = tweet;
// media entries are collected so the asset pipeline can download them
twitterMedia.push( ...tweet.media );
return dict;
}, {});
}
// persist everything so later build stages can run from disk alone
await Promise.all([
fs.writeFile(resolve('pages.json'), JSON.stringify(pages.map((p) => p.toJson()), null, 2)),
fs.writeFile(resolve('twitter-media.json'), JSON.stringify(twitterMedia, null, 2)),
fs.writeFile(resolve('twitter-cache.json'), JSON.stringify(twitterCache, null, 2)),
fs.writeFile(resolve('twitter-backup.json'), JSON.stringify(twitterBackup, null, 2)),
]);
return pages;
};
// Render every page from the persisted pages.json into dist/: html via the
// page's template engine, plus an optional sidecar .json (pretty-printed in
// dev, minified in prod). Assumes pages.json entries carry the fields produced
// by Page#toJson (source, meta, engine, output, ...) — TODO confirm schema.
exports.write = async function writePageContent (prod) {
const [ pages, { siteInfo }, engines ] = await Promise.all([
fs.readJson(resolve('pages.json')),
fs.readJson(resolve('package.json')),
getEngines(prod),
]);
await Promise.map(pages, async (page) => {
// page = new Page(page);
// template context: page fields at top level, plus site/meta/local helpers
var data = {
...page,
meta: { ...page.meta, ...page },
page: {
domain: siteInfo.domain,
// "Title, Subtitle :: Site" when a page title exists, else the site title
title: page.meta.title
? (page.meta.title + (page.meta.subtitle ? ', ' + page.meta.subtitle : '') + ' :: ' + siteInfo.title)
: siteInfo.title,
description: page.meta.description || siteInfo.description,
},
site: siteInfo,
local: {
cwd: page.cwd,
root: ROOT,
basename: page.basename,
},
pages,
};
const html = String(engines[page.engine](data.source, data));
// sidecar json for client-side consumption; only when the page declares one
const json = page.json && {
url: page.fullurl,
title: page.meta.title,
subtitle: page.meta.subtitle,
description: page.meta.description,
tweets: page.tweets,
images: page.images,
dateCreated: page.dateCreated,
dateModified: page.dateModified,
titlecard: page.titlecard,
// markdown pages also get a rendered preview snippet
preview: page.engine === 'md' && String(engines.preview(data.source, data)),
};
const output = resolve('dist', page.output);
await fs.ensureDir(path.dirname(output));
await Promise.all([
fs.writeFile(output, Buffer.from(html)),
json && fs.writeFile(resolve('dist', page.json), Buffer.from(
prod ? JSON.stringify(json) : JSON.stringify(json, null, 2),
)),
]);
});
};
/* Utility Functions **************************************************/
// Build a batch tweet fetcher from a twitter-lite config. Without a config,
// returns a stub that always yields an empty list; API errors are logged and
// swallowed (returning []) so a twitter outage never fails the build.
function getTwitterClient (config) {
  if (!config) return () => [];
  const client = new Twitter(config);
  return async (tweetids) => {
    try {
      return await client.get('statuses/lookup', { id: tweetids.join(','), tweet_mode: 'extended' });
    } catch (e) {
      log.error(e);
      return [];
    }
  };
}

View File

@@ -1,10 +1,10 @@
const path = require('path');
const ROOT = path.resolve(__dirname, '../..');
const fs = require('fs-extra');
const log = require('fancy-log');
const { minify } = require('html-minifier-terser');
const { resolve, readFile } = require('./resolve');
const handlebars = require('handlebars');
const HandlebarsKit = require('hbs-kit');
@@ -93,15 +93,15 @@ const HANDLEBARS_PARTIALS = {
module.exports = exports = async function (prod) {
for (const [ name, file ] of Object.entries(HANDLEBARS_PARTIALS)) {
try {
const contents = await fs.readFile(path.resolve(ROOT, file));
const contents = await readFile(file);
const template = handlebars.compile(contents.toString('utf8'));
handlebars.registerPartial(name, template);
} catch (e) {
log.error('Could not execute load partial ' + path.relative(ROOT, file), e);
log.error('Could not execute load partial ' + file, e);
}
}
const pageTemplateRaw = await fs.readFile(path.join(ROOT, 'templates/post.hbs'));
const pageTemplateRaw = await readFile('templates/post.hbs');
if (!pageTemplateRaw) throw new Error('Post template was empty?');
try {
var pageTemplate = handlebars.compile(pageTemplateRaw.toString('utf8'));
@@ -109,7 +109,7 @@ module.exports = exports = async function (prod) {
log.error('Crash while loading post template', e);
}
const revManifest = prod && await fs.readJson(path.join(ROOT, 'rev-manifest.json')).catch(() => {}).then((r) => r || {});
const revManifest = prod && await fs.readJson(resolve('rev-manifest.json')).catch(() => {}).then((r) => r || {});
const helpers = new Injectables(prod, revManifest);
handlebars.registerHelper('import', helpers.import());
@@ -145,8 +145,8 @@ class Injectables {
}
_parsePath (tpath, local, type) {
if (tpath[0] === '/') tpath = path.join(local.root, tpath);
else if (tpath[0] === '~') tpath = path.join(local.root, 'templates', tpath.slice(2));
if (tpath[0] === '/') tpath = resolve(tpath.slice(1));
else if (tpath[0] === '~') tpath = resolve('templates', tpath.slice(2));
else tpath = path.resolve(local.cwd, tpath);
if (type && !tpath.endsWith(type)) tpath += '.' + type;
return tpath;
@@ -156,7 +156,7 @@ class Injectables {
if (this.injections[tpath]) return this.injections[tpath];
if (!fs.existsSync(tpath)) {
log.error('Injectable does not exist: ' + path.relative(ROOT, tpath));
log.error('Injectable does not exist: ' + tpath);
return '';
}
@@ -167,7 +167,7 @@ class Injectables {
this.injections[tpath] = contents;
return contents;
} catch (e) {
log.error(e, 'An error occured while loading the injectable: ' + path.relative(ROOT, tpath));
log.error(e, 'An error occured while loading the injectable: ' + tpath);
}
return '';

26
gulp/content/resolve.js Normal file
View File

@@ -0,0 +1,26 @@
const path = require('path');
const ROOT = path.resolve(__dirname, '../..');
const fs = require('fs-extra');
exports.readFile = function readFile (fpath) {
fpath = exports.resolve(fpath);
return fs.readFile(fpath).catch((err) => {
throw new Error(err.trace);
});
};
exports.resolve = function resolve (...args) {
args = args.filter(Boolean);
let fpath = args.shift();
if (!fpath) return ROOT;
if (fpath[0] === '/') throw new Error('Did you mean to resolve this? ' + fpath);
if (fpath[0] === '/') fpath = fpath.slice(1);
return path.resolve(ROOT, fpath, ...args);
};
exports.relative = function relative (fpath) {
return path.relative(ROOT, fpath);
};
exports.ROOT = ROOT;

13
gulp/content/twitter.js Normal file
View File

@@ -0,0 +1,13 @@
const fs = require('fs-extra');
const actions = require('./actions');
const { uniqBy } = require('lodash');
const { resolve } = require('./resolve');
module.exports = exports = async function twitter () {
const media = await fs.readJson(resolve('twitter-media.json')).catch(() => ([]));
const tasks = uniqBy(media, 'input')
.map((m) => ({ ...m, action: actions.fetch, output: m.output }));
return tasks;
};