More burndown.

Jocelyn Badgley (Twipped)
2020-02-27 18:57:39 -08:00
parent 2df7574697
commit e95f2cf3db
17 changed files with 736 additions and 528 deletions


@@ -1,72 +1,29 @@
const { pick } = require('lodash');
const actions = require('./actions');
const path = require('path');
const { pick } = require('lodash');
const actions = require('./actions');
const File = require('./file');
const { TYPE } = require('./resolve');
const getImageDimensions = require('../lib/dimensions');
const getVideoDimensions = require('get-video-dimensions');
const JPG = '.jpg';
const JPEG = '.jpeg';
const PNG = '.png';
const GIF = '.gif';
const MP4 = '.mp4';
const M4V = '.m4v';
const FILETYPE = {
[JPG]: 'jpeg',
[JPEG]: 'jpeg',
[PNG]: 'png',
[GIF]: 'gif',
[MP4]: 'mp4',
[M4V]: 'mp4',
};
const RESOLUTIONS = [ 2048, 1024, 768, 576, 300, 100 ];
module.exports = exports = class Asset {
module.exports = exports = class Asset extends File {
constructor (filepath) {
const file = path.parse(filepath);
let { base: basename, name } = file;
super(filepath);
this.preprocessed = false;
if (name[0] === '_') {
this.preprocessed = true;
file.name = name = name.slice(1);
file.basename = basename = basename.slice(1);
}
this.type = FILETYPE[file.ext] || file.ext.slice(1);
if ([ JPG, JPEG, PNG, GIF ].includes(file.ext)) {
this.kind = 'image';
} else if ([ MP4, M4V ].includes(file.ext)) {
this.kind = 'video';
} else {
this.kind = 'raw';
}
// remove the pages root and any _images segment from the dir
const dir = file.dir.split('/');
if (dir[0] === 'pages') dir.shift();
const i = dir.indexOf('_images');
if (i > -1) dir.splice(i, 1);
this.input = filepath; // pages/file.ext
this.base = path.join(...dir); // '', 'folder', 'folder/subfolder'
this.dir = path.join('/', ...dir); // /, /folder, /folder/subfolder
this.name = name; // index, fileA, fileB
this.basename = basename; // index.ext, fileA.ext, fileB.ext
this.ext = file.ext;
this.out = path.join(this.base, `${this.name}${this.preprocessed ? this.ext : '.' + this.type}`);
this.url = path.join(this.dir, `${this.name}${this.preprocessed ? this.ext : '.' + this.type}`);
this.serializable.push(
'dimensions',
'sizes',
);
}
load () {
switch (this.kind) {
case 'video': return this.loadVideo();
case 'image': return this.loadImage();
switch (this.type) {
case TYPE.VIDEO: return this.loadVideo();
case TYPE.IMAGE: return this.loadImage();
default:
}
}
@@ -111,7 +68,7 @@ module.exports = exports = class Asset {
for (const w of RESOLUTIONS) {
if (w > width) continue;
const name = `${this.name}.${w}w.${this.type}`;
const name = `${this.name}.${w}w${this.ext}`;
this.sizes.push({
output: path.join(this.base, name),
url: path.join(this.dir, name),
@@ -156,27 +113,12 @@ module.exports = exports = class Asset {
return this;
}
toJson () {
return pick(this, [
'preprocessed',
'type',
'kind',
'input',
'base',
'dir',
'name',
'basename',
'ext',
'dimensions',
]);
}
webready () {
const { kind, name } = this;
const { type, name, sizes } = this;
return {
kind,
type,
name,
sizes: this.sizes.map((s) => pick(s, [ 'url', 'width', 'height' ])),
sizes: sizes.map((s) => pick(s, [ 'url', 'width', 'height' ])),
};
}
@@ -184,19 +126,10 @@ module.exports = exports = class Asset {
return this.sizes.map(({ output, width }) => ({
input: this.input,
output,
format: this.preprocessed ? undefined : this.type,
format: this.preprocessed ? undefined : this.ext.slice(1),
width: this.preprocessed ? undefined : width,
action: this.preprocessed ? actions.copy : actions.image,
}));
}
};
exports.JPG = JPG;
exports.JPEG = JPEG;
exports.PNG = PNG;
exports.GIF = GIF;
exports.MP4 = MP4;
exports.M4V = M4V;
exports.FILETYPE = FILETYPE;
exports.RESOLUTIONS = RESOLUTIONS;
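
A rough usage sketch of the slimmed-down Asset, which now inherits its path parsing from the File base class introduced below (path and values are illustrative):

const a = new Asset('public/gallery/_images/photo.jpg');
a.kind; // 'ASSET' — classified via resolve.kind() instead of hardcoded extension lists
a.type; // 'IMAGE'
a.out;  // 'gallery/photo.jpeg' — '_images' segment stripped, extension normalized
a.url;  // '/gallery/photo.jpeg'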


@@ -1,45 +0,0 @@
const glob = require('../lib/glob');
const { keyBy, filter, get, set, memoize } = require('lodash');
const { relative, ROOT } = require('./resolve');
const Asset = require('./asset');
module.exports = exports = async function createAssetFinder () {
const files = await glob('pages/**/*.{jpeg,jpg,png,gif,mp4}', { cwd: ROOT });
const map = {};
const assets = (await Promise.all(files.map(async (filepath) => {
const asset = new Asset(relative(filepath));
await asset.load();
set(map, [ ...asset.base.split('/'), asset.name ], asset);
return asset;
}))).filter(Boolean);
Object.freeze(map);
function within (dir) {
const subset = filter(assets, { dir });
return {
get titlecard () {
return get(filter(subset, { name: 'titlecard' }), [ 0, 'url' ]);
},
get assets () {
return keyBy(subset.map((a) => a.webready()), 'name');
},
get all () {
return [ ...subset ];
},
};
}
return {
map,
for: memoize(within),
get tasks () {
return assets.map((a) => a.tasks()).flat(1);
},
get all () {
return [ ...assets ];
},
};
};
exports.Asset = Asset;


@@ -4,7 +4,7 @@ const path = require('path');
const fs = require('fs-extra');
const log = require('fancy-log');
const { minify } = require('html-minifier-terser');
const { resolve, readFile } = require('./resolve');
const { resolve, readFile, ENGINE } = require('./resolve');
const handlebars = require('handlebars');
const HandlebarsKit = require('hbs-kit');
@@ -121,18 +121,15 @@ module.exports = exports = async function (prod) {
const shrink = (input) => (prod ? minify(input, MINIFY_CONFIG) : input);
const result = {
hbs: (source, env) => {
[ENGINE.HANDLEBARS]: (source, env) => {
const template = handlebars.compile(source);
return shrink(template(env));
},
md: (source, env) => shrink(pageTemplate({ ...env, contents: markdown('full', source, env) })),
raw: (source) => shrink(source),
preview: (source, env) => markdown('preview', source, env),
[ENGINE.MARKDOWN]: (source, env) => shrink(pageTemplate({ ...env, contents: markdown('full', source, env) })),
[ENGINE.OTHER]: (source) => shrink(source),
PREVIEW: (source, env) => markdown('preview', source, env),
};
// result.handlebars.engine = handlebars;
// result.markdown.engine = markdownEngines.full;
return result;
};
@@ -153,11 +150,11 @@ class Injectables {
}
_template (tpath, make) {
if (!tpath) throw new Error('Received an empty template path: ' + tpath);
if (this.injections[tpath]) return this.injections[tpath];
if (!fs.existsSync(tpath)) {
log.error('Injectable does not exist: ' + tpath);
return '';
throw new Error('Injectable does not exist: ' + tpath);
}
let contents;
@@ -226,7 +223,7 @@ class Injectables {
const contents = self._template(tpath, handlebars.compile)(context);
return new handlebars.SafeString(contents);
} catch (e) {
log.error('Could not execute import template ' + path.relative(ROOT, tpath), e);
log.error('Could not execute import template ' + tpath, e);
return '';
}
};
@@ -245,7 +242,7 @@ class Injectables {
return new handlebars.SafeString(contents);
} catch (e) {
log.error('Could not execute import template ' + path.relative(ROOT, tpath), e);
log.error('Could not execute import template ' + tpath, e);
return '';
}
};
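
A sketch of how the re-keyed renderer map is consumed (inside an async function; ENGINE comes from ./resolve, and the env argument is simplified):

const engines = await getEngines(prod);
engines[ENGINE.HANDLEBARS](page.source, env); // hbs/html/xml pages
engines[ENGINE.MARKDOWN](page.source, env);   // full markdown page render
engines.PREVIEW(page.source, env);            // markdown preview fragment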


@@ -1,4 +1,4 @@
const { sortBy } = require('lodash');
const { sortBy, uniqBy } = require('lodash');
const { resolve } = require('./resolve');
const log = require('fancy-log');
const Promise = require('bluebird');
@@ -16,7 +16,10 @@ const LOG = {
module.exports = exports = async function process (tasks, cache) {
const lastSeen = new Date();
await Promise.map(sortBy(tasks, [ 'input', 'output' ]), async (task) => {
tasks = uniqBy(tasks, 'output');
tasks = sortBy(tasks, [ 'input', 'output' ]);
await Promise.map(tasks, async (task) => {
let result;
let status = await cache.get(task);
const { input, output } = task;
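
The new uniqBy pass guards against two tasks racing to write the same output file; a quick illustration:

const { sortBy, uniqBy } = require('lodash');
let tasks = [
  { input: 'public/a.jpg', output: 'a.jpeg' },
  { input: 'public/a.jpg', output: 'a.jpeg' },   // duplicate output: dropped
  { input: 'public/b.md',  output: 'b/index.html' },
];
tasks = uniqBy(tasks, 'output');              // keeps the first task per output path
tasks = sortBy(tasks, [ 'input', 'output' ]); // stable processing order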

gulp/content/file.js (new file)

@@ -0,0 +1,91 @@
const path = require('path');
const { pick } = require('lodash');
const {
normalizedExt,
kind,
type,
} = require('./resolve');
const actions = require('./actions');
module.exports = exports = class File {
constructor (filepath) {
if (filepath && typeof filepath === 'object') {
// we've been passed a json object, treat as serialized Page
Object.assign(this, filepath);
return this;
}
const file = path.parse(filepath);
let { base: basename, name } = file;
this.preprocessed = false;
if (name[0] === '_') {
this.preprocessed = true;
file.name = name = name.slice(1);
file.basename = basename = basename.slice(1);
}
// remove the public root and any _images segment from the dir
const dir = file.dir.split('/');
if (dir[0] === 'public') dir.shift();
const i = dir.indexOf('_images');
if (i > -1) dir.splice(i, 1);
this.kind = kind(filepath);
this.type = type(filepath);
this.cwd = file.dir;
this.ext = this.preprocessed ? file.ext : normalizedExt(file.ext);
this.input = filepath; // public/file.ext
this.base = path.join(...dir); // '', 'folder', 'folder/subfolder'
this.dir = path.join('/', ...dir); // /, /folder, /folder/subfolder
this.name = name; // index, fileA, fileB
this.basename = basename; // index.ext, fileA.ext, fileB.ext
this.out = path.join(this.base, `${this.name}${this.ext}`);
this.url = path.join(this.dir, `${this.name}${this.ext}`);
this.serializable = [
'kind',
'type',
'ext',
'input',
'base',
'dir',
'name',
'basename',
'preprocessed',
'out',
'url',
];
}
load () {}
tasks () {
return [ {
input: this.input,
output: this.out,
action: actions.copy,
} ];
}
toJson () {
return pick(this, this.serializable);
}
};
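
The serializable array is the contract subclasses extend; a brief sketch of the round trip (hypothetical path):

const f = new File('public/notes/readme.txt');
const json = f.toJson();   // plain object containing only the keys listed in f.serializable
const g = new File(json);  // passing an object back rehydrates a File instance
// Subclasses widen the contract: Asset pushes 'dimensions' and 'sizes',
// Page pushes 'meta', 'tweets', etc., so toJson() grows without overrides.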


@@ -1,69 +1,52 @@
const createAssetFinder = require('./assets');
const loadPublicFiles = require('./public');
const Cache = require('./cache');
const Promise = require('bluebird');
const fs = require('fs-extra');
const primeTweets = require('./page-tweets');
const pageWriter = require('./page-writer');
const evaluate = require('./evaluate');
const { resolve } = require('./resolve');
const pages = require('./pages');
const twitter = require('./twitter');
const favicon = require('./favicon');
const assets = () => createAssetFinder().then(({ tasks }) => tasks);
const svg = require('./svg');
exports.everything = function (prod = false) {
const fn = async () => {
const AssetFinder = await createAssetFinder();
// load a directory scan of the public folder
const PublicFiles = await loadPublicFiles();
await pages.parse(AssetFinder);
// load data for all the files in that folder
await Promise.map(PublicFiles.all, (p) => p.load(PublicFiles));
// prime tweet data for all pages
const pages = await primeTweets(PublicFiles.pages);
// compile all tasks to be completed
const tasks = await Promise.all([
AssetFinder.tasks,
twitter(prod),
PublicFiles.tasks,
svg(prod),
favicon(prod),
]);
if (!tasks.length) return;
async function crankTasks () {
if (!tasks.length) return;
const cache = new Cache({ prod });
await cache.load();
await evaluate(tasks.flat(), cache);
await cache.save();
}
const cache = new Cache({ prod });
await cache.load();
await evaluate(tasks.flat(), cache);
await cache.save();
await pages.write(prod);
await Promise.all([
fs.writeFile(resolve('pages.json'), JSON.stringify(pages.map((p) => p.toJson()), null, 2)),
pageWriter(pages, prod),
crankTasks(),
]);
};
const ret = () => fn().catch((err) => { console.log(err.trace || err); throw err; });
ret.displayName = prod ? 'generateEverythingForProd' : 'generateEverything';
return ret;
};
exports.task = function (action, prod = false) {
let fn;
if (action === 'parse') {
fn = () => pages.parse();
} else if (action === 'pages') {
fn = () => pages.write(prod);
} else {
fn = async () => {
const tasks = await {
twitter,
favicon,
assets,
}[action](prod);
if (!tasks.length) return;
const cache = new Cache({ prod });
await cache.load();
await evaluate(tasks, cache);
await cache.save();
};
}
const ret = () => fn().catch((err) => { console.log(err.trace || err); throw err; });
ret.displayName = prod ? action + 'ForProd' : action;
return ret;
};
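
Assuming this module is still wired into the gulpfile the same way, usage is unchanged (task names are illustrative):

const content = require('./gulp/content');
gulp.task('content', content.everything(false));
gulp.task('content-prod', content.everything(true));
gulp.task('twitter', content.task('twitter', false));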


@@ -0,0 +1,89 @@
const { chunk, uniq, difference } = require('lodash');
const fs = require('fs-extra');
const { resolve } = require('./resolve');
const log = require('fancy-log');
const tweetparse = require('../lib/tweetparse');
const Twitter = require('twitter-lite');
module.exports = exports = async function tweets (pages) {
const [ twitter, twitterBackup, twitterCache ] = await Promise.all([
fs.readJson(resolve('twitter-config.json')).catch(() => null)
.then(getTwitterClient),
fs.readJson(resolve('twitter-backup.json')).catch(() => ({})),
fs.readJson(resolve('twitter-cache.json')).catch(() => ({})),
]);
let tweetsNeeded = [];
const tweetsPresent = Object.keys(twitterCache);
for (const page of pages) {
if (!page.tweets || !page.tweets.length) continue;
const missing = difference(page.tweets, tweetsPresent);
tweetsNeeded.push(...missing);
}
tweetsNeeded = uniq(tweetsNeeded);
/* Load Missing Tweets **************************************************/
if (tweetsNeeded.length) {
log('Fetching tweets: ' + tweetsNeeded.join(', '));
const arriving = await Promise.all(chunk(tweetsNeeded, 99).map(twitter));
const loaded = [];
for (const tweet of arriving.flat(1)) {
if (!twitterBackup[tweet.id_str]) twitterBackup[tweet.id_str] = tweet;
twitterCache[tweet.id_str] = tweetparse(tweet);
loaded.push(tweet.id_str);
}
const absent = difference(tweetsNeeded, loaded);
for (const id of absent) {
if (twitterBackup[id]) {
log('Pulled tweet from backup ' + id);
twitterCache[id] = tweetparse(twitterBackup[id]);
continue;
}
log.error('Could not find tweet ' + id);
}
}
/* Apply Tweets to Pages **************************************************/
const twitterMedia = [];
// now loop through pages and substitute the tweet data for the ids
for (const page of pages) {
if (!page.tweets || !page.tweets.length) continue;
page.tweets = page.tweets.reduce((dict, tweetid) => {
const tweet = twitterCache[tweetid];
if (!tweet) {
log.error(`Tweet ${tweetid} is missing from the cache.`);
return dict;
}
dict[tweetid] = tweet;
twitterMedia.push( ...tweet.media );
return dict;
}, {});
}
await Promise.all([
fs.writeFile(resolve('twitter-media.json'), JSON.stringify(twitterMedia, null, 2)),
fs.writeFile(resolve('twitter-cache.json'), JSON.stringify(twitterCache, null, 2)),
fs.writeFile(resolve('twitter-backup.json'), JSON.stringify(twitterBackup, null, 2)),
]);
return pages;
};
/* Utility Functions **************************************************/
function getTwitterClient (config) {
if (!config) return () => [];
const client = new Twitter(config);
return (tweetids) => client
.get('statuses/lookup', { id: tweetids.join(','), tweet_mode: 'extended' })
.catch((e) => { log.error(e); return []; });
}
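
In the new pipeline this step runs between loading pages and writing them; roughly (inside an async function):

// page.tweets starts as an array of tweet id strings parsed from frontmatter.
// primeTweets fetches uncached ids 99 at a time, falls back to twitter-backup.json,
// and replaces each array with an id -> parsed-tweet dictionary.
const pages = await primeTweets(PublicFiles.pages);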


@@ -0,0 +1,56 @@
const path = require('path');
const Promise = require('bluebird');
const fs = require('fs-extra');
const getEngines = require('./engines');
const { resolve, ROOT, ENGINE } = require('./resolve');
const { siteInfo } = require(resolve('package.json'));
module.exports = exports = async function writePageContent (pages, prod) {
const engines = await getEngines(prod);
await Promise.map(pages, async (page) => {
// page = new Page(page);
var data = {
...page,
meta: { ...page.meta, ...page },
page: {
domain: siteInfo.domain,
title: page.meta.title
? (page.meta.title + (page.meta.subtitle ? ', ' + page.meta.subtitle : '') + ' :: ' + siteInfo.title)
: siteInfo.title,
description: page.meta.description || siteInfo.description,
},
site: siteInfo,
local: {
cwd: resolve(page.cwd),
root: ROOT,
basename: page.basename,
},
pages,
};
const html = String(engines[page.engine](data.source, data));
const json = page.json && {
url: page.fullurl,
title: page.meta.title,
subtitle: page.meta.subtitle,
description: page.meta.description,
tweets: page.tweets,
images: page.images,
dateCreated: page.dateCreated,
dateModified: page.dateModified,
titlecard: page.titlecard,
preview: page.engine === ENGINE.MARKDOWN && String(engines.PREVIEW(data.source, data)),
};
const output = resolve('dist', page.output);
await fs.ensureDir(path.dirname(output));
await Promise.all([
fs.writeFile(output, Buffer.from(html)),
json && fs.writeFile(resolve('dist', page.json), Buffer.from(
prod ? JSON.stringify(json) : JSON.stringify(json, null, 2),
)),
]);
}, { concurrency: 1 });
};
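
The composed page.title follows this pattern (hypothetical values):

// meta = { title: 'Hello', subtitle: 'World' }, siteInfo = { title: 'Example.com' }
// -> page.title === 'Hello, World :: Example.com'
// without meta.title, the site title is used on its own.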


@@ -3,74 +3,42 @@ const path = require('path');
const Promise = require('bluebird');
const fs = require('fs-extra');
const log = require('fancy-log');
const frontmatter = require('front-matter');
const File = require('./file');
const actions = require('./actions');
const { URL } = require('url');
const { pick, omit } = require('lodash');
const { resolve, readFile } = require('./resolve');
const { resolve, readFile, isCleanUrl, ENGINE } = require('./resolve');
const { isObject } = require('../lib/util');
const pkg = require(resolve('package.json'));
const frontmatter = require('front-matter');
/* Utility Functions **************************************************/
const MD = '.md';
const HBS = '.hbs';
const HTML = '.html';
const XML = '.xml';
const tweeturl = /https?:\/\/twitter\.com\/(?:#!\/)?(?:\w+)\/status(?:es)?\/(\d+)/i;
const tweetidcheck = /^\d+$/;
function parseTweetId (tweetid) {
// we can't trust an id that isn't a string
if (typeof tweetid !== 'string') return false;
const match = tweetid.match(tweeturl);
if (match) return match[1];
if (tweetid.match(tweetidcheck)) return tweetid;
return false;
}
module.exports = exports = class Page {
module.exports = exports = class Page extends File {
constructor (filepath) {
if (filepath && typeof filepath === 'object') {
// we've been passed a json object, treat as serialized Page
Object.assign(this, filepath);
return this;
}
super(filepath);
const file = path.parse(filepath);
const { base: basename, name, ext } = file;
this.serializable.push(
'fullurl',
'engine',
'source',
'meta',
'images',
'titlecard',
'tweets',
'dateCreated',
'dateModified',
'classes',
'flags',
);
// this file is an include, skip it.
if (name[0] === '_') return false;
var isIndexPage = (this.name === 'index');
var isClean = isCleanUrl(this.ext);
// this is not a page file
if (![ MD, HBS, HTML, XML ].includes(ext)) return false;
// remove the pages root and any _images segment from the dir
const dir = file.dir.split('/');
if (dir[0] === 'pages') dir.shift();
const i = dir.indexOf('_images');
if (i > -1) dir.splice(i, 1);
this.input = filepath; // /local/path/to/pages/file.ext
this.cwd = file.dir; // /local/path/to/pages/, pages/folder, pages/folder/subfolder
this.base = path.join(...dir); // '', 'folder', 'folder/subfolder'
this.dir = path.join('/', ...dir); // /, /folder, /folder/subfolder
this.name = name; // index, fileA, fileB
this.basename = basename; // index.ext, fileA.ext, fileB.ext
this.ext = file.ext;
var isIndexPage = (name === 'index');
var isCleanUrl = [ HBS, MD ].includes(ext);
if (isCleanUrl && isIndexPage) {
if (isClean && isIndexPage) {
this.output = path.join(this.base, 'index.html');
this.json = path.join(this.base, 'index.json');
this.url = this.dir;
} else if (isCleanUrl) {
} else if (isClean) {
this.output = path.join(this.base, this.name, 'index.html');
this.json = path.join(this.base, this.name + '.json');
this.url = path.join(this.dir, this.name);
@@ -88,23 +56,16 @@ module.exports = exports = class Page {
url.pathname = this.url;
this.fullurl = url.href;
if ([ HBS, HTML, XML ].includes(ext)) {
this.engine = 'hbs';
} else if (ext === MD) {
this.engine = 'md';
} else {
this.engine = 'raw';
}
this.engine = ENGINE[this.type] || ENGINE.COPY;
}
async load ({ Assets }) {
async load (PublicFiles) {
const [ raw, { ctime, mtime } ] = await Promise.all([
readFile(this.input).catch(() => null),
fs.stat(this.input).catch(() => ({})),
]);
const { titlecard, assets } = Assets.for(this.dir);
const { titlecard, assets } = PublicFiles.for(this.dir);
// empty file
if (!raw || !ctime) {
@@ -137,36 +98,27 @@ module.exports = exports = class Page {
return this;
}
toJson () {
const j = pick(this, [
'input',
'output',
'json',
'dateCreated',
'dateModified',
'cwd',
'base',
'dir',
'name',
'ext',
'basename',
'dest',
'out',
'url',
'fullurl',
'engine',
'source',
'images',
'assets',
'titlecard',
'tweets',
'classes',
'flags',
]);
tasks () {
if (!isObject(this.tweets)) return [];
j.meta = omit(this.meta, [ 'date', 'classes', 'tweets' ]);
return j;
return Object.values(this.tweets)
.map((t) => t.media)
.flat()
.map((m) => ({ ...m, action: actions.fetch, output: m.output }));
}
};
/* Utility Functions **************************************************/
const tweeturl = /https?:\/\/twitter\.com\/(?:#!\/)?(?:\w+)\/status(?:es)?\/(\d+)/i;
const tweetidcheck = /^\d+$/;
function parseTweetId (tweetid) {
// we can't trust an id that isn't a string
if (typeof tweetid !== 'string') return false;
const match = tweetid.match(tweeturl);
if (match) return match[1];
if (tweetid.match(tweetidcheck)) return tweetid;
return false;
}
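
parseTweetId moved below the class body but behaves as before (illustrative ids):

parseTweetId('https://twitter.com/someone/status/1232936126680244225'); // '1232936126680244225'
parseTweetId('1232936126680244225'); // already an id string, passed through
parseTweetId(1232936126680244225);   // false — only strings are trusted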


@@ -1,161 +0,0 @@
const path = require('path');
const glob = require('../lib/glob');
const { chunk, uniq, difference } = require('lodash');
const Promise = require('bluebird');
const fs = require('fs-extra');
const log = require('fancy-log');
const tweetparse = require('../lib/tweetparse');
const getEngines = require('./renderers');
const Twitter = require('twitter-lite');
const Page = require('./page');
const createAssetFinder = require('./assets');
const { resolve, ROOT } = require('./resolve');
exports.parse = async function parsePageContent (assetFinder) {
const [ files, twitter, twitterBackup, twitterCache, Assets ] = await Promise.all([
glob('pages/**/*.{md,hbs,html,xml}', { cwd: ROOT }),
fs.readJson(resolve('twitter-config.json')).catch(() => null)
.then(getTwitterClient),
fs.readJson(resolve('twitter-backup.json')).catch(() => ({})),
fs.readJson(resolve('twitter-cache.json')).catch(() => ({})),
assetFinder || createAssetFinder(),
]);
let tweetsNeeded = [];
const tweetsPresent = Object.keys(twitterCache);
let pages = await Promise.map(files, async (filepath) => {
const page = new Page(filepath);
if (!page.input) return;
await page.load({ Assets });
if (page.tweets.length) {
const missing = difference(page.tweets, tweetsPresent);
tweetsNeeded.push(...missing);
}
return page;
});
pages = pages.filter(Boolean);
tweetsNeeded = uniq(tweetsNeeded);
/* Load Missing Tweets **************************************************/
if (tweetsNeeded.length) {
log('Fetching tweets: ' + tweetsNeeded.join(', '));
const arriving = await Promise.all(chunk(tweetsNeeded, 99).map(twitter));
const loaded = [];
for (const tweet of arriving.flat(1)) {
if (!twitterBackup[tweet.id_str]) twitterBackup[tweet.id_str] = tweet;
twitterCache[tweet.id_str] = tweetparse(tweet);
loaded.push(tweet.id_str);
}
const absent = difference(tweetsNeeded, loaded);
for (const id of absent) {
if (twitterBackup[id]) {
log('Pulled tweet from backup ' + id);
twitterCache[id] = tweetparse(twitterBackup[id]);
continue;
}
log.error('Could not find tweet ' + id);
}
}
/* Apply Tweets to Pages **************************************************/
const twitterMedia = [];
// now loop through pages and substitute the tweet data for the ids
for (const page of pages) {
if (!page.tweets || !page.tweets.length) continue;
page.tweets = page.tweets.reduce((dict, tweetid) => {
const tweet = twitterCache[tweetid];
if (!tweet) {
log.error(`Tweet ${tweetid} is missing from the cache.`);
return dict;
}
dict[tweetid] = tweet;
twitterMedia.push( ...tweet.media );
return dict;
}, {});
}
await Promise.all([
fs.writeFile(resolve('pages.json'), JSON.stringify(pages.map((p) => p.toJson()), null, 2)),
fs.writeFile(resolve('twitter-media.json'), JSON.stringify(twitterMedia, null, 2)),
fs.writeFile(resolve('twitter-cache.json'), JSON.stringify(twitterCache, null, 2)),
fs.writeFile(resolve('twitter-backup.json'), JSON.stringify(twitterBackup, null, 2)),
]);
return pages;
};
exports.write = async function writePageContent (prod) {
const [ pages, { siteInfo }, engines ] = await Promise.all([
fs.readJson(resolve('pages.json')),
fs.readJson(resolve('package.json')),
getEngines(prod),
]);
await Promise.map(pages, async (page) => {
// page = new Page(page);
var data = {
...page,
meta: { ...page.meta, ...page },
page: {
domain: siteInfo.domain,
title: page.meta.title
? (page.meta.title + (page.meta.subtitle ? ', ' + page.meta.subtitle : '') + ' :: ' + siteInfo.title)
: siteInfo.title,
description: page.meta.description || siteInfo.description,
},
site: siteInfo,
local: {
cwd: page.cwd,
root: ROOT,
basename: page.basename,
},
pages,
};
const html = String(engines[page.engine](data.source, data));
const json = page.json && {
url: page.fullurl,
title: page.meta.title,
subtitle: page.meta.subtitle,
description: page.meta.description,
tweets: page.tweets,
images: page.images,
dateCreated: page.dateCreated,
dateModified: page.dateModified,
titlecard: page.titlecard,
preview: page.engine === 'md' && String(engines.preview(data.source, data)),
};
const output = resolve('dist', page.output);
await fs.ensureDir(path.dirname(output));
await Promise.all([
fs.writeFile(output, Buffer.from(html)),
json && fs.writeFile(resolve('dist', page.json), Buffer.from(
prod ? JSON.stringify(json) : JSON.stringify(json, null, 2),
)),
]);
});
};
/* Utility Functions **************************************************/
function getTwitterClient (config) {
if (!config) return () => [];
const client = new Twitter(config);
return (tweetids) => client
.get('statuses/lookup', { id: tweetids.join(','), tweet_mode: 'extended' })
.catch((e) => { log.error(e); return []; });
}

gulp/content/public.js (new file)

@@ -0,0 +1,62 @@
const glob = require('../lib/glob');
const { groupBy, keyBy, filter, find, get, memoize } = require('lodash');
const { ROOT, kind, KIND } = require('./resolve');
const File = require('./file');
const Asset = require('./asset');
const Page = require('./page');
const Promise = require('bluebird');
const KIND_MAP = {
[KIND.PAGE]: Page,
[KIND.ASSET]: Asset,
[KIND.OTHER]: File,
};
module.exports = exports = async function loadPublicFiles () {
const files = await Promise.map(glob('public/**/*', { cwd: ROOT, nodir: true }), (filepath) => {
const k = kind(filepath);
const F = KIND_MAP[k];
const f = new F(filepath);
if (f.kind === KIND.PAGE && f.preprocessed) return false;
return f;
}).filter(Boolean);
const {
[KIND.PAGE]: pages,
[KIND.ASSET]: assets,
} = groupBy(files, 'kind');
function within (dir) {
const subset = filter(files, { dir });
const getTitlecard = memoize(() =>
get(find(subset, { name: 'titlecard' }), 'url'),
);
const {
[KIND.PAGE]: subpages,
[KIND.ASSET]: subassets,
} = groupBy(subset, 'kind');
return {
all: subset,
get titlecard () { return getTitlecard(); },
get pages () {
return subpages;
},
get assets () {
return keyBy(subassets, 'name');
},
};
}
return {
all: files,
pages,
assets,
for: memoize(within),
get tasks () {
return files.map((a) => a.tasks()).flat(1);
},
};
};
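
A usage sketch (inside an async function; directory name illustrative):

const PublicFiles = await loadPublicFiles();
PublicFiles.pages;          // every Page found under public/
PublicFiles.for('/essays'); // memoized per-directory view: { all, titlecard, pages, assets }
PublicFiles.tasks;          // flattened copy/transform tasks for every file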


@@ -2,6 +2,129 @@
const path = require('path');
const ROOT = path.resolve(__dirname, '../..');
const fs = require('fs-extra');
const { is: _is, re } = require('../lib/util');
function is (...args) {
const fn = _is(...args);
const ret = (ext) => fn(normalizedExt(ext));
ret.matching = args;
return ret;
}
function dictMatch (dict, def) {
const arr = Object.entries(dict);
return (tok) => {
for (const [ key, fn ] of arr) {
// console.log({ key, tok, r: fn(tok), matching: fn.matching })
if (fn(tok)) return key;
}
return def;
};
}
const EXT = exports.EXT = {
JPG: '.jpg',
JPEG: '.jpeg',
PNG: '.png',
GIF: '.gif',
MP4: '.mp4',
M4V: '.m4v',
MD: '.md',
HBS: '.hbs',
HTML: '.html',
XML: '.xml',
};
const {
JPG,
JPEG,
PNG,
GIF,
MP4,
M4V,
MD,
HBS,
HTML,
XML,
} = EXT;
exports.RE = {
JPG: re(/\.jpg$/),
JPEG: re(/\.jpeg$/),
PNG: re(/\.png$/),
GIF: re(/\.gif$/),
MP4: re(/\.mp4$/),
M4V: re(/\.m4v$/),
MD: re(/\.md$/),
HBS: re(/\.hbs$/),
HTML: re(/\.html$/),
XML: re(/\.xml$/),
};
const NORMALIZE_EXT = {
[JPG]: JPEG,
[M4V]: MP4,
[HBS]: HTML,
};
const normalizedExt = exports.normalizedExt = (ext) => {
if (ext[0] !== '.') ext = '.' + ext.split('.').pop();
return NORMALIZE_EXT[ext] || ext;
};
const isVideo = exports.isVideo = is(MP4, M4V);
const isImage = exports.isImage = is(JPG, JPEG, PNG, GIF);
const isHandlebars = exports.isHandlebars = is(XML, HBS, HTML);
const isMarkdown = exports.isMarkdown = is(MD);
const isPage = exports.isPage = is(isHandlebars, isMarkdown);
const isAsset = exports.isAsset = is(isImage, isVideo);
exports.isCleanUrl = is(HBS, HTML, MD);
const TYPE = exports.TYPE = {
IMAGE: 'IMAGE',
VIDEO: 'VIDEO',
HANDLEBARS: 'HANDLEBARS',
MARKDOWN: 'MARKDOWN',
OTHER: 'OTHER',
};
exports.type = dictMatch({
[TYPE.IMAGE]: isImage,
[TYPE.HANDLEBARS]: isHandlebars,
[TYPE.MARKDOWN]: isMarkdown,
[TYPE.VIDEO]: isVideo,
}, TYPE.OTHER);
const KIND = exports.KIND = {
PAGE: 'PAGE',
ASSET: 'ASSET',
OTHER: 'OTHER',
};
exports.kind = dictMatch({
[KIND.ASSET]: isAsset,
[KIND.PAGE]: isPage,
}, KIND.OTHER);
const ENGINE = exports.ENGINE = {
HANDLEBARS: 'HANDLEBARS',
MARKDOWN: 'MARKDOWN',
COPY: 'COPY',
};
exports.engine = dictMatch({
[ENGINE.HANDLEBARS]: is(XML, HBS, HTML),
[ENGINE.MARKDOWN]: is(MD),
}, ENGINE.COPY);
exports.readFile = function readFile (fpath) {
fpath = exports.resolve(fpath);
@@ -15,7 +138,7 @@ exports.resolve = function resolve (...args) {
let fpath = args.shift();
if (!fpath) return ROOT;
if (fpath[0] === '/') throw new Error('Did you mean to resolve this? ' + fpath);
if (fpath[0] === '/') fpath = fpath.slice(1);
// if (fpath[0] === '/') fpath = fpath.slice(1);
return path.resolve(ROOT, fpath, ...args);
};
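
The classifiers compose, so one path can be bucketed at several levels (illustrative results):

const { type, kind, engine, normalizedExt, TYPE, KIND, ENGINE } = require('./gulp/content/resolve');
normalizedExt('.jpg');    // '.jpeg' — jpg/m4v/hbs collapse to one canonical form
type('a/photo.png');      // TYPE.IMAGE
kind('a/photo.png');      // KIND.ASSET
kind('a/essay.md');       // KIND.PAGE
engine('a/feed.xml');     // ENGINE.HANDLEBARS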

gulp/content/svg.js (new file)

@@ -0,0 +1,15 @@
const glob = require('../lib/glob');
const { ROOT } = require('./resolve');
const actions = require('./actions');
module.exports = exports = async function svgIcons () {
const files = await glob('svg/**/*.svg', { cwd: ROOT });
const tasks = files.map((f) => ({
input: f,
output: 'images/' + f,
action: actions.copy,
}));
return tasks;
};
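
Each emitted task is a plain copy instruction, e.g. (hypothetical filename):

// { input: 'svg/icons/feather.svg', output: 'images/svg/icons/feather.svg', action: actions.copy }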