mirror of
https://github.com/GenderDysphoria/GenderDysphoria.fyi.git
synced 2025-11-25 20:42:40 +00:00
Rewrote gulp/content
This commit is contained in:
84
gulp/content/files.js
Normal file
84
gulp/content/files.js
Normal file
@@ -0,0 +1,84 @@
|
||||
const path = require('path');
|
||||
const glob = require('../lib/glob');
|
||||
const memoize = require('memoizepromise');
|
||||
const getDimensions = require('../lib/dimensions');
|
||||
const { keyBy } = require('lodash');
|
||||
|
||||
const RESOLUTIONS = [ 2048, 1024, 768, 576, 300, 100 ];
|
||||
|
||||
module.exports = exports = function () {
|
||||
return memoize(async (cwd, siteDir) => {
|
||||
const imageFiles = (await glob('{*,_images/*}.{jpeg,jpg,png,gif,mp4}', { cwd }));
|
||||
|
||||
const images = (await Promise.all(imageFiles.map(async (imgpath) => {
|
||||
|
||||
const ext = path.extname(imgpath);
|
||||
let basename = path.basename(imgpath, ext);
|
||||
|
||||
if (basename === 'titlecard') return;
|
||||
|
||||
if (ext === '.mp4') {
|
||||
return {
|
||||
name: basename,
|
||||
type: 'movie',
|
||||
full: path.join(siteDir, `${basename}${ext}`),
|
||||
};
|
||||
}
|
||||
|
||||
const dimensions = await getDimensions(path.resolve(cwd, imgpath));
|
||||
const { width, height } = dimensions;
|
||||
dimensions.ratioH = Math.round((height / width) * 100);
|
||||
dimensions.ratioW = Math.round((width / height) * 100);
|
||||
if (dimensions.ratioH > 100) {
|
||||
dimensions.orientation = 'tall';
|
||||
} else if (dimensions.ratioH === 100) {
|
||||
dimensions.orientation = 'square';
|
||||
} else {
|
||||
dimensions.orientation = 'wide';
|
||||
}
|
||||
|
||||
if (basename[0] === '_') {
|
||||
basename = basename.slice(1);
|
||||
}
|
||||
|
||||
const filetype = {
|
||||
'.jpeg': 'jpeg',
|
||||
'.jpg': 'jpeg',
|
||||
'.png': 'png',
|
||||
'.gif': 'gif',
|
||||
}[ext];
|
||||
|
||||
const sizes = [
|
||||
{
|
||||
url: path.join(siteDir, `${basename}.${filetype}`),
|
||||
width: dimensions.width,
|
||||
height: dimensions.height,
|
||||
},
|
||||
];
|
||||
|
||||
for (const w of RESOLUTIONS) {
|
||||
if (w > dimensions.width) continue;
|
||||
sizes.push({
|
||||
url: path.join(siteDir, `${basename}.${w}w.${filetype}`),
|
||||
width: w,
|
||||
height: Math.ceil((w / dimensions.width) * dimensions.height),
|
||||
});
|
||||
}
|
||||
|
||||
sizes.reverse();
|
||||
|
||||
return {
|
||||
name: basename,
|
||||
type: 'image',
|
||||
sizes,
|
||||
};
|
||||
}))).filter(Boolean);
|
||||
|
||||
const titlecard = (await glob('titlecard.{jpeg,jpg,png,gif}', { cwd }))[0];
|
||||
|
||||
return {
|
||||
images: keyBy(images, 'name'),
|
||||
titlecard: titlecard ? path.join(siteDir, titlecard) : '/images/titlecard.png',
|
||||
};
|
||||
});
|
||||
};
|
||||
305
gulp/content/index.js
Normal file
305
gulp/content/index.js
Normal file
@@ -0,0 +1,305 @@
|
||||
const path = require('path');
|
||||
const glob = require('../lib/glob');
|
||||
const { chunk, uniq, difference } = require('lodash');
|
||||
const Promise = require('bluebird');
|
||||
const fs = require('fs-extra');
|
||||
const log = require('fancy-log');
|
||||
const tweetparse = require('../lib/tweetparse');
|
||||
const getEngines = require('./renderers');
|
||||
const Twitter = require('twitter-lite');
|
||||
const frontmatter = require('front-matter');
|
||||
const createFileLoader = require('./files');
|
||||
const { URL } = require('url');
|
||||
|
||||
const ROOT = path.resolve(__dirname, '../..');
|
||||
|
||||
exports.parse = async function parsePageContent () {
|
||||
const [ files, twitter, twitterBackup, twitterCache, { siteInfo } ] = await Promise.all([
|
||||
glob('pages/**/*.{md,hbs,html,xml}', { cwd: ROOT }),
|
||||
fs.readJson(resolve('twitter-config.json')).catch(() => null)
|
||||
.then(getTwitterClient),
|
||||
fs.readJson(resolve('twitter-backup.json')).catch(() => {}),
|
||||
fs.readJson(resolve('twitter-cache.json')).catch(() => {}),
|
||||
fs.readJson(resolve('package.json')).catch(() => ({})),
|
||||
]);
|
||||
const loadFiles = createFileLoader();
|
||||
|
||||
const tweetsNeeded = [];
|
||||
|
||||
let pages = await Promise.map(files, async (filepath) => {
|
||||
const { dir, name, ext } = path.parse(filepath);
|
||||
const basename = path.basename(filepath);
|
||||
|
||||
// this is an include, skip it.
|
||||
if (name[0] === '_') return;
|
||||
|
||||
const cwd = resolve(dir);
|
||||
const input = resolve(filepath);
|
||||
const outDir = path.join('dist', dir.slice(6));
|
||||
const siteDir = `/${dir.slice(6)}`;
|
||||
|
||||
// if cwd === ROOT then we're in the bottom directory and there is no base
|
||||
const base = path.relative(cwd, ROOT) && path.basename(dir);
|
||||
|
||||
/* Load Page Content **************************************************/
|
||||
const [ raw, { ctime, mtime }, { images, titlecard } ] = await Promise.all([
|
||||
fs.readFile(input).catch(() => null),
|
||||
stat(input),
|
||||
loadFiles(cwd, siteDir),
|
||||
]);
|
||||
|
||||
// empty file
|
||||
if (!raw) return;
|
||||
|
||||
try {
|
||||
var { attributes: meta, body } = frontmatter(raw.toString('utf8'));
|
||||
} catch (e) {
|
||||
log.error('Error while parsing frontmatter for ' + filepath, e);
|
||||
return;
|
||||
}
|
||||
|
||||
// page is marked to be ignored, skip it.
|
||||
if (meta.ignore) return;
|
||||
|
||||
meta.path = filepath;
|
||||
meta.cwd = cwd;
|
||||
meta.base = base;
|
||||
meta.outDir = outDir;
|
||||
meta.input = input;
|
||||
meta.source = body;
|
||||
meta.dateCreated = meta.date && new Date(meta.date) || ctime;
|
||||
meta.dateModified = mtime;
|
||||
meta.siteDir = siteDir;
|
||||
meta.name = name;
|
||||
meta.ext = ext;
|
||||
meta.titlecard = titlecard;
|
||||
meta.images = images;
|
||||
|
||||
var flags = new Set(meta.classes || []);
|
||||
var isIndexPage = meta.isIndex = (name === 'index');
|
||||
var isRootPage = meta.isRoot = (siteDir === '/');
|
||||
var isCleanUrl = meta.isCleanUrl = [ '.hbs', '.md' ].includes(ext);
|
||||
|
||||
if ([ '.hbs', '.html', '.xml' ].includes(ext)) {
|
||||
meta.engine = 'hbs';
|
||||
} else if (ext === '.md') {
|
||||
meta.engine = 'md';
|
||||
} else {
|
||||
meta.engine = 'raw';
|
||||
}
|
||||
|
||||
flags.add(titlecard ? 'has-titlecard' : 'no-titlecard');
|
||||
flags.add(meta.title ? 'has-title' : 'no-title');
|
||||
flags.add(meta.subtitle ? 'has-subtitle' : 'no-subtitle');
|
||||
flags.add(meta.description ? 'has-descrip' : 'no-descrip');
|
||||
|
||||
let slug, output, jsonOutput;
|
||||
if (isRootPage) {
|
||||
if (isCleanUrl) {
|
||||
slug = '';
|
||||
output = resolve(outDir, name, 'index.html');
|
||||
jsonOutput = resolve(outDir, name + '.json');
|
||||
} else {
|
||||
slug = '';
|
||||
output = resolve(outDir, basename);
|
||||
jsonOutput = resolve(outDir, basename + '.json');
|
||||
}
|
||||
} else if (isCleanUrl) {
|
||||
slug = name;
|
||||
if (isIndexPage) {
|
||||
output = resolve(outDir, 'index.html');
|
||||
} else {
|
||||
output = resolve(outDir, name, 'index.html');
|
||||
}
|
||||
jsonOutput = resolve(outDir, name + '.json');
|
||||
} else {
|
||||
slug = base;
|
||||
output = resolve(outDir, basename);
|
||||
jsonOutput = resolve(outDir, basename + '.json');
|
||||
}
|
||||
meta.slug = slug;
|
||||
meta.output = output;
|
||||
meta.json = jsonOutput;
|
||||
|
||||
const url = new URL(siteInfo.siteUrl);
|
||||
if ([ '.hbs', '.md' ].includes(ext)) {
|
||||
url.pathname = path.join(siteDir, slug);
|
||||
} else if (isIndexPage) {
|
||||
url.pathname = siteDir;
|
||||
} else {
|
||||
url.pathname = path.join(siteDir, path.basename(filepath));
|
||||
}
|
||||
meta.url = url.pathname;
|
||||
meta.fullurl = url.toString();
|
||||
|
||||
|
||||
/* Process Tweets **************************************************/
|
||||
|
||||
const tweets = [];
|
||||
|
||||
if (meta.tweet) {
|
||||
meta.tweet = [ meta.tweet ].flat(1).map(parseTweetId);
|
||||
tweets.push(...meta.tweet);
|
||||
}
|
||||
|
||||
if (meta.tweets) {
|
||||
meta.tweets = meta.tweets.map(parseTweetId);
|
||||
tweets.push(...meta.tweets);
|
||||
}
|
||||
|
||||
for (const id of tweets) {
|
||||
if (!twitterCache[id]) {
|
||||
tweetsNeeded.push(id);
|
||||
}
|
||||
}
|
||||
|
||||
meta.tweets = tweets;
|
||||
|
||||
flags.add(tweets.length ? 'has-tweets' : 'no-tweets');
|
||||
|
||||
/* Process Flags **************************************************/
|
||||
|
||||
meta.classes = Array.from(flags);
|
||||
meta.flags = meta.classes.reduce((res, item) => {
|
||||
var camelCased = item.replace(/-([a-z])/g, (g) => g[1].toUpperCase());
|
||||
res[camelCased] = true;
|
||||
return res;
|
||||
}, {});
|
||||
|
||||
return meta;
|
||||
});
|
||||
|
||||
pages = pages.filter(Boolean);
|
||||
|
||||
/* Load Missing Tweets **************************************************/
|
||||
|
||||
if (tweetsNeeded.length) {
|
||||
log('Fetching tweets: ' + tweetsNeeded.join(', '));
|
||||
const arriving = await Promise.all(chunk(uniq(tweetsNeeded), 99).map(twitter));
|
||||
|
||||
const loaded = [];
|
||||
for (const tweet of arriving.flat(1)) {
|
||||
if (!twitterBackup[tweet.id_str]) twitterBackup[tweet.id_str] = tweet;
|
||||
twitterCache[tweet.id_str] = tweetparse(tweet);
|
||||
loaded.push(tweet.id_str);
|
||||
}
|
||||
|
||||
const absent = difference(tweetsNeeded, loaded);
|
||||
for (const id of absent) {
|
||||
if (twitterBackup[id]) {
|
||||
log('Pulled tweet from backup ' + id);
|
||||
twitterCache[id] = tweetparse(twitterBackup[id]);
|
||||
continue;
|
||||
}
|
||||
log.error('Could not find tweet ' + id);
|
||||
}
|
||||
}
|
||||
|
||||
/* Apply Tweets to Pages **************************************************/
|
||||
|
||||
const twitterMedia = [];
|
||||
|
||||
// now loop through pages and substitute the tweet data for the ids
|
||||
for (const page of pages) {
|
||||
if (!page.tweets || !page.tweets.length) continue;
|
||||
|
||||
page.tweets = page.tweets.reduce((dict, tweetid) => {
|
||||
const tweet = twitterCache[tweetid];
|
||||
if (!tweet) {
|
||||
log.error(`Tweet ${tweetid} is missing from the cache.`);
|
||||
return dict;
|
||||
}
|
||||
dict[tweetid] = tweet;
|
||||
twitterMedia.push( ...tweet.media );
|
||||
return dict;
|
||||
}, {});
|
||||
|
||||
}
|
||||
|
||||
await Promise.all([
|
||||
fs.writeFile(path.join(ROOT, 'pages.json'), JSON.stringify(pages, null, 2)),
|
||||
fs.writeFile(path.join(ROOT, 'twitter-media.json'), JSON.stringify(twitterMedia, null, 2)),
|
||||
fs.writeFile(path.join(ROOT, 'twitter-cache.json'), JSON.stringify(twitterCache, null, 2)),
|
||||
fs.writeFile(path.join(ROOT, 'twitter-backup.json'), JSON.stringify(twitterBackup, null, 2)),
|
||||
]);
|
||||
|
||||
return pages;
|
||||
};
|
||||
|
||||
exports.write = async function writePageContent ({ prod }) {
|
||||
const [ pages, { siteInfo }, engines ] = await Promise.all([
|
||||
fs.readJson(resolve('pages.json')),
|
||||
fs.readJson(resolve('package.json')),
|
||||
getEngines(prod),
|
||||
]);
|
||||
|
||||
await Promise.map(pages, async (page) => {
|
||||
var data = {
|
||||
...page,
|
||||
meta: page,
|
||||
page: {
|
||||
domain: siteInfo.domain,
|
||||
title: page.title
|
||||
? (page.title + (page.subtitle ? ', ' + page.subtitle : '') + ' :: ' + siteInfo.title)
|
||||
: siteInfo.title,
|
||||
},
|
||||
local: {
|
||||
cwd: page.cwd,
|
||||
root: ROOT,
|
||||
basename: path.basename(page.input),
|
||||
},
|
||||
pages,
|
||||
};
|
||||
|
||||
const html = engines[page.engine](data.source, data).toString();
|
||||
const json = page.json && {
|
||||
url: page.fullurl,
|
||||
title: page.title,
|
||||
subtitle: page.subtitle,
|
||||
description: page.description,
|
||||
tweets: page.tweets,
|
||||
images: page.images,
|
||||
dateCreated: page.dateCreated,
|
||||
dateModified: page.dateModified,
|
||||
titlecard: page.titlecard,
|
||||
};
|
||||
|
||||
await fs.ensureDir(path.dirname(page.output));
|
||||
await Promise.all([
|
||||
fs.writeFile(page.output, Buffer.from(html)),
|
||||
json && fs.writeFile(page.json, Buffer.from(prod ? JSON.stringify(json) : JSON.stringify(json, null, 2))),
|
||||
]);
|
||||
});
|
||||
};
|
||||
|
||||
// Gulp task alias: identical to `write`, but with production minification on.
exports.write.prod = function writePageContentForProduction () {
  return exports.write({ prod: true });
};
|
||||
|
||||
|
||||
/* Utility Functions **************************************************/
|
||||
|
||||
// Matches a twitter status url (old-style #! urls included), capturing the id.
const tweeturl = /https?:\/\/twitter\.com\/(?:#!\/)?(?:\w+)\/status(?:es)?\/(\d+)/i;
// A bare tweet id is nothing but digits.
const tweetidcheck = /^\d+$/;

/**
 * Normalize a tweet reference from frontmatter to a bare status id string.
 * Accepts either a full twitter status url or a numeric id string; anything
 * else (including non-strings) yields `false`.
 */
function parseTweetId (tweetid) {
  // we can't trust an id that isn't a string
  if (typeof tweetid !== 'string') return false;

  const fromUrl = tweeturl.exec(tweetid);
  if (fromUrl) return fromUrl[1];

  return tweetidcheck.test(tweetid) ? tweetid : false;
}
|
||||
|
||||
/**
 * Resolve a repo-relative path against the project ROOT. A leading slash is
 * treated as repo-relative (stripped), not filesystem-absolute. Extra
 * segments are joined on, mirroring `path.resolve`.
 */
function resolve (fpath, ...args) {
  const trimmed = fpath.startsWith('/') ? fpath.slice(1) : fpath;
  return path.resolve(ROOT, trimmed, ...args);
}
|
||||
|
||||
/**
 * Build a tweet-fetching function from a twitter-lite config. Without a
 * config the returned stub resolves every lookup to an empty array, so the
 * rest of the pipeline works offline. Errors from the API are logged and
 * swallowed, yielding an empty array.
 */
function getTwitterClient (config) {
  if (!config) return () => [];

  const client = new Twitter(config);
  return async (tweetids) => {
    try {
      return await client.get('statuses/lookup', { id: tweetids.join(','), tweet_mode: 'extended' });
    } catch (e) {
      log.error(e);
      return [];
    }
  };
}
|
||||
|
||||
// fs.stat that resolves to undefined instead of rejecting (missing files etc).
const stat = async (f) => {
  try {
    return await fs.stat(f);
  } catch {
    return undefined;
  }
};
|
||||
254
gulp/content/renderers.js
Normal file
254
gulp/content/renderers.js
Normal file
@@ -0,0 +1,254 @@
|
||||
|
||||
const path = require('path');
|
||||
const ROOT = path.resolve(__dirname, '../..');
|
||||
|
||||
const fs = require('fs-extra');
|
||||
const log = require('fancy-log');
|
||||
const { minify } = require('html-minifier-terser');
|
||||
|
||||
const handlebars = require('handlebars');
|
||||
const HandlebarsKit = require('hbs-kit');
|
||||
HandlebarsKit.load(handlebars);
|
||||
|
||||
const slugs = require('slugify');
|
||||
const slugify = (s) => slugs(s, { remove: /[*+~.,()'"!?:@/\\]/g }).toLowerCase();
|
||||
const striptags = require('string-strip-html');
|
||||
|
||||
const markdownIt = require('markdown-it');
|
||||
|
||||
|
||||
|
||||
// Two markdown-it instances:
//  - `full` renders page bodies: raw HTML allowed, bare urls auto-linked,
//    typographic replacements on, with self-linking header anchors and the
//    project's raw-html passthrough plugin.
//  - `preview` renders short excerpts for listings: HTML and autolinking
//    disabled, tokens filtered down by the token-filter plugin.
const markdownEngines = {
  full: markdownIt({
    html: true,
    linkify: true,
    typographer: true,
  })
    .enable('image')
    .use(require('markdown-it-anchor'), {
      permalink: true,
      permalinkClass: 'header-link',
      permalinkSymbol: '<img src="/images/svg/paragraph.svg">',
      slugify,
    })
    .use(require('../lib/markdown-raw-html')),

  preview: markdownIt({
    html: false,
    linkify: false,
    typographer: true,
  })
    .use(require('../lib/markdown-token-filter')),
};
|
||||
|
||||
/**
 * Render a markdown source string.
 * @param {'full'|'preview'} mode  which markdownEngines instance to use
 * @param {string} input           raw markdown (may contain {!{ … }!} blocks)
 * @param {object} env             handlebars/render environment for the page
 * @returns {string} rendered html ('' for empty input)
 */
function markdown (mode, input, env) {
  // Handlebars escape hatch: {!{ … }!} spans are compiled against the page
  // environment first; the result is fenced with ||| markers so the raw-html
  // markdown plugin can pass it through untouched.
  input = input.replace(/\{!\{([\s\S]*?)\}!\}/mg, (match, contents) => {
    try {
      const result = handlebars.compile(contents)(env);
      return '|||' + result + '|||';
    } catch (e) {
      log.error(e);
      return '';
    }
  });

  if (mode === 'preview') {
    // Previews drop hidden-comment spans and any |||…||| fenced html, strip
    // all tags, and truncate to roughly a thousand characters.
    // FIX: the fence pattern must escape the pipes — the previous
    // /|||[\s\S]*?|||/ parsed as empty alternations and never matched.
    input = striptags(input
      .replace(/<!--\[[\s\S]*?\]-->/g, '')
      .replace(/\|\|\|[\s\S]*?\|\|\|/g, ''),
    ).trim();
    if (input.length > 1000) input = input.slice(0, 1000) + '…';
    // FIX: render exactly once via the shared return below — the previous
    // version rendered the preview here AND again at the bottom, feeding its
    // own html output back through markdown a second time.
  } else {
    // strip literal `<!--[-->` / `<!--]-->` marker comments
    input = input.replace(/<!--[[\]]-->/g, '');
  }

  return input ? markdownEngines[mode].render(input, env) : '';
}
|
||||
|
||||
/**
 * Remove the common leading indentation from every line of a block of text,
 * so markdown written inside an indented handlebars helper isn't parsed as a
 * code block. Lines consisting only of whitespace are ignored when measuring.
 */
function stripIndent (input) {
  // leading whitespace of every line that has non-whitespace content
  const leading = input.match(/^[^\S\n]*(?=\S)/gm);
  if (!leading) return input;

  const depth = Math.min(...leading.map((ws) => ws.length));
  if (!depth) return input;

  return input.replace(new RegExp(`^.{${depth}}`, 'gm'), '');
}
|
||||
|
||||
// html-minifier-terser options applied to rendered output in prod builds.
const MINIFY_CONFIG = {
  conservativeCollapse: true,
  collapseWhitespace: true,
  minifyCSS: true,
  removeComments: true,
  removeRedundantAttributes: true,
};

// Handlebars partials registered at engine setup: partial name -> repo path.
const HANDLEBARS_PARTIALS = {
  layout: 'templates/layout.hbs',
};
|
||||
|
||||
/**
 * Build the map of render engines used by the content pipeline.
 * Registers partials and helpers on the (module-global) handlebars instance,
 * compiles the post template, and returns `{ hbs, md, raw, preview }`, each a
 * `(source, env) => html` function. When `prod` is truthy, html output is
 * minified and the rev-manifest is loaded for asset-url rewriting.
 */
module.exports = exports = async function (prod) {
  // Register each configured partial; a failure is logged but non-fatal.
  for (const [ name, file ] of Object.entries(HANDLEBARS_PARTIALS)) {
    try {
      const contents = await fs.readFile(path.resolve(ROOT, file));
      const template = handlebars.compile(contents.toString('utf8'));
      handlebars.registerPartial(name, template);
    } catch (e) {
      log.error('Could not execute load partial ' + path.relative(ROOT, file), e);
    }
  }

  // The post template wraps markdown pages; an empty file is a hard error,
  // a compile failure is only logged (pageTemplate stays undefined then —
  // NOTE(review): `md` rendering would crash later in that case; confirm
  // whether this should also throw).
  const pageTemplateRaw = await fs.readFile(path.join(ROOT, 'templates/post.hbs'));
  if (!pageTemplateRaw) throw new Error('Post template was empty?');
  try {
    var pageTemplate = handlebars.compile(pageTemplateRaw.toString('utf8'));
  } catch (e) {
    log.error('Crash while loading post template', e);
  }

  // rev-manifest maps source asset paths to fingerprinted filenames; only
  // loaded in prod, and a missing file degrades to an empty map.
  const revManifest = prod && await fs.readJson(path.join(ROOT, 'rev-manifest.json')).catch(() => {}).then((r) => r || {});

  // Helper functions are closures produced by the Injectables factory below.
  const helpers = new Injectables(prod, revManifest);
  handlebars.registerHelper('import', helpers.import());
  handlebars.registerHelper('markdown', helpers.markdown());
  handlebars.registerHelper('icon', helpers.icon());
  handlebars.registerHelper('prod', helpers.production());
  handlebars.registerHelper('rev', helpers.rev());

  // In prod every engine's html passes through the minifier.
  const shrink = (input) => (prod ? minify(input, MINIFY_CONFIG) : input);

  const result = {
    // full handlebars compile of the page source
    hbs: (source, env) => {
      const template = handlebars.compile(source);
      return shrink(template(env));
    },
    // markdown body rendered, then wrapped in the post template
    md: (source, env) => shrink(pageTemplate({ ...env, contents: markdown('full', source, env) })),
    // passthrough (still minified in prod)
    raw: (source) => shrink(source),
    // excerpt rendering for listings; never minified
    preview: (source, env) => markdown('preview', source, env),
  };

  // result.handlebars.engine = handlebars;
  // result.markdown.engine = markdownEngines.full;

  return result;
};
|
||||
|
||||
/**
 * Factory for the handlebars helpers used by the render engines. Each method
 * returns a helper function; those helpers are invoked by handlebars with a
 * dynamic `this` (the current template context), so they capture the
 * Injectables instance as `self` instead of using arrow functions.
 * Loaded files are cached in `this.injections`, keyed by absolute path.
 */
class Injectables {

  constructor (prod, revManifest) {
    this.prod = prod;
    // rev-manifest map (prod only) used by the `rev` helper; may be falsy.
    this.revManifest = revManifest;
    // cache of loaded/compiled injectables, keyed by absolute file path
    this.injections = {};
  }

  // Resolve a template reference to an absolute path:
  //   '/x'  -> relative to the repo root
  //   '~x'  -> relative to the templates directory
  //   'x'   -> relative to the current page's directory
  // Appends `.type` unless the path already ends with it.
  _parsePath (tpath, local, type) {
    if (tpath[0] === '/') tpath = path.join(local.root, tpath);
    else if (tpath[0] === '~') tpath = path.join(local.root, 'templates', tpath.slice(2));
    else tpath = path.resolve(local.cwd, tpath);
    if (type && !tpath.endsWith(type)) tpath += '.' + type;
    return tpath;
  }

  // Load a file through the cache, optionally transforming it with `make`
  // (e.g. handlebars.compile) before caching. Missing files and load errors
  // are logged and yield '' rather than throwing.
  _template (tpath, make) {
    if (this.injections[tpath]) return this.injections[tpath];

    if (!fs.existsSync(tpath)) {
      log.error('Injectable does not exist: ' + path.relative(ROOT, tpath));
      return '';
    }

    let contents;
    try {
      contents = fs.readFileSync(tpath).toString('utf8');
      if (make) contents = make(contents);
      this.injections[tpath] = contents;
      return contents;
    } catch (e) {
      log.error(e, 'An error occured while loading the injectable: ' + path.relative(ROOT, tpath));
    }

    return '';
  }

  // {{rev url}} — rewrite an asset url through the rev-manifest in prod;
  // always returns a root-relative url.
  rev () {
    const self = this;
    return function (url) {
      if (!url) return '';
      if (url[0] === '/') url = url.substr(1);
      if (self.prod && self.revManifest[url]) return '/' + self.revManifest[url];
      return '/' + url;
    };
  }

  // {{prod}} / {{#prod}}…{{/prod}} — inline, returns the prod flag; as a
  // block, renders the body only in prod (inverse otherwise).
  production () {
    const self = this;
    return function (options) {
      if (!options.fn) return self.prod;
      return self.prod ? options.fn(this) : options.inverse(this);
    };
  }

  // {{#markdown}}…{{/markdown}} renders its (de-indented) block body;
  // {{markdown "path"}} renders a .md file resolved via _parsePath.
  markdown () {
    const self = this;
    return function (...args) {
      const { fn } = args.pop();
      let contents;

      if (fn) {
        contents = stripIndent(fn(this));
      } else {
        let tpath = args.shift();
        tpath = self._parsePath(tpath, this.local, 'md');

        contents = self._template(tpath);
      }

      contents = markdown('full', contents, this);

      return new handlebars.SafeString(contents);
    };
  }

  // {{import "path" [context] [k=v…]}} — compile and render another .hbs
  // file, in a child frame of the given (or current) context merged with
  // any hash arguments.
  import () {
    const self = this;
    return function (tpath, ...args) {
      const { hash } = args.pop();
      const value = args.shift();
      const context = handlebars.createFrame(value || this);
      Object.assign(context, hash || {});

      tpath = self._parsePath(tpath, this.local, 'hbs');

      try {
        const contents = self._template(tpath, handlebars.compile)(context);
        return new handlebars.SafeString(contents);
      } catch (e) {
        log.error('Could not execute import template ' + path.relative(ROOT, tpath), e);
        return '';
      }
    };
  }

  // {{icon "name" [size=n]}} — inline an svg from /svg wrapped in a sizing
  // span; the wrapper template is compiled once per icon and cached.
  icon () {
    const self = this;
    return function (name, ...args) {
      const { hash } = args.pop();
      const tpath = path.join(this.local.root, 'svg', name + '.svg');

      try {
        const contents = self._template(tpath, (s) =>
          handlebars.compile(`<span class="svg-icon" {{#if size}}style="width:{{size}}px;height:{{size}}px"{{/if}}>${s}</span>`),
        )({ size: hash && hash.size });

        return new handlebars.SafeString(contents);
      } catch (e) {
        log.error('Could not execute import template ' + path.relative(ROOT, tpath), e);
        return '';
      }
    };
  }

}
|
||||
579
gulp/contents.js
579
gulp/contents.js
@@ -1,579 +0,0 @@
|
||||
|
||||
const path = require('path');
|
||||
const fs = require('fs-extra');
|
||||
const { chunk, uniq, keyBy, difference, omit } = require('lodash');
|
||||
const log = require('fancy-log');
|
||||
const glob = require('./lib/glob');
|
||||
const getDimensions = require('./lib/dimensions');
|
||||
const memoize = require('memoizepromise');
|
||||
const { URL } = require('url');
|
||||
const { minify: htmlMinify } = require('html-minifier-terser');
|
||||
|
||||
const { src, dest } = require('gulp');
|
||||
const frontmatter = require('gulp-front-matter');
|
||||
const collect = require('gulp-collect');
|
||||
|
||||
const asyncthrough = require('./lib/through');
|
||||
|
||||
const ROOT = path.dirname(__dirname);
|
||||
const DEST = 'dist';
|
||||
|
||||
const { siteInfo } = require('../package.json');
|
||||
|
||||
const markdown = require('markdown-it');
|
||||
const striptags = require('string-strip-html');
|
||||
const tweetparse = require('./lib/tweetparse');
|
||||
|
||||
const slugs = require('slugify');
|
||||
const slugify = (s) => slugs(s, { remove: /[*+~.,()'"!?:@/\\]/g }).toLowerCase();
|
||||
|
||||
const handlebars = require('handlebars');
|
||||
const HandlebarsKit = require('hbs-kit');
|
||||
HandlebarsKit.load(handlebars);
|
||||
|
||||
const md = markdown({
|
||||
html: true,
|
||||
linkify: true,
|
||||
typographer: true,
|
||||
}).enable('image')
|
||||
.use(require('markdown-it-anchor'), {
|
||||
permalink: true,
|
||||
permalinkClass: 'header-link',
|
||||
permalinkSymbol: '<img src="/images/svg/paragraph.svg">',
|
||||
slugify,
|
||||
})
|
||||
.use(require('./lib/markdown-raw-html'))
|
||||
;
|
||||
|
||||
const mdPreview = markdown({
|
||||
html: false,
|
||||
linkify: false,
|
||||
typographer: true,
|
||||
})
|
||||
.use(require('./lib/markdown-token-filter'))
|
||||
;
|
||||
|
||||
let twitterClient;
|
||||
const Twitter = require('twitter-lite');
|
||||
try {
|
||||
twitterClient = new Twitter(require('../twitter.json'));
|
||||
} catch (e) {
|
||||
twitterClient = null;
|
||||
}
|
||||
|
||||
function twitter (tweetids) {
|
||||
if (!twitterClient) return [];
|
||||
return twitterClient.get('statuses/lookup', { id: tweetids.join(','), tweet_mode: 'extended' })
|
||||
.catch((e) => { log.error(e); return []; });
|
||||
}
|
||||
|
||||
|
||||
async function reloadLayouts () {
|
||||
const layouts = {
|
||||
layout: 'templates/layout.hbs.html',
|
||||
};
|
||||
|
||||
let pending = Object.entries(layouts)
|
||||
.map(async ([ name, file ]) =>
|
||||
[ name, (await fs.readFile(path.resolve(ROOT, file))).toString('utf8') ],
|
||||
);
|
||||
|
||||
pending = await Promise.all(pending);
|
||||
|
||||
pending.forEach(([ name, file ]) => handlebars.registerPartial(name, handlebars.compile(file)));
|
||||
|
||||
const injections = {};
|
||||
handlebars.registerHelper('inject', function (tpath, ...args) {
|
||||
const { hash } = args.pop();
|
||||
const context = handlebars.createFrame(args[0] || this);
|
||||
Object.assign(context, hash || {});
|
||||
|
||||
if (tpath[0] === '/') tpath = path.join(this.local.root, tpath);
|
||||
else if (tpath[0] === '~') tpath = path.join(this.local.root, 'templates', tpath.slice(2));
|
||||
else tpath = path.resolve(this.local.cwd, tpath);
|
||||
tpath += '.hbs';
|
||||
|
||||
if (!injections[tpath]) {
|
||||
if (!fs.existsSync(tpath)) {
|
||||
log.error('Template does not exist for injection ' + path.relative(ROOT, tpath));
|
||||
return '';
|
||||
}
|
||||
|
||||
try {
|
||||
injections[tpath] = handlebars.compile(fs.readFileSync(tpath).toString('utf8'));
|
||||
} catch (e) {
|
||||
log.error('Could not load injection template ' + path.relative(ROOT, tpath), e);
|
||||
return '';
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
return new handlebars.SafeString(injections[tpath](context));
|
||||
} catch (e) {
|
||||
log.error('Could not execute injection template ' + path.relative(ROOT, tpath), e);
|
||||
return '';
|
||||
}
|
||||
});
|
||||
|
||||
handlebars.registerHelper('icon', function (name, ...args) {
|
||||
const { hash } = args.pop();
|
||||
const tpath = path.join(this.local.root, 'svg', name + '.svg');
|
||||
|
||||
if (!injections[tpath]) {
|
||||
if (!fs.existsSync(tpath)) {
|
||||
log.error('Template does not exist for injection ' + path.relative(ROOT, tpath));
|
||||
return '';
|
||||
}
|
||||
|
||||
try {
|
||||
const svg = fs.readFileSync(tpath).toString('utf8');
|
||||
injections[tpath] = handlebars.compile(`<span class="svg-icon" {{#if size}}style="width:{{size}}px;height:{{size}}px"{{/if}}>${svg}</span>`);
|
||||
} catch (e) {
|
||||
log.error('Could not load injection template ' + path.relative(ROOT, tpath), e);
|
||||
return '';
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
return new handlebars.SafeString(injections[tpath]({ size: hash && hash.size }));
|
||||
} catch (e) {
|
||||
log.error('Could not execute injection template ' + path.relative(ROOT, tpath), e);
|
||||
return '';
|
||||
}
|
||||
});
|
||||
|
||||
handlebars.registerHelper('markdown', function (...args) {
|
||||
const { fn } = args.pop();
|
||||
let original;
|
||||
|
||||
if (fn) {
|
||||
original = fn(this);
|
||||
|
||||
const match = original.match(/^[^\S\n]*(?=\S)/gm);
|
||||
const indent = match && Math.min(...match.map((el) => el.length));
|
||||
|
||||
if (indent) {
|
||||
const regexp = new RegExp(`^.{${indent}}`, 'gm');
|
||||
original = original.replace(regexp, '');
|
||||
}
|
||||
|
||||
} else {
|
||||
let tpath = args.shift();
|
||||
if (!tpath) throw new Error('No content was provided for the Markdown helper');
|
||||
if (tpath[0] === '/') tpath = path.join(this.local.root, tpath);
|
||||
else tpath = path.resolve(this.local.cwd, tpath);
|
||||
tpath += '.md';
|
||||
|
||||
if (!injections[tpath]) {
|
||||
if (!fs.existsSync(tpath)) {
|
||||
log.error('Markdown does not exist for injection ' + path.relative(ROOT, tpath));
|
||||
return '';
|
||||
}
|
||||
|
||||
try {
|
||||
original = fs.readFileSync(tpath).toString('utf8');
|
||||
injections[tpath] = original;
|
||||
} catch (e) {
|
||||
log.error('Could not load markdown file ' + path.relative(ROOT, tpath), e);
|
||||
return '';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
original = md.render(original);
|
||||
|
||||
return new handlebars.SafeString(original);
|
||||
});
|
||||
}
|
||||
|
||||
exports.loadLayout = async function loadLayout () {
|
||||
await reloadLayouts();
|
||||
handlebars.registerHelper('rev', (url) => {
|
||||
if (!url) return '';
|
||||
if (url[0] === '/') url = url.substr(1);
|
||||
return '/' + url;
|
||||
});
|
||||
handlebars.registerHelper('prod', function (options) {
|
||||
if (!options.inverse) return false;
|
||||
return options.inverse(this);
|
||||
});
|
||||
};
|
||||
|
||||
exports.loadLayout.prod = async function loadLayoutForProd () {
|
||||
const manifest = await fs.readJson(path.join(ROOT, 'rev-manifest.json')).catch(() => {}).then((r) => r || {});
|
||||
|
||||
await reloadLayouts();
|
||||
|
||||
handlebars.registerHelper('rev', (url) => {
|
||||
if (!url) return '';
|
||||
if (url[0] === '/') url = url.substr(1);
|
||||
if (manifest[url]) return '/' + manifest[url];
|
||||
return '/' + url;
|
||||
});
|
||||
handlebars.registerHelper('prod', function (options) {
|
||||
if (!options.fn) return true;
|
||||
return options.fn(this);
|
||||
});
|
||||
};
|
||||
|
||||
exports.pages = function buildPages ({ minify }) {
|
||||
var postTemplate = handlebars.compile(String(fs.readFileSync(path.join(ROOT, '/templates/post.hbs.html'))));
|
||||
const minifyConfig = {
|
||||
conservativeCollapse: true,
|
||||
collapseWhitespace: true,
|
||||
minifyCSS: true,
|
||||
removeComments: true,
|
||||
removeRedundantAttributes: true,
|
||||
};
|
||||
|
||||
return src([ 'pages/**/*.{md,html,xml}', '!pages/**/_*.{md,html}' ])
|
||||
.pipe(frontmatter({
|
||||
property: 'meta',
|
||||
}))
|
||||
.pipe(parseMeta())
|
||||
.pipe(parseTweets())
|
||||
.pipe(asyncthrough(async (stream, file) => {
|
||||
const cwd = path.dirname(file.path);
|
||||
let original = file.contents.toString('utf8').trim();
|
||||
|
||||
var data = {
|
||||
...file.meta,
|
||||
meta: file.meta,
|
||||
page: {
|
||||
domain: siteInfo.domain,
|
||||
title: file.meta.title
|
||||
? (file.meta.title + (file.meta.subtitle ? ', ' + file.meta.subtitle : '') + ' :: ' + siteInfo.title)
|
||||
: siteInfo.title,
|
||||
},
|
||||
local: {
|
||||
cwd,
|
||||
root: ROOT,
|
||||
basename: file.basename,
|
||||
},
|
||||
};
|
||||
|
||||
if ([ '.html', '.md' ].includes(file.extname)) {
|
||||
const datajs = file.clone();
|
||||
datajs.contents = Buffer.from(JSON.stringify(omit(file.meta, [ 'destination' ]), null, 2));
|
||||
datajs.basename = path.basename(file.path, file.extname) + '.json';
|
||||
stream.push(datajs);
|
||||
}
|
||||
|
||||
if ([ '.html', '.xml' ].includes(file.extname)) {
|
||||
// is a handlebars template
|
||||
try {
|
||||
const template = handlebars.compile(original);
|
||||
let html = template(data);
|
||||
if (minify) {
|
||||
html = htmlMinify(html, minifyConfig);
|
||||
}
|
||||
file.contents = Buffer.from(html);
|
||||
stream.push(file);
|
||||
} catch (err) {
|
||||
log.error('Encountered a crash while compiling ' + file.path, err);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
original = original.replace(/\{!\{([\s\S]*?)\}!\}/mg, (match, contents) => {
|
||||
try {
|
||||
const result = handlebars.compile(contents)(data);
|
||||
return '|||' + result + '|||';
|
||||
} catch (e) {
|
||||
log.error(e);
|
||||
return '';
|
||||
}
|
||||
});
|
||||
|
||||
if (file.extname === '.md') {
|
||||
|
||||
let contents, preview;
|
||||
try {
|
||||
contents = md.render(original.replace(/<!--[[\]]-->/g, '')).trim();
|
||||
data.contents = contents;
|
||||
|
||||
preview = striptags(original
|
||||
.replace(/<!--\[[\s\S]*?\]-->/g, '')
|
||||
.replace(/|||[\s\S]*?|||/gi, ''),
|
||||
).trim();
|
||||
if (preview.length > 1000) preview = preview.slice(0, 1000) + '…';
|
||||
preview = preview ? mdPreview.render(preview) : '';
|
||||
|
||||
data.preview = preview;
|
||||
} catch (e) {
|
||||
log.error(`Error while rendering ${file.path}`, e);
|
||||
contents = preview = '';
|
||||
}
|
||||
|
||||
if (preview) {
|
||||
file.flags.add('has-preview');
|
||||
if (preview.length < 400) file.flags.add('short-preview');
|
||||
} else {
|
||||
file.flags.add('no-preview');
|
||||
}
|
||||
|
||||
const classes = Array.from(file.flags);
|
||||
const flags = classes.reduce((res, item) => {
|
||||
var camelCased = item.replace(/-([a-z])/g, (g) => g[1].toUpperCase());
|
||||
res[camelCased] = true;
|
||||
return res;
|
||||
}, {});
|
||||
|
||||
data.classes = data.meta.classes = classes;
|
||||
data.flags = data.meta.flags = flags;
|
||||
|
||||
file.path = file.meta.destination;
|
||||
|
||||
// is a markdown file
|
||||
try {
|
||||
let html = postTemplate(data);
|
||||
if (minify) {
|
||||
html = htmlMinify(html, minifyConfig);
|
||||
}
|
||||
file.contents = Buffer.from(html);
|
||||
stream.push(file);
|
||||
} catch (err) {
|
||||
log.error(`Error while rendering html for ${file.path}`, err);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
}))
|
||||
.pipe(dest(DEST));
|
||||
};
|
||||
|
||||
// Production variant of the pages task: identical to `exports.pages`, but
// renders with HTML minification enabled. Kept as a named function so gulp
// reports a readable task name.
exports.pages.prod = function buildPagesProd () {
  return exports.pages({ minify: true });
};
|
||||
|
||||
/** **************************************************************************************************************** **/
|
||||
|
||||
|
||||
/**
 * Gulp stream stage that annotates each page file's `meta` object with
 * derived routing data (slug, destination, urls), display flags, and a
 * manifest of sibling image/movie assets.
 *
 * Returns an asyncthrough transform; files flagged `meta.ignore` are dropped.
 */
function parseMeta () {
  // Memoized per-directory scan of a page's sibling media files. Builds the
  // image/movie manifest templates use to emit responsive image markup.
  const getFileData = memoize(async (cwd, siteCwd) => {
    const imageFiles = (await glob('{*,_images/*}.{jpeg,jpg,png,gif,mp4}', { cwd }));

    const images = (await Promise.all(imageFiles.map(async (imgpath) => {

      const ext = path.extname(imgpath);
      let basename = path.basename(imgpath, ext);

      // titlecards are looked up separately below
      if (basename === 'titlecard') return;

      if (ext === '.mp4') {
        return {
          name: basename,
          type: 'movie',
          full: path.join(siteCwd, `${basename}${ext}`),
        };
      }

      const dimensions = await getDimensions(path.resolve(cwd, imgpath));
      const { width, height } = dimensions;
      dimensions.ratioH = Math.round((height / width) * 100);
      dimensions.ratioW = Math.round((width / height) * 100);
      if (dimensions.ratioH > 100) {
        dimensions.orientation = 'tall';
      } else if (dimensions.ratioH === 100) {
        dimensions.orientation = 'square';
      } else {
        dimensions.orientation = 'wide';
      }

      // a leading underscore hides the source file from other globs; strip it
      // so the public name matches the generated output files.
      if (basename[0] === '_') {
        basename = basename.slice(1);
      }

      const filetype = {
        '.jpeg': 'jpeg',
        '.jpg': 'jpeg',
        '.png': 'png',
        '.gif': 'gif',
      }[ext];

      // full-size rendition first; downscaled widths appended below
      const sizes = [
        {
          url: path.join(siteCwd, `${basename}.${filetype}`),
          width: dimensions.width,
          height: dimensions.height,
        },
      ];

      for (const w of [ 2048, 1024, 768, 576, 300, 100 ]) {
        // never upscale: skip target widths wider than the source image
        if (w > dimensions.width) continue;
        sizes.push({
          url: path.join(siteCwd, `${basename}.${w}w.${filetype}`),
          width: w,
          height: Math.ceil((w / dimensions.width) * dimensions.height),
        });
      }

      sizes.reverse(); // smallest first, original last

      return {
        name: basename,
        type: 'image',
        sizes,
      };
    }))).filter(Boolean);

    const titlecard = (await glob('titlecard.{jpeg,jpg,png,gif}', { cwd }))[0];

    return {
      images: keyBy(images, 'name'),
      titlecard: titlecard ? path.join(siteCwd, titlecard) : false,
    };
  });


  return asyncthrough(async (stream, file) => {
    if (!file || (file.meta && file.meta.ignore)) return;

    if (!file.meta) file.meta = {};

    // if metadata has a date value, use it; otherwise use creation date.
    // `new Date(...)` is always truthy — even an Invalid Date — so validity
    // must be checked via getTime(), or the ctime fallback can never fire.
    var date = new Date(file.meta.date);
    if (isNaN(date.getTime())) date = file.stat.ctime;
    // NOTE(review): `.data` looks like a typo for `.date`, but downstream
    // templates may read it as-is, so the key is preserved — confirm consumers.
    file.meta.data = date;

    var cwd = path.dirname(file.path);
    var siteCwd = file.meta.cwd = '/' + path.relative(path.join(ROOT, 'pages'), cwd);
    var base = file.meta.base = path.basename(file.path, file.extname);

    var flags = file.flags = new Set(file.meta.classes || []);
    var isIndexPage = file.meta.isIndex = (base === 'index');
    var isRootPage = file.meta.isRoot = (file.meta.cwd === '/');

    // Index pages stay at their directory's index.html; every other page gets
    // its own pretty-url directory. This two-way branch is exactly equivalent
    // to the previous four-way chain, whose final `else` arm was unreachable.
    if (isIndexPage) {
      file.meta.slug = '';
      file.meta.destination = path.join(path.dirname(file.path), 'index.html');
    } else {
      file.meta.slug = base;
      file.meta.destination = path.join(path.dirname(file.path), base, 'index.html');
    }

    const url = new URL(siteInfo.rss.site_url);
    file.meta.url = url.pathname = path.join(siteCwd, file.meta.slug);
    file.meta.fullurl = url.toString();

    const { images, titlecard } = await getFileData(cwd, siteCwd);

    file.meta.images = images;
    file.meta.titlecard = titlecard;

    flags.add(titlecard ? 'has-titlecard' : 'no-titlecard');

    // explicit `no-title` metadata always hides the header; otherwise show it
    // only when there is a title or description to display.
    if (file.meta['no-title']) {
      flags.add('hide-title');
    } else if (file.meta.title || file.meta.description) {
      flags.add('show-title');
    } else {
      flags.add('hide-title');
    }

    flags.add(file.meta.title ? 'has-title' : 'no-title');
    flags.add(file.meta.subtitle ? 'has-subtitle' : 'no-subtitle');
    flags.add(file.meta.description ? 'has-descrip' : 'no-descrip');

    stream.push(file);
  });
}
|
||||
|
||||
/**
 * Gulp collector stage that resolves tweet references in page metadata into
 * parsed tweet objects, fetching uncached tweets from the Twitter API (with a
 * local JSON backup as fallback) and persisting cache/backup/media manifests.
 */
function parseTweets () {
  const tweeturl = /https?:\/\/twitter\.com\/(?:#!\/)?(?:\w+)\/status(?:es)?\/(\d+)/i;
  const tweetidcheck = /^\d+$/;

  // Normalize a tweet reference (status url or bare numeric id string) to the
  // id string, or return false when it cannot be parsed.
  function parseTweetId (tweetid) {
    // we can't trust an id that isn't a string
    if (typeof tweetid !== 'string') return false;

    const match = tweetid.match(tweeturl);
    if (match) return match[1];
    if (tweetid.match(tweetidcheck)) return tweetid;
    return false;
  }

  return collect.list(async (files) => {
    const twitterBackup = (await fs.readJson(path.join(ROOT, 'twitter-backup.json')).catch(() => {})) || {};
    const twitterCache = (await fs.readJson(path.join(ROOT, 'twitter-cache.json')).catch(() => {})) || {};
    const needed = [];

    // first loop through all posts and gather + validate all tweet ids
    for (const file of files) {
      if (!file.meta.tweets && !file.meta.tweet) continue;

      const tweets = [];

      if (file.meta.tweet) {
        file.meta.tweet = [ file.meta.tweet ].flat(1).map(parseTweetId);
        // drop `false` entries from unparseable references so they don't
        // reach the fetch list or the final id→tweet map
        tweets.push(...file.meta.tweet.filter(Boolean));
      }

      if (file.meta.tweets) {
        file.meta.tweets = file.meta.tweets.map(parseTweetId);
        tweets.push(...file.meta.tweets.filter(Boolean));
      }

      for (const id of tweets) {
        if (!twitterCache[id]) {
          needed.push(id);
        }
      }

      file.meta.tweets = tweets;
    }

    // if we have tweets we need to add to the cache, do so
    if (needed.length) {
      log('Fetching tweets: ' + needed.join(', '));
      // the lookup endpoint caps each request at 100 ids; batch in chunks
      const arriving = await Promise.all(chunk(uniq(needed), 99).map(twitter));

      const loaded = [];
      for (const tweet of arriving.flat(1)) {
        if (!twitterBackup[tweet.id_str]) twitterBackup[tweet.id_str] = tweet;
        twitterCache[tweet.id_str] = tweetparse(tweet);
        loaded.push(tweet.id_str);
      }

      // anything the API did not return is restored from the local backup
      const absent = difference(needed, loaded);
      for (const id of absent) {
        if (twitterBackup[id]) {
          log('Pulled tweet from backup ' + id);
          twitterCache[id] = tweetparse(twitterBackup[id]);
          continue;
        }
        log.error('Could not find tweet ' + id);
      }
    }

    const media = [];

    // now loop through posts and substitute the tweet data for the ids
    for (const file of files) {
      if (!file.meta.tweets) continue;

      file.meta.tweets = file.meta.tweets.reduce((dict, tweetid) => {
        const tweet = twitterCache[tweetid];
        if (!tweet) {
          // previously this logged and then crashed dereferencing
          // `tweet.media`; skip the missing tweet instead of aborting
          log.error(`Tweet ${tweetid} is missing from the cache.`);
          return dict;
        }
        dict[tweetid] = tweet;
        // guard: parsed tweets without attached media have no `media` array
        media.push(...(tweet.media || []));
        return dict;
      }, {});

    }

    await fs.writeFile(path.join(ROOT, 'twitter-media.json'), JSON.stringify(media, null, 2));
    await fs.writeFile(path.join(ROOT, 'twitter-cache.json'), JSON.stringify(twitterCache, null, 2));
    await fs.writeFile(path.join(ROOT, 'twitter-backup.json'), JSON.stringify(twitterBackup, null, 2));

    return files;
  });
}
|
||||
|
||||
/** **************************************************************************************************************** **/
|
||||
@@ -3,10 +3,10 @@ const { series, parallel, watch } = require('gulp');
|
||||
|
||||
/** **************************************************************************************************************** **/
|
||||
|
||||
var { loadLayout, pages } = require('./contents');
|
||||
var contentTask = series( loadLayout, pages );
|
||||
exports.pages = series( loadLayout, pages );
|
||||
exports.content = contentTask;
|
||||
var content = require('./content');
|
||||
exports.parse = content.parse;
|
||||
exports.pages = content.write;
|
||||
exports.content = series(content.parse, content.write);
|
||||
|
||||
var images = require('./imgflow');
|
||||
exports.twimages = images.twitter;
|
||||
@@ -40,17 +40,15 @@ exports.cloudfront = cloudfront;
|
||||
|
||||
/** **************************************************************************************************************** **/
|
||||
|
||||
exports.new = require('./new');
|
||||
|
||||
var buildTask = series(
|
||||
var prodBuildTask = series(
|
||||
images.prod,
|
||||
images.favicon.prod,
|
||||
scssTask.prod,
|
||||
jsTask.prod,
|
||||
filesTask.prod,
|
||||
loadLayout.prod,
|
||||
pages.prod,
|
||||
content.parse,
|
||||
images.twitter.prod,
|
||||
content.write.prod,
|
||||
);
|
||||
|
||||
var devBuildTask = series(
|
||||
@@ -60,17 +58,17 @@ var devBuildTask = series(
|
||||
scssTask,
|
||||
jsTask,
|
||||
filesTask,
|
||||
content.parse,
|
||||
),
|
||||
loadLayout,
|
||||
pages,
|
||||
content.write,
|
||||
images.twitter,
|
||||
);
|
||||
|
||||
exports.dev = devBuildTask;
|
||||
exports.prod = buildTask;
|
||||
exports.prod = prodBuildTask;
|
||||
exports.publish = series(
|
||||
cleanTask,
|
||||
buildTask,
|
||||
prodBuildTask,
|
||||
pushToProd,
|
||||
cloudfront.prod,
|
||||
);
|
||||
@@ -83,7 +81,7 @@ function watcher () {
|
||||
watch([
|
||||
'pages/**/*.{md,hbs,html}',
|
||||
'templates/*.{md,hbs,html}',
|
||||
], series(contentTask, images.twitter));
|
||||
], series(content.parse, images.twitter, content.write));
|
||||
|
||||
watch('page/**/*.{jpeg,jpg,png,gif}', images);
|
||||
|
||||
@@ -105,8 +103,8 @@ function server () {
|
||||
|
||||
}
|
||||
|
||||
exports.watch = series(contentTask, watcher);
|
||||
exports.uat = series(cleanTask, buildTask, server);
|
||||
exports.watch = series(series(content.parse, images.twitter, content.write), watcher);
|
||||
exports.uat = series(cleanTask, prodBuildTask, server);
|
||||
|
||||
/** **************************************************************************************************************** **/
|
||||
|
||||
|
||||
Reference in New Issue
Block a user