forked from github.com/GenderDysphoria.fyi
Moved new build process out of gulp folder
This commit is contained in:
162
build/actions.js
Normal file
162
build/actions.js
Normal file
@@ -0,0 +1,162 @@
|
||||
const path = require('path');
|
||||
const fs = require('fs-extra');
|
||||
const gm = require('gm');
|
||||
const Promise = require('bluebird');
|
||||
const fetch = require('make-fetch-happen');
|
||||
const ico = require('png-to-ico');
|
||||
const { resolve, readFile } = require('./resolve');
|
||||
|
||||
// Build task actions. Each action receives a task descriptor
// ({ input, output, ... }) and returns the produced contents
// (Buffer/string) so the build cache can fingerprint and store the result.
const actions = {

  // Copy input to output verbatim; returns the source contents for caching.
  async copy ({ input, output }) {
    await fs.copy(resolve(input), resolve(output));
    return readFile(input);
  },

  // Re-encode any image to jpeg by delegating to the image action.
  async transcode ({ input, output }) {
    const result = await actions.image({
      input,
      output,
      format: 'jpeg',
    });
    return result;
  },

  // Download a remote file to output, creating parent directories.
  // Throws on any non-200 response.
  async fetch ({ input, output }) {
    const res = await fetch(input);
    if (res.status !== 200) {
      throw new Error(`File could not be fetched (${res.status}): "${input}"`);
    }
    const body = await res.buffer();
    output = resolve(output);
    await fs.ensureDir(path.dirname(output));
    await fs.writeFile(output, body);
    return body;
  },

  // Write literal content to output, creating parent directories.
  async write ({ output, content }) {
    output = resolve(output);
    await fs.ensureDir(path.dirname(output));
    await fs.writeFile(output, content);
    return Buffer.from(content);
  },

  // Resize/convert an image with GraphicsMagick.
  // options: input, output, width/height, upscale, crop, fill
  // ('crop'|'cover'|'box'|'contain'|default exact), gravity, bgColor,
  // percentage, format ('ico' renders as png then converts), quality,
  // samplingFactor, sharpen, flatten, interlace, background, noProfile.
  // Note: options is mutated in place (width/height/sharpen normalization).
  async image (options) {
    const output = resolve(options.output);
    const contents = await readFile(options.input);
    let gmfile = gm(contents, resolve(options.input));

    // source dimensions, needed for clamping and proportional fill
    const size = await Promise.fromCallback((cb) => gmfile.size(cb));

    if (options.height || options.width) {

      // if upscale is not requested, restrict size
      // NOTE(review): global isNaN treats undefined as NaN, so an unset
      // dimension is deliberately skipped here — do not switch to Number.isNaN.
      if (!options.upscale) {
        if (!isNaN(options.width)) {
          options.width = Math.min(options.width, size.width);
        }
        if (!isNaN(options.height)) {
          options.height = Math.min(options.height, size.height);
        }
      }

      // if one dimension is not set - we fill it proportionally
      if (!options.height) {
        if (options.crop) {
          options.height = size.height;
        } else {
          options.height = Math.ceil((options.width / size.width) * size.height);
        }
      }
      if (!options.width) {
        if (options.crop) {
          options.width = size.width;
        } else {
          options.width = Math.ceil((options.height / size.height) * size.width);
        }
      }

      if (options.fill === 'crop') {
        // pad with a border first when the source is smaller than the target,
        // so the crop never exceeds the available canvas
        if (size.height < options.height || size.width < options.width) {
          gmfile = gmfile
            .geometry(options.width, options.height, '^')
            .borderColor(options.bgColor || '#FFFFFF')
            .border(options.width, options.height)
            .gravity(options.gravity)
            .crop(options.width, options.height);
        } else {
          gmfile = gmfile
            .geometry(options.width, options.height, '^')
            .gravity(options.gravity)
            .crop(options.width, options.height);
        }
      } else if (options.fill === 'cover') {
        // fill the target box, allowing overflow on one axis
        gmfile = gmfile
          .geometry(options.width, options.height, '^');
      } else if (options.fill === 'box') {
        // fit inside the box and pad out to exact dimensions
        gmfile = gmfile
          .geometry(options.width, options.height)
          .borderColor(options.bgColor || '#FFFFFF')
          .border(options.width, options.height)
          .gravity(options.gravity)
          .crop(options.width, options.height);
      } else if (options.fill === 'contain') {
        // fit inside the box, preserving aspect ratio
        gmfile = gmfile
          .geometry(options.width, options.height);
      } else {
        // default: force exact dimensions, ignoring aspect ratio
        gmfile = gmfile
          .geometry(options.width, options.height, '!');
      }

    } else if (options.percentage) {
      gmfile = gmfile
        .geometry(options.percentage, null, '%');
    }

    if (options.format) {
      // ico is produced by rendering png first, then converting below
      gmfile = gmfile
        .setFormat(options.format === 'ico' ? 'png' : options.format);
    }

    if (options.quality) {
      gmfile = gmfile.quality(Math.floor(options.quality));
    } else {
      // default output quality
      gmfile = gmfile.quality(Math.floor(95));
    }

    // chroma subsampling, e.g. [2, 2] for 4:2:0 jpeg output
    if (options.samplingFactor) {
      gmfile = gmfile
        .samplingFactor(options.samplingFactor[0], options.samplingFactor[1]);
    }

    if (options.sharpen) {
      // truthy non-string means "use the default unsharp mask"
      options.sharpen = (typeof options.sharpen === 'string') ? options.sharpen : '1.5x1+0.7+0.02';
      gmfile = gmfile.unsharp(options.sharpen);
    }

    if (options.flatten) {
      gmfile = gmfile.flatten();
    }

    if (options.interlace) {
      gmfile = gmfile.interlace('Line');
    }

    if (options.background) {
      gmfile = gmfile.background(options.background);
    }

    if (options.noProfile) {
      gmfile = gmfile.noProfile();
    }

    await fs.ensureDir(path.dirname(output));
    let result = await Promise.fromCallback((cb) => gmfile.toBuffer(cb));
    if (options.format === 'ico') result = await ico(result);
    await fs.writeFile(output, result);

    return result;
  },
};

module.exports = exports = actions;
|
||||
135
build/asset.js
Normal file
135
build/asset.js
Normal file
@@ -0,0 +1,135 @@
|
||||
|
||||
const path = require('path');
|
||||
const { pick } = require('lodash');
|
||||
const actions = require('./actions');
|
||||
const File = require('./file');
|
||||
const { TYPE } = require('./resolve');
|
||||
const getImageDimensions = require('./lib/dimensions');
|
||||
const getVideoDimensions = require('get-video-dimensions');
|
||||
|
||||
const RESOLUTIONS = [ 2048, 1024, 768, 576, 300, 100 ];
|
||||
|
||||
module.exports = exports = class Asset extends File {
|
||||
|
||||
constructor (filepath) {
|
||||
super(filepath);
|
||||
|
||||
this.serializable.push(
|
||||
'dimensions',
|
||||
'sizes',
|
||||
);
|
||||
}
|
||||
|
||||
load () {
|
||||
switch (this.type) {
|
||||
case TYPE.VIDEO: return this.loadVideo();
|
||||
case TYPE.IMAGE: return this.loadImage();
|
||||
default:
|
||||
}
|
||||
}
|
||||
|
||||
async loadImage () {
|
||||
|
||||
const { width, height } = await getImageDimensions(this.input);
|
||||
|
||||
const ratioH = Math.round((height / width) * 100);
|
||||
const ratioW = Math.round((width / height) * 100);
|
||||
let orientation = 'wide';
|
||||
if (ratioH > 100) {
|
||||
orientation = 'tall';
|
||||
} else if (ratioH === 100) {
|
||||
orientation = 'square';
|
||||
}
|
||||
|
||||
this.dimensions = {
|
||||
width,
|
||||
height,
|
||||
ratioH,
|
||||
ratioW,
|
||||
orientation,
|
||||
};
|
||||
|
||||
if (this.preprocessed) {
|
||||
this.sizes = [ {
|
||||
output: this.out,
|
||||
url: this.url,
|
||||
width,
|
||||
height,
|
||||
} ];
|
||||
} else {
|
||||
this.sizes = [
|
||||
{
|
||||
output: this.out,
|
||||
url: this.url,
|
||||
width,
|
||||
height,
|
||||
},
|
||||
];
|
||||
|
||||
for (const w of RESOLUTIONS) {
|
||||
if (w > width) continue;
|
||||
const name = `${this.name}.${w}w${this.ext}`;
|
||||
this.sizes.push({
|
||||
output: path.join(this.base, name),
|
||||
url: path.join(this.dir, name),
|
||||
width: w,
|
||||
height: Math.ceil((w / width) * height),
|
||||
});
|
||||
}
|
||||
|
||||
this.sizes.reverse();
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
async loadVideo () {
|
||||
const { width, height } = await getVideoDimensions(this.input);
|
||||
|
||||
const ratioH = Math.round((height / width) * 100);
|
||||
const ratioW = Math.round((width / height) * 100);
|
||||
let orientation = 'wide';
|
||||
if (ratioH > 100) {
|
||||
orientation = 'tall';
|
||||
} else if (ratioH === 100) {
|
||||
orientation = 'square';
|
||||
}
|
||||
|
||||
this.dimensions = {
|
||||
width,
|
||||
height,
|
||||
ratioH,
|
||||
ratioW,
|
||||
orientation,
|
||||
};
|
||||
|
||||
this.sizes = [ {
|
||||
output: path.join(this.base, this.basename),
|
||||
url: path.join(this.dir, this.basename),
|
||||
width,
|
||||
height,
|
||||
} ];
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
webready () {
|
||||
const { type, name, sizes } = this;
|
||||
return {
|
||||
type,
|
||||
name,
|
||||
sizes: sizes.map((s) => pick(s, [ 'url', 'width', 'height' ])),
|
||||
};
|
||||
}
|
||||
|
||||
tasks () {
|
||||
return this.sizes.map(({ output, width }) => ({
|
||||
input: this.input,
|
||||
output,
|
||||
format: this.preprocessed ? undefined : this.ext.slice(1),
|
||||
width: this.preprocessed ? undefined : width,
|
||||
action: this.preprocessed ? actions.copy : actions.image,
|
||||
}));
|
||||
}
|
||||
|
||||
};
|
||||
225
build/cache.js
Normal file
225
build/cache.js
Normal file
@@ -0,0 +1,225 @@
|
||||
const path = require('path');
|
||||
const Promise = require('bluebird');
|
||||
const fs = require('fs-extra');
|
||||
const { memoize: memoizeSync } = require('lodash');
|
||||
const memoizeAsync = require('memoizepromise');
|
||||
const { resolve, readFile } = require('./resolve');
|
||||
const { hasOwn, isFunction } = require('./lib/util');
|
||||
const revHash = require('rev-hash');
|
||||
const revPath = require('rev-path');
|
||||
|
||||
const CACHE = 'if-cache';
|
||||
const MANIFEST = 'if-cache.json';
|
||||
const REV_MANIFEST = 'rev-manifest.json';
|
||||
|
||||
module.exports = exports = class Manifest {
|
||||
|
||||
constructor ({ time = true, inputRev = true, prod = false, writeCount = 100, writeInterval = 10000 }) {
|
||||
this.compareBy = { time, inputRev };
|
||||
this.manifest = {};
|
||||
this.rev = memoizeSync(revHash);
|
||||
this.stat = memoizeAsync((f) =>
|
||||
fs.stat(resolve(f))
|
||||
.catch(() => null)
|
||||
.then((stats) => (stats && Math.floor(stats.mtimeMs / 1000))),
|
||||
);
|
||||
this.revFile = memoizeAsync((f) =>
|
||||
readFile(f)
|
||||
.then(revHash)
|
||||
.catch(() => null),
|
||||
);
|
||||
|
||||
this.isProd = prod;
|
||||
this.writeCounter = 0;
|
||||
this.lastWriteTime = 0;
|
||||
this.writeCountThreshold = writeCount;
|
||||
this.writeTimeThreshold = writeInterval;
|
||||
this.revManifest = {};
|
||||
}
|
||||
|
||||
async load () {
|
||||
const [ manifest ] = await Promise.all([
|
||||
fs.readJson(resolve(MANIFEST)).catch(() => ({})),
|
||||
fs.ensureDir(resolve(CACHE)),
|
||||
]);
|
||||
|
||||
this.manifest = manifest;
|
||||
}
|
||||
|
||||
hash ({ action, input, output, ...task }) {
|
||||
if (!isFunction(action)) throw new Error('Task action is not a task action (function).');
|
||||
|
||||
const name = action.name;
|
||||
const hash = [
|
||||
name,
|
||||
this.rev(input),
|
||||
this.rev(output),
|
||||
];
|
||||
|
||||
// if this is an image operation, include the format and width in the hash
|
||||
if (name === 'image') hash.splice(1, 0, [ task.width, task.format ]);
|
||||
|
||||
return hash.join('.');
|
||||
}
|
||||
|
||||
has (task) {
|
||||
const hash = this.hash(task);
|
||||
return hasOwn(this.manifest, hash);
|
||||
}
|
||||
|
||||
async get (task) {
|
||||
const hash = this.hash(task);
|
||||
const { input, output } = task;
|
||||
const ext = path.extname(task.output);
|
||||
const local = !task.input.includes('://');
|
||||
const cached = path.join(CACHE, hash + ext);
|
||||
const result = {
|
||||
iTime: 0,
|
||||
iRev: null,
|
||||
oRev: null,
|
||||
...this.manifest[hash],
|
||||
hash,
|
||||
action: task.action.name,
|
||||
input,
|
||||
output,
|
||||
mode: 'new',
|
||||
};
|
||||
|
||||
if (task.nocache) {
|
||||
result.mode = 'silent';
|
||||
return result;
|
||||
}
|
||||
|
||||
const [ iTime, oTime, cTime, iRev ] = await Promise.all([
|
||||
local && this.stat(input),
|
||||
this.stat(output),
|
||||
this.stat(cached),
|
||||
local && this.compareBy.inputRev && this.revFile(input),
|
||||
]);
|
||||
|
||||
if (local && !iTime) throw new Error('Input file does not exist: ' + input);
|
||||
|
||||
|
||||
result.outputExists = !!oTime;
|
||||
|
||||
if (oTime) {
|
||||
// output exists, we can move on
|
||||
result.mode = 'skip';
|
||||
return result;
|
||||
}
|
||||
|
||||
if (this.compareBy.time && iTime <= result.iTime) {
|
||||
result.inputDiffers = false;
|
||||
} else if (local && this.compareBy.inputRev && iRev !== result.iRev) {
|
||||
// either we aren't checking time, or the time has changed
|
||||
// check if the contents changed
|
||||
|
||||
result.inputDiffers = true;
|
||||
result.iRev = iRev;
|
||||
result.mode = 'update';
|
||||
return result;
|
||||
}
|
||||
|
||||
if (!cTime || cTime < iTime) {
|
||||
// output does not exist in the cache or the cached file predates input, we need to remake.
|
||||
result.inputDiffers = true;
|
||||
result.oRev = null;
|
||||
result.mode = 'rebuild';
|
||||
return result;
|
||||
}
|
||||
|
||||
result.mode = 'cached';
|
||||
result.cache = await readFile(cached);
|
||||
return result;
|
||||
}
|
||||
|
||||
async touch (task, lastSeen = new Date()) {
|
||||
|
||||
if (task.nocache || task.action.name) return null;
|
||||
|
||||
const hash = this.hash(task);
|
||||
const { input, output } = task;
|
||||
const local = !task.input.includes('://');
|
||||
|
||||
const [ iTime, iRev ] = await Promise.all([
|
||||
local && this.stat(input),
|
||||
local && this.compareBy.inputRev && this.revFile(input),
|
||||
]);
|
||||
|
||||
const record = {
|
||||
...this.manifest[hash],
|
||||
action: task.action.name,
|
||||
hash,
|
||||
input,
|
||||
iTime,
|
||||
iRev,
|
||||
output,
|
||||
oTime: Math.floor(lastSeen / 1000),
|
||||
lastSeen,
|
||||
};
|
||||
|
||||
if (record.revPath) this.revManifest[output] = record.revPath;
|
||||
this.manifest[hash] = record;
|
||||
await this.writeManifest();
|
||||
return { ...record };
|
||||
}
|
||||
|
||||
async set (task, result, lastSeen = new Date()) {
|
||||
const hash = this.hash(task);
|
||||
const { input, output } = task;
|
||||
const nocache = task.nocache || task.action.name === 'copy';
|
||||
const ext = path.extname(task.output);
|
||||
const local = !task.input.includes('://');
|
||||
const cached = path.join(CACHE, hash + ext);
|
||||
const oRev = revHash(result);
|
||||
|
||||
const [ iTime, iRev ] = await Promise.all([
|
||||
local && this.stat(input),
|
||||
local && this.compareBy.inputRev && this.revFile(input),
|
||||
result && !nocache && fs.writeFile(resolve(cached), result),
|
||||
]);
|
||||
|
||||
const record = {
|
||||
action: task.action.name,
|
||||
hash,
|
||||
input,
|
||||
iTime,
|
||||
iRev,
|
||||
output,
|
||||
oTime: Math.floor(lastSeen / 1000),
|
||||
oRev,
|
||||
lastSeen,
|
||||
revPath: revPath(output, oRev),
|
||||
};
|
||||
|
||||
this.revManifest[output] = record.revPath;
|
||||
if (!nocache) {
|
||||
this.manifest[hash] = record;
|
||||
await this.writeManifest();
|
||||
}
|
||||
return { ...record };
|
||||
}
|
||||
|
||||
|
||||
async writeManifest (force) {
|
||||
if (!force && this.isProd) return; // disable interim writes during prod builds.
|
||||
if (!force && ++this.writeCounter % this.writeCountThreshold) return;
|
||||
const now = Date.now();
|
||||
if (!force && now - this.lastWriteTime < this.writeTimeThreshold) return;
|
||||
this.lastWriteTime = now;
|
||||
await fs.writeFile(resolve(MANIFEST), JSON.stringify(this.manifest, null, 2));
|
||||
}
|
||||
|
||||
|
||||
async save () {
|
||||
const revManifest = this.isProd && await fs.readJson(resolve(REV_MANIFEST))
|
||||
.catch(() => ({}))
|
||||
.then((old) => ({ ...old, ...this.revManifest }));
|
||||
|
||||
await Promise.all([
|
||||
revManifest && fs.writeFile(resolve(REV_MANIFEST), JSON.stringify(revManifest, null, 2)),
|
||||
this.writeManifest(true),
|
||||
]);
|
||||
}
|
||||
|
||||
};
|
||||
251
build/engines.js
Normal file
251
build/engines.js
Normal file
@@ -0,0 +1,251 @@
|
||||
|
||||
const path = require('path');
|
||||
|
||||
const fs = require('fs-extra');
|
||||
const log = require('fancy-log');
|
||||
const { minify } = require('html-minifier-terser');
|
||||
const { resolve, readFile, ENGINE } = require('./resolve');
|
||||
|
||||
const handlebars = require('handlebars');
|
||||
const HandlebarsKit = require('hbs-kit');
|
||||
HandlebarsKit.load(handlebars);
|
||||
|
||||
const slugs = require('slugify');
// Header-anchor slugs: strip punctuation, then lowercase.
const slugify = (s) => slugs(s, { remove: /[*+~.,()'"!?:@/\\]/g }).toLowerCase();
const striptags = require('string-strip-html');
|
||||
|
||||
const markdownIt = require('markdown-it');
|
||||
|
||||
|
||||
|
||||
// Two markdown renderers:
// - 'full': page bodies; raw HTML allowed, autolinking, header anchors with
//   permalink icons, and the ||| raw-html fence plugin.
// - 'preview': plain-text excerpts; HTML and autolinking disabled, tokens
//   filtered down by markdown-token-filter.
const markdownEngines = {
  full: markdownIt({
    html: true,
    linkify: true,
    typographer: true,
  })
    .enable('image')
    .use(require('markdown-it-anchor'), {
      permalink: true,
      permalinkClass: 'header-link',
      permalinkSymbol: '<img src="/images/svg/paragraph.svg">',
      slugify,
    })
    .use(require('./lib/markdown-raw-html')),

  preview: markdownIt({
    html: false,
    linkify: false,
    typographer: true,
  })
    .use(require('./lib/markdown-token-filter')),
};
|
||||
|
||||
// Render markdown `input` to HTML using the engine for `mode`
// ('full' | 'preview'). Handlebars expressions wrapped in {!{ … }!} are
// evaluated against `env` first; their output is protected from the markdown
// parser by the ||| fences understood by ./lib/markdown-raw-html.
function markdown (mode, input, env) {
  input = input.replace(/\{!\{([\s\S]*?)\}!\}/mg, (match, contents) => {
    try {
      const result = handlebars.compile(contents)(env);
      return '|||' + result + '|||';
    } catch (e) {
      log.error(e);
      return '';
    }
  });

  if (mode === 'preview') {
    // Previews are plain text: drop <!--[ … ]--> blocks and any |||-fenced
    // raw html produced above, then strip remaining tags and truncate.
    // FIX: the fence pattern was /|||[\s\S]*?|||/gi, which parses as an
    // alternation of empty branches and only ever matched the empty string,
    // so fenced content was never removed. The pipes must be escaped.
    input = striptags(input
      .replace(/<!--\[[\s\S]*?\]-->/g, '')
      .replace(/\|\|\|[\s\S]*?\|\|\|/g, ''),
    ).trim();
    if (input.length > 1000) input = input.slice(0, 1000) + '…';
    // NOTE(review): this renders the preview here AND again at the return
    // below, passing already-rendered HTML through the engine twice —
    // confirm whether the second render is intentional.
    input = input ? markdownEngines[mode].render(input) : '';
  } else {
    input = input.replace(/<!--[[\]]-->/g, '');
  }

  return input ? markdownEngines[mode].render(input, env) : '';
}
|
||||
|
||||
function stripIndent (input) {
|
||||
const match = input.match(/^[^\S\n]*(?=\S)/gm);
|
||||
const indent = match && Math.min(...match.map((el) => el.length));
|
||||
|
||||
if (indent) {
|
||||
const regexp = new RegExp(`^.{${indent}}`, 'gm');
|
||||
input = input.replace(regexp, '');
|
||||
}
|
||||
|
||||
return input;
|
||||
}
|
||||
|
||||
// html-minifier-terser options applied to production page output.
const MINIFY_CONFIG = {
  conservativeCollapse: true,
  collapseWhitespace: true,
  minifyCSS: true,
  removeComments: true,
  removeRedundantAttributes: true,
};

// Handlebars partials registered at engine setup, keyed by partial name.
const HANDLEBARS_PARTIALS = {
  layout: 'templates/layout.hbs',
};
|
||||
|
||||
// Build the render-engine map: loads partials and the post template,
// registers all custom handlebars helpers, and returns one render function
// per ENGINE type plus a PREVIEW renderer. `prod` enables minification and
// the rev manifest.
module.exports = exports = async function (prod) {
  for (const [ name, file ] of Object.entries(HANDLEBARS_PARTIALS)) {
    try {
      const contents = await readFile(file);
      const template = handlebars.compile(contents.toString('utf8'));
      handlebars.registerPartial(name, template);
    } catch (e) {
      log.error('Could not execute load partial ' + file, e);
    }
  }

  const pageTemplateRaw = await readFile('templates/post.hbs');
  if (!pageTemplateRaw) throw new Error('Post template was empty?');
  try {
    // `var` on purpose: hoists pageTemplate out of the try block so the
    // MARKDOWN renderer below can close over it.
    // NOTE(review): if compile throws, pageTemplate stays undefined and the
    // MARKDOWN renderer will crash later — consider rethrowing here.
    var pageTemplate = handlebars.compile(pageTemplateRaw.toString('utf8'));
  } catch (e) {
    log.error('Crash while loading post template', e);
  }

  // map of original asset urls to revisioned urls (prod only)
  const revManifest = prod && await fs.readJson(resolve('rev-manifest.json')).catch(() => {}).then((r) => r || {});

  const helpers = new Injectables(prod, revManifest);
  handlebars.registerHelper('import', helpers.import());
  handlebars.registerHelper('markdown', helpers.markdown());
  handlebars.registerHelper('icon', helpers.icon());
  handlebars.registerHelper('prod', helpers.production());
  handlebars.registerHelper('rev', helpers.rev());

  // minify only in prod; passthrough otherwise
  // NOTE(review): recent html-minifier-terser versions return a Promise from
  // minify() — verify the installed version is the synchronous one.
  const shrink = (input) => (prod ? minify(input, MINIFY_CONFIG) : input);

  const result = {
    [ENGINE.HANDLEBARS]: (source, env) => {
      const template = handlebars.compile(source);
      return shrink(template(env));
    },
    [ENGINE.MARKDOWN]: (source, env) => shrink(pageTemplate({ ...env, contents: markdown('full', source, env) })),
    [ENGINE.OTHER]: (source) => shrink(source),
    PREVIEW: (source, env) => markdown('preview', source, env),
  };

  return result;
};
|
||||
|
||||
// Factory for the custom handlebars helpers. Each method returns a helper
// closure bound to this instance; loaded files are memoized in
// `this.injections` keyed by absolute path.
class Injectables {

  constructor (prod, revManifest) {
    this.prod = prod;
    this.revManifest = revManifest;
    // cache of loaded (and optionally compiled) template contents
    this.injections = {};
  }

  // Resolve a helper-supplied template path:
  //   '/x'  -> project root relative
  //   '~x'  -> templates/ relative (slice(2) also drops the separator)
  //   other -> relative to the calling page's cwd
  // Appends `.{type}` when the path lacks that extension.
  _parsePath (tpath, local, type) {
    if (tpath[0] === '/') tpath = resolve(tpath.slice(1));
    else if (tpath[0] === '~') tpath = resolve('templates', tpath.slice(2));
    else tpath = path.resolve(local.cwd, tpath);
    if (type && !tpath.endsWith(type)) tpath += '.' + type;
    return tpath;
  }

  // Load (and memoize) the file at tpath, optionally transforming it with
  // `make` (e.g. handlebars.compile) before caching. Throws when the file is
  // missing; logs and returns '' when reading/transforming fails.
  _template (tpath, make) {
    if (!tpath) throw new Error('Received an empty template path: ' + tpath);
    if (this.injections[tpath]) return this.injections[tpath];

    if (!fs.existsSync(tpath)) {
      throw new Error('Injectable does not exist: ' + tpath);
    }

    let contents;
    try {
      contents = fs.readFileSync(tpath).toString('utf8');
      if (make) contents = make(contents);
      this.injections[tpath] = contents;
      return contents;
    } catch (e) {
      log.error(e, 'An error occured while loading the injectable: ' + tpath);
    }

    return '';
  }

  // {{rev url}} — swap a root-relative url for its revisioned path in prod.
  rev () {
    const self = this;
    return function (url) {
      if (!url) return '';
      if (url[0] === '/') url = url.substr(1);
      if (self.prod && self.revManifest[url]) return '/' + self.revManifest[url];
      return '/' + url;
    };
  }

  // {{prod}} / {{#prod}}…{{/prod}} — inline flag or block conditional on
  // whether this is a production build.
  production () {
    const self = this;
    return function (options) {
      if (!options.fn) return self.prod;
      return self.prod ? options.fn(this) : options.inverse(this);
    };
  }

  // {{#markdown}}…{{/markdown}} or {{markdown "path"}} — render inline block
  // content (dedented first) or an external .md file as full markdown.
  markdown () {
    const self = this;
    return function (...args) {
      const { fn } = args.pop();
      let contents;

      if (fn) {
        contents = stripIndent(fn(this));
      } else {
        let tpath = args.shift();
        tpath = self._parsePath(tpath, this.local, 'md');

        contents = self._template(tpath);
      }

      contents = markdown('full', contents, this);

      return new handlebars.SafeString(contents);
    };
  }

  // {{import "path" [context] key=value…}} — compile and render an external
  // .hbs template with the given (or current) context plus hash overrides.
  import () {
    const self = this;
    return function (tpath, ...args) {
      const { hash } = args.pop();
      const value = args.shift();
      const context = handlebars.createFrame(value || this);
      Object.assign(context, hash || {});

      tpath = self._parsePath(tpath, this.local, 'hbs');

      try {
        const contents = self._template(tpath, handlebars.compile)(context);
        return new handlebars.SafeString(contents);
      } catch (e) {
        log.error('Could not execute import template ' + tpath, e);
        return '';
      }
    };
  }

  // {{icon "name" [size=N]}} — inline an svg from the page's svg/ folder,
  // wrapped in a span that optionally fixes its pixel size.
  icon () {
    const self = this;
    return function (name, ...args) {
      const { hash } = args.pop();
      const tpath = path.join(this.local.root, 'svg', name + '.svg');

      try {
        const contents = self._template(tpath, (s) =>
          handlebars.compile(`<span class="svg-icon" {{#if size}}style="width:{{size}}px;height:{{size}}px"{{/if}}>${s}</span>`),
        )({ size: hash && hash.size });

        return new handlebars.SafeString(contents);
      } catch (e) {
        log.error('Could not execute import template ' + tpath, e);
        return '';
      }
    };
  }

}
|
||||
63
build/evaluate.js
Normal file
63
build/evaluate.js
Normal file
@@ -0,0 +1,63 @@
|
||||
const { sortBy, uniqBy } = require('lodash');
|
||||
const { resolve } = require('./resolve');
|
||||
const log = require('fancy-log');
|
||||
const Promise = require('bluebird');
|
||||
const fs = require('fs-extra');
|
||||
const path = require('path');
|
||||
|
||||
// Which cache-status modes the task runner below reports via log.info.
const LOG = {
  new: true,
  update: true,
  skip: true,
  rebuild: true,
  cached: false,
  copy: false,
  silent: false,
};
|
||||
|
||||
module.exports = exports = async function process (tasks, cache) {
|
||||
const lastSeen = new Date();
|
||||
|
||||
tasks = uniqBy(tasks, 'output');
|
||||
tasks = sortBy(tasks, [ 'input', 'output' ]);
|
||||
|
||||
await Promise.map(tasks, async (task) => {
|
||||
let result;
|
||||
let status = await cache.get(task);
|
||||
const { input, output } = task;
|
||||
const taskLog = [ status.mode, status.input, status.output ];
|
||||
if (status.mode === 'skip') {
|
||||
await cache.touch(task, lastSeen);
|
||||
if (taskLog && LOG[taskLog[0]]) log.info(...taskLog);
|
||||
return status;
|
||||
}
|
||||
|
||||
if (status.mode === 'cached') {
|
||||
result = status.cache;
|
||||
await fs.ensureDir(path.dirname(resolve('dist', output)));
|
||||
await fs.writeFile(resolve('dist', output), result);
|
||||
await cache.touch(task, lastSeen);
|
||||
} else {
|
||||
try {
|
||||
result = await task.action({
|
||||
...task,
|
||||
input,
|
||||
output: 'dist/' + output,
|
||||
});
|
||||
} catch (err) {
|
||||
log.error(`Task (${task.action.name}) failed for file ${output}.\n`, err);
|
||||
return false;
|
||||
}
|
||||
|
||||
status = await cache.set(task, result, lastSeen);
|
||||
}
|
||||
|
||||
if (taskLog && LOG[taskLog[0]]) log.info(...taskLog);
|
||||
|
||||
if (cache.isProd) {
|
||||
fs.writeFile(resolve('dist', status.revPath), result);
|
||||
}
|
||||
|
||||
}, { concurrency: 1 });
|
||||
|
||||
};
|
||||
45
build/favicon.js
Normal file
45
build/favicon.js
Normal file
@@ -0,0 +1,45 @@
|
||||
|
||||
const fs = require('fs-extra');
|
||||
const actions = require('./actions');
|
||||
const { resolve } = require('./resolve');
|
||||
|
||||
// Return the first truthy value in the iterable, or false when every value
// is falsy (or the iterable is empty).
function any (values) {
  for (const value of values) {
    if (value) return value;
  }
  return false;
}
|
||||
|
||||
// Candidate favicon source files, checked in this priority order.
const MATCHES = [
  'favicon.png',
  'favicon.gif',
  'favicon.jpeg',
  'favicon.jpg',
];
|
||||
|
||||
module.exports = exports = async function favicon () {
|
||||
|
||||
const input = any(await Promise.all(
|
||||
MATCHES.map((f) =>
|
||||
fs.exists(resolve(f)).then((y) => y && f),
|
||||
),
|
||||
));
|
||||
|
||||
if (!input) return [];
|
||||
|
||||
// input = resolve(input);
|
||||
const tasks = [ 32, 57, 64, 76, 96, 114, 120, 128, 144, 152, 180, 192, 196, 228 ].map((width) => ({
|
||||
input,
|
||||
output: `favicon${width}.png`,
|
||||
format: 'png',
|
||||
width,
|
||||
action: actions.image,
|
||||
}));
|
||||
|
||||
tasks.push({
|
||||
input,
|
||||
output: 'favicon.ico',
|
||||
format: 'ico',
|
||||
action: actions.image,
|
||||
});
|
||||
|
||||
return tasks;
|
||||
};
|
||||
91
build/file.js
Normal file
91
build/file.js
Normal file
@@ -0,0 +1,91 @@
|
||||
|
||||
const path = require('path');
|
||||
const { pick } = require('lodash');
|
||||
const {
|
||||
normalizedExt,
|
||||
kind,
|
||||
type,
|
||||
} = require('./resolve');
|
||||
const actions = require('./actions');
|
||||
|
||||
|
||||
module.exports = exports = class File {
|
||||
|
||||
constructor (filepath) {
|
||||
if (filepath && typeof filepath === 'object') {
|
||||
// we've been passed a json object, treat as serialized Page
|
||||
Object.assign(this, filepath);
|
||||
return this;
|
||||
}
|
||||
|
||||
const file = path.parse(filepath);
|
||||
let { base: basename, name } = file;
|
||||
|
||||
this.preprocessed = false;
|
||||
if (name[0] === '_') {
|
||||
this.preprocessed = true;
|
||||
file.name = name = name.slice(1);
|
||||
file.basename = basename = basename.slice(1);
|
||||
}
|
||||
|
||||
// remove the public root and any _images segment from the dir
|
||||
const dir = this._dir(file.dir);
|
||||
|
||||
this.kind = kind(filepath);
|
||||
this.type = type(filepath);
|
||||
this.cwd = file.dir;
|
||||
this.ext = this.preprocessed ? file.ext : normalizedExt(file.ext);
|
||||
this.input = filepath; // public/file.ext
|
||||
this.base = path.join(...dir); // '', 'folder', 'folder/subfolder'
|
||||
this.dir = path.join('/', ...dir); // /, /folder, /folder/subfolder
|
||||
this.name = name; // index, fileA, fileB
|
||||
this.basename = basename; // index.ext, fileA.ext, fileB.ext
|
||||
this.ext = file.ext;
|
||||
|
||||
this._out();
|
||||
|
||||
this.serializable = [
|
||||
'kind',
|
||||
'type',
|
||||
'cwd',
|
||||
'ext',
|
||||
'input',
|
||||
'base',
|
||||
'dir',
|
||||
'name',
|
||||
'basename',
|
||||
'ext',
|
||||
'out',
|
||||
'url',
|
||||
];
|
||||
}
|
||||
|
||||
_dir (dir) {
|
||||
dir = dir.split('/');
|
||||
if (dir[0] === 'public') dir.shift();
|
||||
const i = dir.indexOf('_images');
|
||||
if (i > -1) dir.splice(i, 1);
|
||||
return dir;
|
||||
}
|
||||
|
||||
_out () {
|
||||
this.out = path.join(this.base, `${this.name}${this.ext}`);
|
||||
this.url = path.join(this.dir, `${this.name}${this.ext}`);
|
||||
}
|
||||
|
||||
load () {}
|
||||
|
||||
tasks () {
|
||||
return [ {
|
||||
input: this.input,
|
||||
output: this.out,
|
||||
action: actions.copy,
|
||||
nocache: true,
|
||||
} ];
|
||||
}
|
||||
|
||||
toJson () {
|
||||
return pick(this, this.serializable);
|
||||
}
|
||||
|
||||
};
|
||||
54
build/index.js
Normal file
54
build/index.js
Normal file
@@ -0,0 +1,54 @@
|
||||
|
||||
const loadPublicFiles = require('./public');
|
||||
const Cache = require('./cache');
|
||||
const Promise = require('bluebird');
|
||||
const fs = require('fs-extra');
|
||||
|
||||
const primeTweets = require('./page-tweets');
|
||||
const pageWriter = require('./page-writer');
|
||||
const evaluate = require('./evaluate');
|
||||
const { resolve } = require('./resolve');
|
||||
|
||||
const favicon = require('./favicon');
|
||||
const scss = require('./scss');
|
||||
const svg = require('./svg');
|
||||
const scripts = require('./scripts');
|
||||
|
||||
|
||||
exports.everything = function (prod = false) {
|
||||
const fn = async () => {
|
||||
|
||||
// load a directory scan of the public folder
|
||||
const PublicFiles = await loadPublicFiles();
|
||||
|
||||
// load data for all the files in that folder
|
||||
await Promise.map(PublicFiles.assets, (p) => p.load());
|
||||
await Promise.map(PublicFiles.pages, (p) => p.load(PublicFiles));
|
||||
|
||||
// prime tweet data for all pages
|
||||
const pages = await primeTweets(PublicFiles.pages);
|
||||
|
||||
// compile all tasks to be completed
|
||||
const tasks = await Promise.all([
|
||||
PublicFiles.tasks,
|
||||
scss(prod),
|
||||
scripts(prod),
|
||||
svg(prod),
|
||||
favicon(prod),
|
||||
]);
|
||||
|
||||
await fs.writeFile(resolve('pages.json'), JSON.stringify(pages.map((p) => p.toJson()), null, 2));
|
||||
|
||||
await fs.ensureDir(resolve('dist'));
|
||||
const cache = new Cache({ prod });
|
||||
await cache.load();
|
||||
await evaluate(tasks.flat(), cache);
|
||||
await cache.save();
|
||||
|
||||
await pageWriter(pages, prod);
|
||||
};
|
||||
|
||||
const ret = () => fn().catch((err) => { console.log(err.trace || err); throw err; });
|
||||
ret.displayName = prod ? 'generateEverythingForProd' : 'generateEverything';
|
||||
return ret;
|
||||
};
|
||||
3
build/lib/dimensions.js
Normal file
3
build/lib/dimensions.js
Normal file
@@ -0,0 +1,3 @@
|
||||
|
||||
// Promise-returning wrapper around the callback-style `image-size` module.
// (FIX: `var` replaced with `const` — no reassignment occurs.)
const { promisify } = require('util');
module.exports = exports = promisify(require('image-size'));
|
||||
4
build/lib/glob.js
Normal file
4
build/lib/glob.js
Normal file
@@ -0,0 +1,4 @@
|
||||
|
||||
// Promise-returning wrapper around the callback-style `glob` module.
// (FIX: `var` replaced with `const` — no reassignment occurs.)
const { promisify } = require('util');
const glob = require('glob');
module.exports = exports = promisify(glob);
|
||||
181
build/lib/markdown-raw-html.js
Normal file
181
build/lib/markdown-raw-html.js
Normal file
@@ -0,0 +1,181 @@
|
||||
|
||||
/**
 * markdown-it plugin: treats text between fence markers (default "|||") as
 * discrete raw HTML, emitted as html_block/html_inline tokens instead of
 * being run through the markdown inline parser.
 *
 * The plugin pre-scans the whole source for the next fence and stashes the
 * result on `state.discreteHtmlScan`, so the block rule can bail out cheaply
 * on lines that contain no fence.
 *
 * @param {object} md - the markdown-it instance
 * @param {object} [options]
 * @param {string} [options.fence='|||'] - the fence marker text
 * @param {boolean} [options.debug] - log internal parser state
 */
module.exports = exports = function (md, options) {

  options = {
    fence: '|||',
    ...options,
  };

  // debug logging gated behind options.debug
  function debug (...args) {
    if (options.debug) console.log(...args); // eslint-disable-line
  }

  const fenceLen = options.fence.length;
  // const fenceFirst = options.fence.charCodeAt(0);

  // Finds the next fence at/after `pos`, records it (and the markdown line
  // it falls on) in state.discreteHtmlScan. Returns false when no fence
  // exists anywhere in the remaining source.
  function scanAhead (state, line, pos) {
    const position = state.src.indexOf(options.fence, pos);
    if (position === -1) {
      // there are no html blocks in this entire file
      state.discreteHtmlScan = {
        present: false,
      };
      return false;
    }

    // advance `line` until it is the line containing `position`
    // (eMarks[line] is the char offset of that line's end)
    while (position > state.eMarks[line]) {
      line++;
    }

    state.discreteHtmlScan = {
      present: true,
      position,
      line,
    };

    return true;
  }

  // Block rule registered ahead of the built-in fence rule.
  md.block.ruler.before('fence', 'raw', (state, startLine, lastLine) => {
    let pos = state.bMarks[startLine] + state.tShift[startLine];
    let endOfLine = state.eMarks[startLine];

    // if we have yet to do a scan of this file, perform one.
    if (!state.discreteHtmlScan && !scanAhead(state, startLine, pos)) {
      debug('First scan, nothing found');
      return false;
    }

    if (!state.discreteHtmlScan.present) {
      debug('Have scanned, did not find');
      return false;
    }

    // add one to the line here in case there is a line break in a paragraph.
    if (state.discreteHtmlScan.line > startLine + 1) {
      debug('Have scanned, found, but after this line', { startLine, targetLine: state.discreteHtmlScan.line });
      return false;
    }

    if (startLine > state.discreteHtmlScan.line) {
      // we dun fucked up — the cached scan points behind the parser; this
      // should be impossible, so fail loudly rather than loop.
      debug('We somehow got ahead of ourselves', { startLine, line: state.discreteHtmlScan.line, lastLine, pos, endOfLine, tokens: state.tokens });
      throw new Error('markdown-it-discrete-html encountered a parsing error.');
    }

    // at this point we should be on a line that contains a fence mark
    debug({ l: 67, startLine, scan: state.discreteHtmlScan });

    let openIndex, closer, nextLine;
    openIndex = state.discreteHtmlScan.position;
    do {
      let token, closeIndex;
      const tokens = [];
      // markdown text sitting before the opening fence on this line (if any)
      const preBlock = openIndex > pos && state.src.slice(pos, openIndex);
      debug({ l: 75, preBlock, startLine, lastLine });
      openIndex += fenceLen;
      pos = openIndex;

      if (preBlock && !!preBlock.trim()) {
        // parse the leading markdown separately; if it ends with a closing
        // heading/paragraph token, hold that closer back so the raw html can
        // be emitted inline inside the still-open element.
        md.block.parse(preBlock, md, state.env, tokens);

        switch (tokens[tokens.length - 1].type) {
        case 'heading_close':
        case 'paragraph_close':
          closer = tokens.pop();
          // fallthrough
        default:
          state.tokens.push(...tokens);
        }
      }

      debug({ l: 92, tokens });

      // find terminating fence
      if (!scanAhead(state, startLine, pos)) {
        debug({ l: 96, remaining: state.src.slice(pos) });
        // console.error(state.src)
        throw new Error(`Could not find terminating "${options.fence}" for a raw html block.`);
      }

      closeIndex = state.discreteHtmlScan.position;
      nextLine = state.discreteHtmlScan.line;

      if (nextLine === startLine) nextLine++;
      endOfLine = state.eMarks[nextLine];

      // everything between the fences is the raw html payload
      const content = state.src.substring(openIndex, closeIndex);
      closeIndex += fenceLen;
      pos = closeIndex;

      if (content.trim()) {
        // html_inline when we're inside a held-open paragraph/heading,
        // html_block otherwise
        token = state.push(closer ? 'html_inline' : 'html_block', '', 0);
        token.map = [ startLine, nextLine ];
        token.content = content;
        token.block = true;
        debug({ l: 115, tokens: [ token ], nextLine, pos, endOfLine: state.eMarks[nextLine], len: state.src.length, remaining: state.src.slice(pos) }); // eslint-disable-line
      }

      if (pos === endOfLine) {
        // we have ended this line, nothing more to do here.
        if (closer) {
          state.tokens.push(closer);
          debug({ l: 122, tokens: [ closer ] });
        }
        state.discreteHtmlScan = null;
        state.line = nextLine + 1;
        return true;
      }

      // still more left in this line, see if there is another block
      if (scanAhead(state, nextLine, pos)) {
        // we found another block, but it isn't on this line, so break out.
        if (state.discreteHtmlScan.line > nextLine) {
          if (closer) {
            state.tokens.push(closer);
            debug({ l: 135, tokens: [ closer ] });
          }
          state.line = nextLine + 1;
          return true;
        }

        // next block is on this line, grab everything between here and there
        openIndex = state.discreteHtmlScan.position;
      } else {
        // no more blocks on this line, grab everything between here and the end of the line
        openIndex = endOfLine;
      }
      debug({ l: 147, pos, openIndex, remaining: state.src.slice(pos) });

      // trailing markdown between the closing fence and the next fence/EOL
      const postBlock = state.src.slice(pos, openIndex);
      token = null;
      if (postBlock.trim()) {
        token = state.push('inline', '', 0);
        token.content = postBlock;
        token.map = [ nextLine, nextLine ];
        token.children = [];
        tokens.push(token);
      }
      debug({ l: 158, tokens: [ token ], postBlock, pos, openIndex, closeIndex, endOfLine });

      pos = openIndex;
      startLine = nextLine + 1;
      // NOTE(review): startLine may now be past the last line, making
      // endOfLine undefined — the while() comparison then exits the loop.
      endOfLine = state.eMarks[startLine];

      debug({ l: 164, pos, startLine, endOfLine, remaining: state.src.slice(pos) });
    } while (pos + fenceLen < endOfLine);

    if (closer) {
      state.tokens.push(closer);
      debug({ l: 169, tokens: [ closer ] });
    }

    openIndex += fenceLen;
    pos = openIndex;

    state.line = startLine;
    return true;
  });

};
|
||||
|
||||
46
build/lib/markdown-token-filter.js
Normal file
46
build/lib/markdown-token-filter.js
Normal file
@@ -0,0 +1,46 @@
|
||||
|
||||
const { flatten } = require('lodash');
|
||||
|
||||
module.exports = exports = function (md) {
|
||||
md.core.ruler.push(
|
||||
'modify-token',
|
||||
(state) => {
|
||||
state.tokens = flatten(state.tokens.map(descend).filter(Boolean));
|
||||
return false;
|
||||
},
|
||||
);
|
||||
};
|
||||
|
||||
/**
 * Rewrites a single markdown-it token for preview output.
 * Returns false to drop the token, an array to splice in replacements,
 * or the (possibly mutated) token itself.
 */
function descend (token) {
  const { type } = token;

  // links and raw html never survive into previews
  if (type === 'link_open' || type === 'link_close' || type === 'html_block') {
    return false;
  }

  // headings become plain paragraphs
  if (type === 'heading_open' || type === 'heading_close') {
    token.type = type === 'heading_open' ? 'paragraph_open' : 'paragraph_close';
    token.tag = 'p';
    token.markup = '';
    return token;
  }

  // images and containers dissolve into their child tokens
  if (type === 'image' || type === 'container') {
    return token.children;
  }

  // anything else is kept, with its children filtered recursively
  if (token.children && token.children.length) {
    token.children = flatten(token.children.map(descend).filter(Boolean));
  }
  return token;
}
|
||||
47
build/lib/random.js
Normal file
47
build/lib/random.js
Normal file
@@ -0,0 +1,47 @@
|
||||
'use strict';
|
||||
|
||||
var crypto = require('crypto');
|
||||
var uuid = require('uuid').v4;
|
||||
|
||||
// based on code from http://stackoverflow.com/a/25690754/110189
|
||||
// based on code from http://stackoverflow.com/a/25690754/110189
/**
 * Generates a random string of `length` characters drawn from `chars`,
 * using crypto-grade random bytes.
 * @param {number} length - number of characters to produce
 * @param {string} chars - character set to draw from (max 256 chars)
 * @returns {string}
 * @throws {Error} when chars is missing or longer than 256
 */
function randomString (length, chars) {
  if (!chars) {
    throw new Error('Argument \'chars\' is undefined');
  }

  const charsLength = chars.length;
  if (charsLength > 256) {
    // one random byte indexes the charset, so it cannot address more than 256
    throw new Error('Length must be less than 256 characters');
  }

  const randomBytes = crypto.randomBytes(length);
  const result = [];

  // walk a cursor forward by each random byte and wrap around the charset
  let cursor = 0;
  for (let i = 0; i < length; i++) {
    cursor += randomBytes[i];
    result.push(chars[cursor % charsLength]);
  }

  return result.join('');
}
|
||||
|
||||
module.exports = exports = function (min, max) {
|
||||
if (Array.isArray(min)) return exports.from(min);
|
||||
if (typeof max === 'undefined') {
|
||||
if (min > 0) {
|
||||
max = min;
|
||||
min = 0;
|
||||
} else {
|
||||
max = 0;
|
||||
}
|
||||
}
|
||||
return Math.floor((Math.random() * (max - min + 1)) + min);
|
||||
};
|
||||
|
||||
// Random string/ID helpers built on randomString().
// FIX: both character sets previously skipped the letter 'V'
// ("...RSTUWXYZ"), which was a typo, not an intentional exclusion.
exports.alphanumeric = (length) => randomString(length, 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789');
exports.alpha = (length) => randomString(length, 'ABCDEFGHIJKLMNOPQRSTUVWXYZ');
exports.fromCharSet = randomString;
// random element of an array (NOTE(review): empty arrays index at -1 and
// yield undefined — confirm callers never pass empty arrays)
exports.from = (array) => array[exports(array.length - 1)];
// uuid-derived id truncated to `length` hex characters
exports.id = (length) => uuid().replace(/-/g, '').substr(0, length);
|
||||
140
build/lib/tweetparse.js
Normal file
140
build/lib/tweetparse.js
Normal file
@@ -0,0 +1,140 @@
|
||||
var twemoji = require('twemoji' );
|
||||
const { deepPick, has } = require('./util');
|
||||
|
||||
// Whitelist handed to deepPick() at the end of parseTweet: only keys marked
// `true` (or described by a nested object) survive into the cached tweet.
// An array containing one object describes the shape of each array entry.
const schema = {
  id_str: true,
  created_at: true,
  user: {
    screen_name: true,
    avatar: true,     // injected by parseTweet, not part of the API payload
    name_html: true,  // injected by parseTweet (twemoji-parsed display name)
    verified: true,
    protected: true,
  },
  html: true,
  quoted_status: {
    user: {
      screen_name: true,
      avatar: true,
      name_html: true,
      verified: true,
      protected: true,
    },
  },
  entities: { media: [ {
    type: true,
    media_url_https: true,
    video_info: { variants: [ {
      url: true,
      content_type: true,
    } ] },
  } ] },
  media: true,  // injected by parseTweet (files the build must download)
};
|
||||
|
||||
// Handlers that rewrite tweet.html in place, one per entity type found in
// tweet.entities. Each receives (entityArray, tweet).
var entityProcessors = {
  // Replace "#tag" text with a link to the hashtag page.
  // FIX: the href previously contained the literal text "{tagObj.text}"
  // because the template placeholder was missing its "$".
  hashtags (tags, tweet) {
    tags.forEach((tagObj) => {
      tweet.html = tweet.html.replace('#' + tagObj.text, `<a href="https://twitter.com/hashtag/${tagObj.text}" class="hashtag">#${tagObj.text}</a>`);
    });
  },

  // Cashtag/symbol entities are intentionally ignored.
  symbols (/* symbols, tweet */) {

  },

  // Replace "@name" mentions (case-insensitive) with profile links.
  user_mentions (users, tweet) {
    users.forEach((userObj) => {
      var regex = new RegExp('@' + userObj.screen_name, 'gi');
      tweet.html = tweet.html.replace(regex, `<a href="https://twitter.com/${userObj.screen_name}" class="mention">@${userObj.screen_name}</a>`);
    });
  },

  // Replace shortened t.co urls with anchors showing the display url.
  urls (urls, tweet) {
    urls.forEach((urlObj) => {
      var quotedTweetHtml = '';
      var indices = urlObj.indices;
      // the raw url text exactly as it appears in the original tweet body
      var urlToReplace = (tweet.full_text || tweet.text).substring(indices[0], indices[1]);

      var finalText = quotedTweetHtml || urlObj.display_url.link(urlObj.expanded_url);
      tweet.html = tweet.html.replace(urlToReplace, finalText);
    });
  },

  // Strip media urls from the html; media is rendered separately by the
  // templates. The commented code below is the previous inline renderer,
  // retained for reference.
  media (media, tweet) {
    media.forEach((mediaObj) => {
      tweet.html = tweet.html.replace(mediaObj.url, '');
      return;

      // if (mediaObj.type === 'photo') {
      //   // Use HTTPS if available
      //   var src = mediaObj.media_url_https ? mediaObj.media_url_https : mediaObj.media_url;

      //   if (options &&
      //     options.photoSize &&
      //     mediaObj.sizes &&
      //     mediaObj.sizes[options.photoSize]) {
      //     // If specified size is available, patch image src to use it
      //     src = src + ':' + options.photoSize;
      //   }

      //   tweet.html = tweet.html.replace(mediaObj.url, `<img src="${src}" alt=""/>`);
      // } else if (mediaObj.type === 'video') {
      //   var source = '';
      //   mediaObj.video_info.variants.forEach((info) => {
      //     source += `<source src="${info.url}" type="${info.content_type}">`;
      //   });
      //   var video = `<video controls poster="${mediaObj.media_url}">${source}</video>`;
      //   tweet.html = tweet.html.replace(mediaObj.url, video);
      // }
    });
  },
};
|
||||
|
||||
/**
 * Converts raw Twitter API payloads into the trimmed, html-ready form the
 * site caches. Accepts a single tweet object or an array of them.
 * NOTE(review): an empty array has length 0 and falls into the
 * parseTweet(tweets) branch — confirm callers never pass [].
 */
module.exports = exports = function (tweets) {
  return tweets.length ? tweets.map(parseTweet) : parseTweet(tweets);

  function parseTweet (tweet) {
    // clone the tweet so we're not altering the original
    tweet = JSON.parse(JSON.stringify(tweet));

    // record the author's avatar as a media asset the build must download
    tweet.user.avatar = {
      input: tweet.user.profile_image_url_https,
      output: 'tweets/' + tweet.user.screen_name + '.jpg',
    };

    tweet.media = [
      tweet.user.avatar,
    ];

    // Copying text value to a new property html. The final output will be set to this property
    tweet.html = (tweet.full_text || tweet.text)
      .split(/(\r\n|\n\r|\r|\n)+/)
      .map((s) => s.trim() && '<p>' + s + '</p>')
      .filter(Boolean)
      .join('');

    // quoted tweets get the same treatment, recursively
    if (tweet.quoted_status) {
      tweet.quoted_status = parseTweet(tweet.quoted_status);
    }

    // extended_entities carries the richer media records; prefer it
    if (has(tweet, 'entities.media') && has(tweet, 'extended_entities.media')) {
      tweet.entities.media = tweet.extended_entities.media;
      delete tweet.extended_entities;
    }

    // Process entities
    // NOTE(review): an entity type without a matching processor would throw
    // here — entityProcessors covers the types the API currently returns.
    if (Object.getOwnPropertyNames(tweet.entities).length) {
      for (let [ entityType, entity ] of Object.entries(tweet.entities)) { // eslint-disable-line prefer-const
        entityProcessors[entityType](entity, tweet);
      }
    }

    // Process Emoji's (replace unicode emoji with twemoji <img> tags)
    tweet.html = twemoji.parse(tweet.html);
    tweet.user.name_html = twemoji.parse(tweet.user.name);

    // strip everything not whitelisted by the schema
    return deepPick(tweet, schema);
  }

};
|
||||
1654
build/lib/util.js
Normal file
1654
build/lib/util.js
Normal file
File diff suppressed because it is too large
Load Diff
89
build/page-tweets.js
Normal file
89
build/page-tweets.js
Normal file
@@ -0,0 +1,89 @@
|
||||
const { chunk, uniq, difference } = require('lodash');
|
||||
const fs = require('fs-extra');
|
||||
const { resolve } = require('./resolve');
|
||||
const log = require('fancy-log');
|
||||
const tweetparse = require('./lib/tweetparse');
|
||||
const Twitter = require('twitter-lite');
|
||||
|
||||
|
||||
/**
 * Resolves the tweet ids referenced by pages into full tweet data.
 * Flow: read the twitter client config plus two json stores (cache of parsed
 * tweets, backup of raw payloads) -> fetch any ids missing from the cache ->
 * fall back to the backup for ids the API no longer returns -> replace each
 * page's id list with an id->tweet dictionary -> persist all three stores.
 * @param {Page[]} pages
 * @returns {Promise<Page[]>} the same pages, mutated in place
 */
module.exports = exports = async function tweets (pages) {
  const [ twitter, twitterBackup, twitterCache ] = await Promise.all([
    // missing config yields a no-op client (see getTwitterClient)
    fs.readJson(resolve('twitter-config.json')).catch(() => null)
      .then(getTwitterClient),
    fs.readJson(resolve('twitter-backup.json')).catch(() => ({})),
    fs.readJson(resolve('twitter-cache.json')).catch(() => ({})),
  ]);

  // collect every referenced id not already in the cache
  let tweetsNeeded = [];
  const tweetsPresent = Object.keys(twitterCache);

  for (const page of pages) {
    if (!page.tweets || !page.tweets.length) continue;

    const missing = difference(page.tweets, tweetsPresent);
    tweetsNeeded.push(...missing);
  }

  tweetsNeeded = uniq(tweetsNeeded);

  /* Load Missing Tweets **************************************************/

  if (tweetsNeeded.length) {
    log('Fetching tweets: ' + tweetsNeeded.join(', '));
    // the lookup endpoint accepts up to 100 ids per request
    const arriving = await Promise.all(chunk(tweetsNeeded, 99).map(twitter));

    const loaded = [];
    for (const tweet of arriving.flat(1)) {
      // keep the raw payload in the backup before parsing/trimming it
      if (!twitterBackup[tweet.id_str]) twitterBackup[tweet.id_str] = tweet;
      twitterCache[tweet.id_str] = tweetparse(tweet);
      loaded.push(tweet.id_str);
    }

    // ids the API did not return (deleted/protected) — try the backup store
    const absent = difference(tweetsNeeded, loaded);
    for (const id of absent) {
      if (twitterBackup[id]) {
        log('Pulled tweet from backup ' + id);
        twitterCache[id] = tweetparse(twitterBackup[id]);
        continue;
      }
      log.error('Could not find tweet ' + id);
    }
  }

  /* Apply Tweets to Pages **************************************************/

  const twitterMedia = [];

  // now loop through pages and substitute the tweet data for the ids
  for (const page of pages) {
    if (!page.tweets || !page.tweets.length) continue;

    page.tweets = page.tweets.reduce((dict, tweetid) => {
      const tweet = twitterCache[tweetid];
      if (!tweet) {
        log.error(`Tweet ${tweetid} is missing from the cache.`);
        return dict;
      }
      dict[tweetid] = tweet;
      // accumulate media (avatars etc.) the build must download
      twitterMedia.push( ...tweet.media );
      return dict;
    }, {});
  }

  // persist all three stores for the next build
  await Promise.all([
    fs.writeFile(resolve('twitter-media.json'), JSON.stringify(twitterMedia, null, 2)),
    fs.writeFile(resolve('twitter-cache.json'), JSON.stringify(twitterCache, null, 2)),
    fs.writeFile(resolve('twitter-backup.json'), JSON.stringify(twitterBackup, null, 2)),
  ]);

  return pages;
};
|
||||
|
||||
/* Utility Functions **************************************************/
|
||||
|
||||
/**
 * Builds a batch tweet fetcher from a twitter-lite config.
 * @param {object|null} config - API credentials, or falsy when unavailable
 * @returns {function(string[]): (Array|Promise<Array>)} fetcher taking a
 *   list of tweet ids; yields [] when unconfigured or on API failure
 */
function getTwitterClient (config) {
  // with no credentials configured, fall back to a fetcher that finds nothing
  if (!config) return () => [];

  const client = new Twitter(config);

  return (tweetids) => {
    const params = { id: tweetids.join(','), tweet_mode: 'extended' };
    return client
      .get('statuses/lookup', params)
      .catch((e) => { log.error(e); return []; });
  };
}
|
||||
56
build/page-writer.js
Normal file
56
build/page-writer.js
Normal file
@@ -0,0 +1,56 @@
|
||||
const path = require('path');
|
||||
const Promise = require('bluebird');
|
||||
const fs = require('fs-extra');
|
||||
const getEngines = require('./engines');
|
||||
const { resolve, ROOT } = require('./resolve');
|
||||
const { siteInfo } = require(resolve('package.json'));
|
||||
|
||||
/**
 * Renders every page through its template engine and writes the html (plus
 * an optional json metadata sidecar) into dist/.
 * @param {Page[]} pages - fully loaded pages (see Page#load / page-tweets)
 * @param {boolean} prod - compact json output when true
 */
module.exports = exports = async function writePageContent (pages, prod) {
  const engines = await getEngines(prod);

  await Promise.map(pages, async (page) => {
    // page = new Page(page);

    // template context: page fields at the top level, site-wide info, and
    // local filesystem paths for asset resolution
    var data = {
      ...page,
      meta: { ...page.meta, ...page },
      page: {
        domain: siteInfo.domain,
        // "Title, Subtitle :: Site" when the page has a title
        title: page.meta.title
          ? (page.meta.title + (page.meta.subtitle ? ', ' + page.meta.subtitle : '') + ' :: ' + siteInfo.title)
          : siteInfo.title,
        description: page.meta.description || siteInfo.description,
      },
      site: siteInfo,
      local: {
        cwd: resolve(page.cwd),
        root: ROOT,
        basename: page.basename,
      },
      pages,
    };

    const html = String(engines[page.engine](data.source, data));
    // sidecar json is only produced for pages that declared a json path
    const json = page.json && {
      url: page.fullurl,
      title: page.meta.title,
      subtitle: page.meta.subtitle,
      description: page.meta.description,
      tweets: page.tweets,
      images: page.images,
      dateCreated: page.dateCreated,
      dateModified: page.dateModified,
      titlecard: page.titlecard,
      // NOTE(review): engine values elsewhere in this build appear to be
      // 'MARKDOWN'/'HANDLEBARS' (see resolve.js ENGINE), so 'md' here may
      // never match and preview would always be false — confirm.
      preview: page.engine === 'md' && String(engines.preview(data.source, data)),
    };

    const output = resolve('dist', page.output);
    await fs.ensureDir(path.dirname(output));
    await Promise.all([
      fs.writeFile(output, Buffer.from(html)),
      json && fs.writeFile(resolve('dist', page.json), Buffer.from(
        prod ? JSON.stringify(json) : JSON.stringify(json, null, 2),
      )),
    ]);
  });
};
|
||||
124
build/page.js
Normal file
124
build/page.js
Normal file
@@ -0,0 +1,124 @@
|
||||
|
||||
const path = require('path');
|
||||
const Promise = require('bluebird');
|
||||
const fs = require('fs-extra');
|
||||
const log = require('fancy-log');
|
||||
const File = require('./file');
|
||||
const actions = require('./actions');
|
||||
const { URL } = require('url');
|
||||
const { resolve, readFile, isCleanUrl, ENGINE } = require('./resolve');
|
||||
const { isObject } = require('./lib/util');
|
||||
|
||||
const pkg = require(resolve('package.json'));
|
||||
const frontmatter = require('front-matter');
|
||||
|
||||
/**
 * A renderable page found in the public directory. Extends File with url
 * computation, frontmatter parsing, and tweet-media build tasks.
 */
module.exports = exports = class Page extends File {

  constructor (filepath) {
    super(filepath);

    // fields included when the page is serialized (e.g. into pages.json)
    this.serializable.push(
      'fullurl',
      'engine',
      'source',
      'meta',
      'images',
      'titlecard',
      'tweets',
      'dateCreated',
      'dateModified',
      'classes',
      'flags',
    );

    var isIndexPage = (this.name === 'index');
    var isClean = isCleanUrl(this.ext);

    // Work out output path, json sidecar path, and public url.
    // "Clean" page types (hbs/md) are emitted as directory indexes so they
    // serve without a file extension.
    if (isClean && isIndexPage) {
      this.output = path.join(this.base, 'index.html');
      this.json = path.join(this.base, 'index.json');
      this.url = this.dir;
    } else if (isClean) {
      this.output = path.join(this.base, this.name, 'index.html');
      this.json = path.join(this.base, this.name + '.json');
      this.url = path.join(this.dir, this.name);
    } else if (isIndexPage) {
      this.output = path.join(this.base, 'index.html');
      this.json = path.join(this.base, this.name + '.json');
      this.url = this.dir;
    } else {
      this.output = path.join(this.base, this.basename);
      this.json = path.join(this.base, this.basename + '.json');
      this.url = path.join(this.dir, this.basename);
    }

    // absolute url on the live site
    const url = new URL(pkg.siteInfo.siteUrl);
    url.pathname = this.url;
    this.fullurl = url.href;

    this.engine = ENGINE[this.type] || ENGINE.COPY;
  }

  /**
   * Reads the page source, parses frontmatter, and attaches sibling-asset
   * info from the public file scan. Returns false (after logging) when the
   * file is unreadable or its frontmatter will not parse.
   * @param {object} PublicFiles - result of loadPublicFiles()
   */
  async load (PublicFiles) {
    const [ raw, { ctime, mtime } ] = await Promise.all([
      readFile(this.input).catch(() => null),
      fs.stat(this.input).catch(() => ({})),
    ]);

    const { titlecard, assets } = PublicFiles.for(this.dir);

    // empty file
    if (!raw || !ctime) {
      log.error('Could not load page: ' + this.filepath);
      return false;
    }

    try {
      var { attributes: meta, body } = frontmatter(raw.toString('utf8'));
    } catch (e) {
      log.error('Error while parsing frontmatter for ' + this.filepath, e);
      return false;
    }

    this.source = body;
    this.meta = meta;
    this.images = assets;
    this.titlecard = titlecard;
    // tweet references normalize to id strings (false entries = unparseable)
    this.tweets = (meta.tweets || []).map(parseTweetId);
    // prefer the authored date; fall back to the file's creation time
    this.dateCreated = meta.date && new Date(meta.date) || ctime;
    this.dateModified = mtime;

    // dedupe css classes; also expose them as camelCased boolean flags
    // (e.g. "no-toc" -> flags.noToc) for template conditionals
    this.classes = Array.from(new Set(meta.classes || []));
    this.flags = this.classes.reduce((res, item) => {
      var camelCased = item.replace(/-([a-z])/g, (g) => g[1].toUpperCase());
      res[camelCased] = true;
      return res;
    }, {});

    return this;
  }

  /**
   * Build tasks for this page: downloads for any media referenced by its
   * tweets. Only meaningful after page-tweets has replaced the id list with
   * an id->tweet dictionary — NOTE(review): isObject presumably rejects the
   * pre-substitution array form; confirm against lib/util.
   */
  tasks () {
    if (!isObject(this.tweets)) return [];

    return Object.values(this.tweets)
      .map((t) => t.media)
      .flat()
      .map((m) => ({ ...m, action: actions.fetch, output: m.output }));
  }

};
|
||||
|
||||
/* Utility Functions **************************************************/
|
||||
|
||||
const tweeturl = /https?:\/\/twitter\.com\/(?:#!\/)?(?:\w+)\/status(?:es)?\/(\d+)/i;
const tweetidcheck = /^\d+$/;

/**
 * Normalizes a tweet reference — a full status URL or a bare numeric id
 * string — down to the numeric id. Returns false for anything else.
 * @param {string} tweetid
 * @returns {string|false}
 */
function parseTweetId (tweetid) {
  // ids must arrive as strings; numeric ids exceed Number's safe range
  if (typeof tweetid !== 'string') return false;

  const match = tweeturl.exec(tweetid);
  if (match) return match[1];

  return tweetidcheck.test(tweetid) ? tweetid : false;
}
|
||||
64
build/public.js
Normal file
64
build/public.js
Normal file
@@ -0,0 +1,64 @@
|
||||
const glob = require('./lib/glob');
|
||||
const { groupBy, keyBy, filter, find, get, memoize } = require('lodash');
|
||||
const { ROOT, kind, KIND } = require('./resolve');
|
||||
const File = require('./file');
|
||||
const Asset = require('./asset');
|
||||
const Page = require('./page');
|
||||
const Promise = require('bluebird');
|
||||
|
||||
// Which File subclass to construct for each kind of public file.
// NOTE(review): KIND.ARTIFACT has no entry here — an artifact under public/
// would make F undefined and `new F` throw; confirm artifacts never appear
// in the public tree.
const KIND_MAP = {
  [KIND.PAGE]: Page,
  [KIND.ASSET]: Asset,
  [KIND.OTHER]: File,
};

/**
 * Scans public/ and wraps every file in its Page/Asset/File class.
 * Returns the full collection plus a memoized per-directory view (`for`)
 * used by pages to find their sibling assets.
 */
module.exports = exports = async function loadPublicFiles () {
  const files = await Promise.map(glob('public/**/*', { cwd: ROOT, nodir: true }), (filepath) => {
    const k = kind(filepath);
    const F = KIND_MAP[k];
    const f = new F(filepath);
    // preprocessed pages are handled elsewhere; drop them from the scan
    if (f.kind === KIND.PAGE && f.preprocessed) return false;
    return f;
  }).filter(Boolean);

  const {
    [KIND.PAGE]: pages,
    [KIND.ASSET]: assets,
  } = groupBy(files, 'kind');

  // Builds the per-directory view handed to Page#load.
  function within (dir) {
    const subset = filter(files, { dir });

    // NOTE(review): this lazily looks up a file named "titlecard", but the
    // getter below returns the memoized *function* rather than calling it,
    // and get(..., [0, 'url']) indexes an object with 0 — both look like
    // bugs; confirm how consumers use `titlecard` before changing.
    const getTitlecard = memoize(() =>
      get(find(files, { name: 'titlecard' }), [ 0, 'url' ]),
    );

    const {
      [KIND.PAGE]: subpages,
      [KIND.ASSET]: subassets,
    } = groupBy(subset, 'kind');

    // assets keyed by name, in their web-ready form (undefined if none)
    const webready = subassets && keyBy(subassets.map((a) => a.webready()), 'name');

    return {
      all: subset,
      get titlecard () { return getTitlecard; },
      get pages () {
        return subpages;
      },
      get assets () {
        return webready;
      },
    };
  }

  return {
    all: files,
    pages,
    assets,
    for: memoize(within),
    // all pending build tasks across every public file
    get tasks () {
      return files.map((a) => a.tasks()).flat(1);
    },
  };
};
|
||||
147
build/resolve.js
Normal file
147
build/resolve.js
Normal file
@@ -0,0 +1,147 @@
|
||||
|
||||
const path = require('path');
|
||||
const ROOT = path.resolve(__dirname, '..');
|
||||
const fs = require('fs-extra');
|
||||
const { is: _is } = require('./lib/util');
|
||||
|
||||
/**
 * Wraps lib/util's _is() matcher so that extensions are normalized
 * (e.g. ".jpg" -> ".jpeg", bare names get a dot) before matching.
 * The returned predicate exposes its match list as `.matching`.
 */
function is (...args) {
  const matcher = _is(...args);

  function ret (ext) {
    return matcher(normalizedExt(ext));
  }

  // keep the original argument list around for introspection/debugging
  ret.matching = args;
  return ret;
}
|
||||
|
||||
/**
 * Builds a classifier from a { label: predicate } dictionary.
 * The returned function tries each predicate in insertion order and yields
 * the first matching label, or `def` when none match.
 * @param {Object<string, function>} dict
 * @param {*} def - fallback value
 */
function dictMatch (dict, def) {
  const entries = Object.entries(dict);

  return function match (tok) {
    for (const [ key, fn ] of entries) {
      if (fn(tok)) return key;
    }
    return def;
  };
}
|
||||
|
||||
// Canonical file extensions recognized by the build.
const EXT = exports.EXT = {
  JPG: '.jpg',
  JPEG: '.jpeg',
  PNG: '.png',
  GIF: '.gif',
  MP4: '.mp4',
  M4V: '.m4v',
  MD: '.md',
  HBS: '.hbs',
  HTML: '.html',
  XML: '.xml',
  CSS: '.css',
  SCSS: '.scss',
  JS: '.js',
};

const {
  JPG,
  JPEG,
  PNG,
  GIF,
  MP4,
  M4V,
  MD,
  HBS,
  HTML,
  XML,
  CSS,
  SCSS,
  JS,
} = EXT;

// Extension aliases collapsed to one canonical form before matching.
const NORMALIZE_EXT = {
  [JPG]: JPEG,
  [M4V]: MP4,
  [HBS]: HTML,
};
|
||||
|
||||
// Coerces any extension or filename into a canonical dotted extension:
// bare names/filenames get their last segment dotted, then aliases collapse
// (e.g. "photo.JPG" is NOT lowercased here — NOTE(review): confirm callers
// always pass lowercase).
const normalizedExt = exports.normalizedExt = (ext) => {
  if (ext[0] !== '.') ext = '.' + ext.split('.').pop();
  return NORMALIZE_EXT[ext] || ext;
};

// Extension predicates; is() normalizes before matching. isPage/isAsset
// compose other predicates (passed straight through to lib/util's _is).
const isVideo = exports.isVideo = is(MP4, M4V);
const isImage = exports.isImage = is(JPG, JPEG, PNG, GIF);
const isHandlebars = exports.isHandlebars = is(XML, HBS, HTML);
const isMarkdown = exports.isMarkdown = is(MD);
const isPage = exports.isPage = is(isHandlebars, isMarkdown);
const isAsset = exports.isAsset = is(isImage, isVideo);
const isArtifact = exports.isArtifact = is(CSS, SCSS, JS);
// pages that render to extensionless directory-index urls
exports.isCleanUrl = is(HBS, MD);



// Coarse content type of a file, derived from its extension.
const TYPE = exports.TYPE = {
  IMAGE: 'IMAGE',
  VIDEO: 'VIDEO',
  HANDLEBARS: 'HANDLEBARS',
  MARKDOWN: 'MARKDOWN',
  SCRIPT: 'SCRIPT',
  STYLE: 'STYLE',
  OTHER: 'OTHER',
};

// type(ext) -> TYPE.* (first matching predicate wins)
exports.type = dictMatch({
  [TYPE.IMAGE]: isImage,
  [TYPE.HANDLEBARS]: isHandlebars,
  [TYPE.MARKDOWN]: isMarkdown,
  [TYPE.VIDEO]: isVideo,
  [TYPE.SCRIPT]: is(JS),
  [TYPE.STYLE]: is(SCSS, CSS),
}, TYPE.OTHER);



// What role a file plays in the build pipeline.
const KIND = exports.KIND = {
  PAGE: 'PAGE',
  ASSET: 'ASSET',
  ARTIFACT: 'ARTIFACT',
  OTHER: 'OTHER',
};

// kind(ext) -> KIND.*
exports.kind = dictMatch({
  [KIND.ASSET]: isAsset,
  [KIND.PAGE]: isPage,
  [KIND.ARTIFACT]: isArtifact,
}, KIND.OTHER);



// Which template engine renders a file.
const ENGINE = exports.ENGINE = {
  HANDLEBARS: 'HANDLEBARS',
  MARKDOWN: 'MARKDOWN',
  COPY: 'COPY',
};

// engine(ext) -> ENGINE.* (COPY = pass the file through untouched)
exports.engine = dictMatch({
  [ENGINE.HANDLEBARS]: is(XML, HBS, HTML),
  [ENGINE.MARKDOWN]: is(MD),
}, ENGINE.COPY);
|
||||
|
||||
|
||||
|
||||
exports.readFile = function readFile (fpath) {
|
||||
fpath = exports.resolve(fpath);
|
||||
return fs.readFile(fpath).catch((err) => {
|
||||
throw new Error(err.message);
|
||||
});
|
||||
};
|
||||
|
||||
exports.resolve = function resolve (...args) {
|
||||
args = args.filter(Boolean);
|
||||
const fpath = args.shift();
|
||||
if (!fpath) return ROOT;
|
||||
return path.resolve(ROOT, fpath, ...args);
|
||||
};
|
||||
|
||||
// Express an absolute path relative to the project root.
exports.relative = (fpath) => path.relative(ROOT, fpath);

// Absolute path of the project root (one level above this build directory).
exports.ROOT = ROOT;
|
||||
75
build/scripts.js
Normal file
75
build/scripts.js
Normal file
@@ -0,0 +1,75 @@
|
||||
const glob = require('./lib/glob');
|
||||
const { ROOT, readFile } = require('./resolve');
|
||||
const actions = require('./actions');
|
||||
const File = require('./file');
|
||||
const Promise = require('bluebird');
|
||||
const { minify } = require('terser');
|
||||
|
||||
/**
 * Builds the client-side javascript bundle tasks.
 * js/_*.js files are concatenated (after the vendor libraries) into a single
 * js/global.js; every other js/*.js file is emitted individually.
 * @param {boolean} prod - minify with terser when true
 * @returns {Promise<Array>} write tasks for the task evaluator
 */
module.exports = exports = async function scripts (prod) {
  // underscore-prefixed files are partials destined for the global bundle
  const globalFiles = await glob('js/_*.js', { cwd: ROOT, nodir: true });
  // vendor libraries load first, in this order
  globalFiles.unshift(
    require.resolve('jquery'),
    require.resolve('magnific-popup'),
    require.resolve('popper.js/dist/umd/popper.js'),
    require.resolve('bootstrap/js/dist/util.js'),
    require.resolve('bootstrap/js/dist/dropdown.js'),
  );

  const globalScript = new ClientScript('js/global.js');
  await globalScript.concat(globalFiles, prod);

  // standalone scripts (ClientScript marks the _* partials as preprocessed)
  const files = await Promise.map(glob('js/*.js', { cwd: ROOT, nodir: true }), async (filepath) => {
    const f = new ClientScript(filepath);
    if (f.preprocessed) return false;
    await f.load(prod);
    return f;
  }).filter(Boolean);

  const tasks = files.map((f) => f.tasks()).flat();

  tasks.push(...globalScript.tasks());

  return tasks;
};
|
||||
|
||||
|
||||
/**
 * A client-side javascript file. Extends File with loading, optional
 * terser minification, and multi-file concatenation for the global bundle.
 * NOTE(review): minify() is used synchronously (terser v4 API); terser v5
 * made it async — confirm the pinned version before upgrading.
 */
class ClientScript extends File {

  // scripts keep their source directory layout (js/ -> js/)
  _dir (dir) {
    dir = dir.split('/');
    return dir;
  }

  /**
   * Reads this script's source (empty string if unreadable), minifying in
   * production, and stores it on this.content.
   */
  async load (prod) {
    let contents = (await readFile(this.input).catch(() => '')).toString('utf8');
    if (prod) {
      const { code, error } = minify(contents);
      if (error) throw new Error(error);
      contents = code;
    }
    this.content = contents;
  }

  /**
   * Concatenates several source files (vendor + partials) into this
   * script's content, minifying the combined result in production.
   */
  async concat (files, prod) {
    let contents = await Promise.map(files, readFile);
    contents = contents.join('\n\n');
    if (prod) {
      const { code, error } = minify(contents);
      if (error) throw new Error(error);
      contents = code;
    }
    this.content = contents;
  }

  // A single write task carrying the prepared content. nocache: scripts are
  // rewritten on every build rather than tracked by the build cache.
  tasks () {

    return [ {
      input: this.input,
      output: this.out,
      content: this.content,
      action: actions.write,
      nocache: true,
    } ];
  }

}
|
||||
90
build/scss.js
Normal file
90
build/scss.js
Normal file
@@ -0,0 +1,90 @@
|
||||
const glob = require('./lib/glob');
|
||||
const { ROOT, readFile, resolve } = require('./resolve');
|
||||
const actions = require('./actions');
|
||||
const File = require('./file');
|
||||
const sass = require('node-sass');
|
||||
const Promise = require('bluebird');
|
||||
const postcss = require('postcss');
|
||||
const autoprefixer = require('autoprefixer');
|
||||
const crass = require('crass');
|
||||
|
||||
|
||||
module.exports = exports = async function styles (prod) {
|
||||
const files = await Promise.map(glob('scss/*.scss', { cwd: ROOT, nodir: true }), async (filepath) => {
|
||||
const f = new Sass(filepath);
|
||||
if (f.preprocessed) return false;
|
||||
await f.load(prod);
|
||||
return f;
|
||||
}).filter(Boolean);
|
||||
|
||||
const tasks = files.map((f) => f.tasks()).flat();
|
||||
|
||||
return tasks;
|
||||
};
|
||||
|
||||
// Compiles one SCSS entry file to CSS; production builds additionally run
// autoprefixer and crass minification over the rendered output.
class Sass extends File {

  // Map the source path into the output tree: scss/… becomes css/….
  _dir (dir) {
    dir = dir.split('/');
    if (dir[0] === 'scss') dir.shift();
    dir.unshift('css');
    return dir;
  }

  // Output keeps the input name but with a .css extension.
  _out () {
    this.ext = '.css';
    super._out();
  }

  // Render the SCSS to CSS and store it on `this.content`.
  // Tokens of the form |path| are replaced with the raw contents of the
  // referenced file before compilation.
  // prod: when truthy, run autoprefixer and minify via crass.
  async load (prod) {
    // FIX: was `.catch(() => null)` — a missing file then crashed on
    // null.toString(). Fall back to an empty document instead, matching
    // ClientScript.load.
    let contents = (await readFile(this.input).catch(() => '')).toString('utf8');

    // FIX: matchAll requires a /g regex; the original non-global
    // /\|(.+?)\|/ throws a TypeError at runtime (and could only ever have
    // described a single match anyway).
    for (const [ match, fpath ] of contents.matchAll(/\|(.+?)\|/g)) {
      const insert = await readFile(fpath);
      contents = contents.replace(match, insert);
    }

    const sassOptions = {
      data: contents,
      file: resolve(this.input),
      includePaths: [
        resolve(this.cwd),
        resolve('node_modules'),
      ],
      sourceMapEmbed: true,
    };

    // node-sass is callback based; adapt it to a promise.
    let { css } = await (new Promise((resolve, reject) => { // eslint-disable-line no-shadow
      sass.render(sassOptions, (err, result) => (
        err ? reject(err) : resolve(result)
      ));
    }));

    if (prod) {
      css = (await postcss([ autoprefixer ]).process(css, {
        from: this.input,
        to: this.out,
        map: { inline: true },
      })).css;

      let parsed = crass.parse(css);
      parsed = parsed.optimize({ O1: true });
      // if (options.pretty) parsed = parsed.pretty();
      css = Buffer.from(parsed.toString());
    }

    this.content = css;
  }

  // Emit a single uncached write task carrying the rendered CSS.
  tasks () {
    return [ {
      input: this.input,
      output: this.out,
      content: this.content,
      action: actions.write,
      nocache: true,
    } ];
  }

}
|
||||
16
build/svg.js
Normal file
16
build/svg.js
Normal file
@@ -0,0 +1,16 @@
|
||||
const glob = require('./lib/glob');
|
||||
const { ROOT } = require('./resolve');
|
||||
const actions = require('./actions');
|
||||
|
||||
module.exports = exports = async function svgIcons () {
|
||||
const files = await glob('svg/**/*.svg', { cwd: ROOT });
|
||||
|
||||
const tasks = files.map((f) => ({
|
||||
input: f,
|
||||
output: 'images/' + f,
|
||||
action: actions.copy,
|
||||
nocache: true,
|
||||
}));
|
||||
|
||||
return tasks;
|
||||
};
|
||||
13
build/twitter.js
Normal file
13
build/twitter.js
Normal file
@@ -0,0 +1,13 @@
|
||||
|
||||
const fs = require('fs-extra');
|
||||
const actions = require('./actions');
|
||||
const { uniqBy } = require('lodash');
|
||||
const { resolve } = require('./resolve');
|
||||
|
||||
module.exports = exports = async function twitter () {
|
||||
const media = await fs.readJson(resolve('twitter-media.json')).catch(() => ([]));
|
||||
const tasks = uniqBy(media, 'input')
|
||||
.map((m) => ({ ...m, action: actions.fetch, output: m.output }));
|
||||
|
||||
return tasks;
|
||||
};
|
||||
Reference in New Issue
Block a user