Initial site commit
gulp/_template.js (Normal file, 12 lines)
@@ -0,0 +1,12 @@

const format = require('date-fns/format');

module.exports = exports = ({ id, date }) => `---
id: "${id}"
date: "${date.toISOString()}"
title: ""
description: "Outfit of the Day for ${format(date, 'MMM do, yyyy')}"
tags:
  - OOTD
---
`;
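The gulpfile exposes a `new` task (see gulp/index.js below) that presumably feeds this template; a minimal sketch of how it might be consumed, with the file-writing step hypothetical since gulp/new.js is not part of this view:

const template = require('./gulp/_template');
const frontmatter = template({ id: 'abc123', date: new Date() });
// frontmatter is a YAML block ready to be written at the top of a new post file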
gulp/clean.js (Normal file, 13 lines)
@@ -0,0 +1,13 @@

const { src } = require('gulp');
const clean = require('gulp-clean');

module.exports = exports = function cleanDistribution () {
  return src([ 'dist', 'rev-manifest.json', 'posts.json', 'posts-sans.json' ], { read: false, allowEmpty: true })
    .pipe(clean());
};

exports.dev = function cleanDistributionForDev () {
  return src([ 'dist/**.{js,json,jsx}', 'rev-manifest.json', 'posts.json', 'posts-sans.json' ], { read: false, allowEmpty: true })
    .pipe(clean());
};
gulp/cloudfront.js (Normal file, 48 lines)
@@ -0,0 +1,48 @@
const log = require('fancy-log');
const aws = require('aws-sdk');
var credentials = require('../aws.json');
var Promise = require('bluebird');

async function invalidate (wait) {
  var cloudfront = new aws.CloudFront();
  cloudfront.config.update({ credentials });

  var poll = async function (id) {
    const res = await cloudfront.getInvalidation({
      DistributionId: credentials.distribution,
      Id: id,
    }).promise();

    if (res.Invalidation.Status === 'Completed') {
      return;
    }

    return Promise.delay(5000).then(() => poll(id));
  };

  const { Invalidation } = await cloudfront.createInvalidation({
    DistributionId: credentials.distribution,
    InvalidationBatch: {
      CallerReference: Date.now().toString(),
      Paths: {
        Quantity: 1,
        Items: [ '/*' ],
      },
    },
  }).promise();

  const id = Invalidation.Id;

  log('Invalidation created, waiting for it to complete.', id);

  if (wait) await poll(id);
}

module.exports = exports = function invalidateCloudfrontAndWait () {
  return invalidate(true);
};

exports.prod = function invalidateCloudfront () {
  return invalidate(false);
};
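The build reads AWS credentials from an untracked aws.json at the repo root. A sketch of its assumed shape, inferred from the calls above (config.update({ credentials }) and credentials.distribution); the key names other than distribution follow the aws-sdk credentials format, and the values are placeholders:

{
  "accessKeyId": "AKIA...",
  "secretAccessKey": "...",
  "distribution": "E123EXAMPLEID"
}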
gulp/contents.js (Normal file, 579 lines)
@@ -0,0 +1,579 @@

const path = require('path');
const fs = require('fs-extra');
const { chunk, uniq, keyBy, difference, omit } = require('lodash');
const log = require('fancy-log');
const glob = require('./lib/glob');
const getDimensions = require('./lib/dimensions');
const memoize = require('memoizepromise');
const { URL } = require('url');
const { minify: htmlMinify } = require('html-minifier-terser');

const { src, dest } = require('gulp');
const frontmatter = require('gulp-front-matter');
const collect = require('gulp-collect');

const asyncthrough = require('./lib/through');

const ROOT = path.dirname(__dirname);
const DEST = 'dist';

const { siteInfo } = require('../package.json');

const markdown = require('markdown-it');
const striptags = require('string-strip-html');
const tweetparse = require('./lib/tweetparse');

const slugs = require('slugify');
const slugify = (s) => slugs(s, { remove: /[*+~.,()'"!?:@/\\]/g }).toLowerCase();

const handlebars = require('handlebars');
const HandlebarsKit = require('hbs-kit');
HandlebarsKit.load(handlebars);

const md = markdown({
  html: true,
  linkify: true,
  typographer: true,
}).enable('image')
  .use(require('markdown-it-anchor'), {
    permalink: true,
    permalinkClass: 'header-link',
    permalinkSymbol: '<img src="/images/svg/paragraph.svg">',
    slugify,
  })
  .use(require('./lib/markdown-raw-html'))
;

const mdPreview = markdown({
  html: false,
  linkify: false,
  typographer: true,
})
  .use(require('./lib/markdown-token-filter'))
;

let twitterClient;
const Twitter = require('twitter-lite');
try {
  twitterClient = new Twitter(require('../twitter.json'));
} catch (e) {
  twitterClient = null;
}

function twitter (tweetids) {
  if (!twitterClient) return [];
  return twitterClient.get('statuses/lookup', { id: tweetids.join(','), tweet_mode: 'extended' })
    .catch((e) => { log.error(e); return []; });
}
async function reloadLayouts () {
  const layouts = {
    layout: 'templates/layout.hbs.html',
  };

  let pending = Object.entries(layouts)
    .map(async ([ name, file ]) =>
      [ name, (await fs.readFile(path.resolve(ROOT, file))).toString('utf8') ],
    );

  pending = await Promise.all(pending);

  pending.forEach(([ name, file ]) => handlebars.registerPartial(name, handlebars.compile(file)));

  const injections = {};
  handlebars.registerHelper('inject', function (tpath, ...args) {
    const { hash } = args.pop();
    const context = handlebars.createFrame(args[0] || this);
    Object.assign(context, hash || {});

    if (tpath[0] === '/') tpath = path.join(this.local.root, tpath);
    else if (tpath[0] === '~') tpath = path.join(this.local.root, 'templates', tpath.slice(2));
    else tpath = path.resolve(this.local.cwd, tpath);
    tpath += '.hbs';

    if (!injections[tpath]) {
      if (!fs.existsSync(tpath)) {
        log.error('Template does not exist for injection ' + path.relative(ROOT, tpath));
        return '';
      }

      try {
        injections[tpath] = handlebars.compile(fs.readFileSync(tpath).toString('utf8'));
      } catch (e) {
        log.error('Could not load injection template ' + path.relative(ROOT, tpath), e);
        return '';
      }
    }

    try {
      return new handlebars.SafeString(injections[tpath](context));
    } catch (e) {
      log.error('Could not execute injection template ' + path.relative(ROOT, tpath), e);
      return '';
    }
  });

  handlebars.registerHelper('icon', function (name, ...args) {
    const { hash } = args.pop();
    const tpath = path.join(this.local.root, 'svg', name + '.svg');

    if (!injections[tpath]) {
      if (!fs.existsSync(tpath)) {
        log.error('Template does not exist for injection ' + path.relative(ROOT, tpath));
        return '';
      }

      try {
        const svg = fs.readFileSync(tpath).toString('utf8');
        injections[tpath] = handlebars.compile(`<span class="svg-icon" {{#if size}}style="width:{{size}}px;height:{{size}}px"{{/if}}>${svg}</span>`);
      } catch (e) {
        log.error('Could not load injection template ' + path.relative(ROOT, tpath), e);
        return '';
      }
    }

    try {
      return new handlebars.SafeString(injections[tpath]({ size: hash && hash.size }));
    } catch (e) {
      log.error('Could not execute injection template ' + path.relative(ROOT, tpath), e);
      return '';
    }
  });

  handlebars.registerHelper('markdown', function (...args) {
    const { fn } = args.pop();
    let original;

    if (fn) {
      original = fn(this);

      const match = original.match(/^[^\S\n]*(?=\S)/gm);
      const indent = match && Math.min(...match.map((el) => el.length));

      if (indent) {
        const regexp = new RegExp(`^.{${indent}}`, 'gm');
        original = original.replace(regexp, '');
      }

    } else {
      let tpath = args.shift();
      if (!tpath) throw new Error('No content was provided for the Markdown helper');
      if (tpath[0] === '/') tpath = path.join(this.local.root, tpath);
      else tpath = path.resolve(this.local.cwd, tpath);
      tpath += '.md';

      if (!injections[tpath]) {
        if (!fs.existsSync(tpath)) {
          log.error('Markdown does not exist for injection ' + path.relative(ROOT, tpath));
          return '';
        }

        try {
          injections[tpath] = fs.readFileSync(tpath).toString('utf8');
        } catch (e) {
          log.error('Could not load markdown file ' + path.relative(ROOT, tpath), e);
          return '';
        }
      }

      // read from the cache even on a hit, so repeat injections don't render undefined
      original = injections[tpath];
    }

    original = md.render(original);

    return new handlebars.SafeString(original);
  });
}

exports.loadLayout = async function loadLayout () {
  await reloadLayouts();
  handlebars.registerHelper('rev', (url) => {
    if (!url) return '';
    if (url[0] === '/') url = url.substr(1);
    return '/' + url;
  });
  handlebars.registerHelper('prod', function (options) {
    if (!options.inverse) return false;
    return options.inverse(this);
  });
};

exports.loadLayout.prod = async function loadLayoutForProd () {
  const manifest = await fs.readJson(path.join(ROOT, 'rev-manifest.json')).catch(() => {}).then((r) => r || {});

  await reloadLayouts();

  handlebars.registerHelper('rev', (url) => {
    if (!url) return '';
    if (url[0] === '/') url = url.substr(1);
    if (manifest[url]) return '/' + manifest[url];
    return '/' + url;
  });
  handlebars.registerHelper('prod', function (options) {
    if (!options.fn) return true;
    return options.fn(this);
  });
};
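For reference, a sketch of how these helpers might be invoked from a template; the partial and icon names are hypothetical, but the path rules (a leading / resolves from the site root, ~ from templates/) and the size hash argument come from the helper code above:

{{inject "~masthead"}}
{{inject "./sidebar" this}}
{{icon "twitter" size=24}}
{{#markdown}}
  Indented **markdown** works too; common indentation is stripped before rendering.
{{/markdown}}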
exports.pages = function buildPages ({ minify }) {
  var postTemplate = handlebars.compile(String(fs.readFileSync(path.join(ROOT, '/templates/post.hbs.html'))));
  const minifyConfig = {
    conservativeCollapse: true,
    collapseWhitespace: true,
    minifyCSS: true,
    removeComments: true,
    removeRedundantAttributes: true,
  };

  return src([ 'pages/**/*.{md,html,xml}', '!pages/**/_*.{md,html}' ])
    .pipe(frontmatter({
      property: 'meta',
    }))
    .pipe(parseMeta())
    .pipe(parseTweets())
    .pipe(asyncthrough(async (stream, file) => {
      const cwd = path.dirname(file.path);
      let original = file.contents.toString('utf8').trim();

      var data = {
        ...file.meta,
        meta: file.meta,
        page: {
          domain: siteInfo.domain,
          title: file.meta.title
            ? (file.meta.title + (file.meta.subtitle ? ', ' + file.meta.subtitle : '') + ' :: ' + siteInfo.title)
            : siteInfo.title,
        },
        local: {
          cwd,
          root: ROOT,
          basename: file.basename,
        },
      };

      if ([ '.html', '.md' ].includes(file.extname)) {
        const datajs = file.clone();
        datajs.contents = Buffer.from(JSON.stringify(omit(file.meta, [ 'destination' ]), null, 2));
        datajs.basename = path.basename(file.path, file.extname) + '.json';
        stream.push(datajs);
      }

      if ([ '.html', '.xml' ].includes(file.extname)) {
        // is a handlebars template
        try {
          const template = handlebars.compile(original);
          let html = template(data);
          if (minify) {
            html = htmlMinify(html, minifyConfig);
          }
          file.contents = Buffer.from(html);
          stream.push(file);
        } catch (err) {
          log.error('Encountered a crash while compiling ' + file.path, err);
        }
        return;
      }

      original = original.replace(/\{!\{([\s\S]*?)\}!\}/mg, (match, contents) => {
        try {
          const result = handlebars.compile(contents)(data);
          return '|||' + result + '|||';
        } catch (e) {
          log.error(e);
          return '';
        }
      });

      if (file.extname === '.md') {

        let contents, preview;
        try {
          contents = md.render(original.replace(/<!--[[\]]-->/g, '')).trim();
          data.contents = contents;

          preview = striptags(original
            .replace(/<!--\[[\s\S]*?\]-->/g, '')
            .replace(/\|\|\|[\s\S]*?\|\|\|/gi, ''),
          ).trim();
          if (preview.length > 1000) preview = preview.slice(0, 1000) + '…';
          preview = preview ? mdPreview.render(preview) : '';

          data.preview = preview;
        } catch (e) {
          log.error(`Error while rendering ${file.path}`, e);
          contents = preview = '';
        }

        if (preview) {
          file.flags.add('has-preview');
          if (preview.length < 400) file.flags.add('short-preview');
        } else {
          file.flags.add('no-preview');
        }

        const classes = Array.from(file.flags);
        const flags = classes.reduce((res, item) => {
          var camelCased = item.replace(/-([a-z])/g, (g) => g[1].toUpperCase());
          res[camelCased] = true;
          return res;
        }, {});

        data.classes = data.meta.classes = classes;
        data.flags = data.meta.flags = flags;

        file.path = file.meta.destination;

        // is a markdown file
        try {
          let html = postTemplate(data);
          if (minify) {
            html = htmlMinify(html, minifyConfig);
          }
          file.contents = Buffer.from(html);
          stream.push(file);
        } catch (err) {
          log.error(`Error while rendering html for ${file.path}`, err);
        }

        return;
      }

    }))
    .pipe(dest(DEST));
};

exports.pages.prod = function buildPagesProd () { return exports.pages({ minify: true }); };

/** **************************************************************************************************************** **/
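A sketch of what the {!{ … }!} escape looks like inside a hypothetical markdown page; the handlebars output is wrapped in ||| fences so the raw-html markdown plugin (gulp/lib/markdown-raw-html.js, below) passes it through verbatim:

Some introductory prose.

{!{ {{inject "~infobox" title="Example"}} }!}

More markdown after the injected block.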
function parseMeta () {
  const getFileData = memoize(async (cwd, siteCwd) => {
    const imageFiles = (await glob('{*,_images/*}.{jpeg,jpg,png,gif,mp4}', { cwd }));

    const images = (await Promise.all(imageFiles.map(async (imgpath) => {

      const ext = path.extname(imgpath);
      let basename = path.basename(imgpath, ext);

      if (basename === 'titlecard') return;

      if (ext === '.mp4') {
        return {
          name: basename,
          type: 'movie',
          full: path.join(siteCwd, `${basename}${ext}`),
        };
      }

      const dimensions = await getDimensions(path.resolve(cwd, imgpath));
      const { width, height } = dimensions;
      dimensions.ratioH = Math.round((height / width) * 100);
      dimensions.ratioW = Math.round((width / height) * 100);
      if (dimensions.ratioH > 100) {
        dimensions.orientation = 'tall';
      } else if (dimensions.ratioH === 100) {
        dimensions.orientation = 'square';
      } else {
        dimensions.orientation = 'wide';
      }

      if (basename[0] === '_') {
        basename = basename.slice(1);
      }

      const filetype = {
        '.jpeg': 'jpeg',
        '.jpg': 'jpeg',
        '.png': 'png',
        '.gif': 'gif',
      }[ext];

      const sizes = [
        {
          url: path.join(siteCwd, `${basename}.${filetype}`),
          width: dimensions.width,
          height: dimensions.height,
        },
      ];

      for (const w of [ 2048, 1024, 768, 576, 300, 100 ]) {
        if (w > dimensions.width) continue;
        sizes.push({
          url: path.join(siteCwd, `${basename}.${w}w.${filetype}`),
          width: w,
          height: Math.ceil((w / dimensions.width) * dimensions.height),
        });
      }

      sizes.reverse();

      return {
        name: basename,
        type: 'image',
        sizes,
      };
    }))).filter(Boolean);

    const titlecard = (await glob('titlecard.{jpeg,jpg,png,gif}', { cwd }))[0];

    return {
      images: keyBy(images, 'name'),
      titlecard: titlecard ? path.join(siteCwd, titlecard) : false,
    };
  });

  return asyncthrough(async (stream, file) => {
    if (!file || (file.meta && file.meta.ignore)) return;

    if (!file.meta) file.meta = {};

    // if metadata has a date value, use it;
    // otherwise use the file's creation date
    var date = new Date(file.meta.date);
    if (isNaN(date)) date = file.stat.ctime;
    file.meta.date = date;

    var cwd = path.dirname(file.path);
    var siteCwd = file.meta.cwd = '/' + path.relative(path.join(ROOT, 'pages'), cwd);
    var base = file.meta.base = path.basename(file.path, file.extname);

    var flags = file.flags = new Set(file.meta.classes || []);
    var isIndexPage = file.meta.isIndex = (base === 'index');
    var isRootPage = file.meta.isRoot = (file.meta.cwd === '/');

    if (isRootPage && isIndexPage) {
      file.meta.slug = '';
      file.meta.destination = path.join(path.dirname(file.path), 'index.html');
    } else if (isRootPage || !isIndexPage) {
      file.meta.slug = base;
      file.meta.destination = path.join(path.dirname(file.path), base, 'index.html');
    } else if (!isRootPage && isIndexPage) {
      file.meta.slug = '';
      file.meta.destination = path.join(path.dirname(file.path), 'index.html');
    } else {
      file.meta.slug = path.basename(cwd);
      file.meta.destination = path.join(path.dirname(file.path), 'index.html');
    }

    const url = new URL(siteInfo.rss.site_url);
    file.meta.url = url.pathname = path.join(siteCwd, file.meta.slug);
    file.meta.fullurl = url.toString();
    // file.meta.originalpath = path.relative(file.cwd, file.path);

    const { images, titlecard } = await getFileData(cwd, siteCwd);

    file.meta.images = images;
    file.meta.titlecard = titlecard;

    flags.add(titlecard ? 'has-titlecard' : 'no-titlecard');

    if (file.meta['no-title']) {
      flags.add('hide-title');
    } else if (file.meta.title || file.meta.description) {
      flags.add('show-title');
    } else {
      flags.add('hide-title');
    }

    flags.add(file.meta.title ? 'has-title' : 'no-title');
    flags.add(file.meta.subtitle ? 'has-subtitle' : 'no-subtitle');
    flags.add(file.meta.description ? 'has-descrip' : 'no-descrip');

    stream.push(file);
  });
}
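A few worked examples of the slug and destination mapping above, using hypothetical page paths:

pages/index.md     ->  slug: ''       destination: .../index.html        url: /
pages/intro.md     ->  slug: 'intro'  destination: .../intro/index.html  url: /intro
pages/es/index.md  ->  slug: ''       destination: .../es/index.html     url: /es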
function parseTweets () {
  const tweeturl = /https?:\/\/twitter\.com\/(?:#!\/)?(?:\w+)\/status(?:es)?\/(\d+)/i;
  const tweetidcheck = /^\d+$/;
  function parseTweetId (tweetid) {
    // we can't trust an id that isn't a string
    if (typeof tweetid !== 'string') return false;

    const match = tweetid.match(tweeturl);
    if (match) return match[1];
    if (tweetid.match(tweetidcheck)) return tweetid;
    return false;
  }

  return collect.list(async (files) => {
    const twitterBackup = (await fs.readJson(path.join(ROOT, 'twitter-backup.json')).catch(() => {})) || {};
    const twitterCache = (await fs.readJson(path.join(ROOT, 'twitter-cache.json')).catch(() => {})) || {};
    const needed = [];

    // first loop through all posts and gather + validate all tweet ids
    for (const file of files) {
      if (!file.meta.tweets && !file.meta.tweet) continue;

      const tweets = [];

      if (file.meta.tweet) {
        file.meta.tweet = [ file.meta.tweet ].flat(1).map(parseTweetId);
        tweets.push(...file.meta.tweet);
      }

      if (file.meta.tweets) {
        file.meta.tweets = file.meta.tweets.map(parseTweetId);
        tweets.push(...file.meta.tweets);
      }

      for (const id of tweets) {
        if (!twitterCache[id]) {
          needed.push(id);
        }
      }

      file.meta.tweets = tweets;
    }

    // if we have tweets we need to add to the cache, do so
    if (needed.length) {
      log('Fetching tweets: ' + needed.join(', '));
      const arriving = await Promise.all(chunk(uniq(needed), 99).map(twitter));

      const loaded = [];
      for (const tweet of arriving.flat(1)) {
        if (!twitterBackup[tweet.id_str]) twitterBackup[tweet.id_str] = tweet;
        twitterCache[tweet.id_str] = tweetparse(tweet);
        loaded.push(tweet.id_str);
      }

      const absent = difference(needed, loaded);
      for (const id of absent) {
        if (twitterBackup[id]) {
          log('Pulled tweet from backup ' + id);
          twitterCache[id] = tweetparse(twitterBackup[id]);
          continue;
        }
        log.error('Could not find tweet ' + id);
      }
    }

    const media = [];

    // now loop through posts and substitute the tweet data for the ids
    for (const file of files) {
      if (!file.meta.tweets) continue;

      file.meta.tweets = file.meta.tweets.reduce((dict, tweetid) => {
        const tweet = twitterCache[tweetid];
        if (!tweet) {
          // skip ids that never made it into the cache instead of crashing below
          log.error(`Tweet ${tweetid} is missing from the cache.`);
          return dict;
        }
        dict[tweetid] = tweet;
        media.push(...tweet.media);
        return dict;
      }, {});

    }

    await fs.writeFile(path.join(ROOT, 'twitter-media.json'), JSON.stringify(media, null, 2));
    await fs.writeFile(path.join(ROOT, 'twitter-cache.json'), JSON.stringify(twitterCache, null, 2));
    await fs.writeFile(path.join(ROOT, 'twitter-backup.json'), JSON.stringify(twitterBackup, null, 2));

    return files;
  });
}
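A sketch of the frontmatter these passes consume, for a hypothetical post; tweet accepts a single id or status URL, tweets accepts a list, and both are normalized by parseTweetId above:

---
title: "Example post"
tweet: https://twitter.com/user/status/1234567890
tweets:
  - "9876543210"
---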
/** **************************************************************************************************************** **/
gulp/files.js (Normal file, 48 lines)
@@ -0,0 +1,48 @@

const path = require('path');
const { src, dest } = require('gulp');
const rev = require('gulp-rev');
const asyncthrough = require('./lib/through');
const changed = require('gulp-changed');
const merge = require('merge-stream');

const ROOT = path.dirname(__dirname);
const DEST = 'dist';

module.exports = exports = function fileCopy () {
  const pageFiles = src([ 'pages/**/*', '!pages/**/*.{md,hbs,xml,html,jpeg,jpg,png,gif,mp4}' ])
    .pipe(changed(DEST))
    .pipe(dest(DEST))
  ;

  const svgs = src('svg/**/*.svg')
    // .pipe(changed(DEST))
    .pipe(dest(path.join(DEST, 'images/svg')))
    .pipe(asyncthrough(async (stream, file) => {
      file.base = path.resolve(file.base, '../..');
      stream.push(file);
    }))
  ;

  return merge(pageFiles, svgs);
};

exports.prod = function fileCopyForProd () {
  return exports()
    .pipe(rev())
    .pipe(dest(DEST))
    .pipe(asyncthrough(async (stream, file) => {
      // Change rev's original base path back to the public root so that it uses the full
      // path as the original file name key in the manifest
      var base = path.resolve(ROOT, DEST);
      file.revOrigBase = base;
      file.base = base;

      stream.push(file);
    }))
    .pipe(rev.manifest({
      merge: true, // Merge with the existing manifest if one exists
    }))
    .pipe(dest('.'))
  ;
};
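The resulting rev-manifest.json maps unhashed paths to fingerprinted names, which the rev handlebars helper in contents.js consults at build time; a sketch with hypothetical entries:

{
  "images/svg/logo.svg": "images/svg/logo-d41d8cd98f.svg",
  "css/style.css": "css/style-a1b2c3d4e5.css"
}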
gulp/imgflow/actions.js (Normal file, 155 lines)
@@ -0,0 +1,155 @@
const path = require('path');
const fs = require('fs-extra');
const gm = require('gm');
const Promise = require('bluebird');
const fetch = require('make-fetch-happen');
const ico = require('png-to-ico');

const CWD = path.resolve(__dirname, '../..');

const actions = {
  async copy ({ input, output }) {
    await fs.copy(input, output);
    return fs.readFile(input);
  },

  async transcode ({ input, output, cache }) {
    const result = await actions.image({
      input,
      output,
      format: 'jpeg',
    });
    await fs.writeFile(cache, result);
    return result;
  },

  async fetch ({ input, output, cache }) {
    const res = await fetch(input);
    const body = await res.buffer();
    await fs.writeFile(output, body);
    await fs.writeFile(cache, body);
    return body;
  },

  async image (options) {
    const input = path.resolve(CWD, options.input);
    const output = path.resolve(CWD, options.output);
    const contents = await fs.readFile(input);
    let gmfile = gm(contents, input);

    const size = await Promise.fromCallback((cb) => gmfile.size(cb));

    if (options.height || options.width) {

      // if upscale is not requested, restrict size
      if (!options.upscale) {
        if (!isNaN(options.width)) {
          options.width = Math.min(options.width, size.width);
        }
        if (!isNaN(options.height)) {
          options.height = Math.min(options.height, size.height);
        }
      }

      // if one dimension is not set, fill it proportionally
      if (!options.height) {
        if (options.crop) {
          options.height = size.height;
        } else {
          options.height = Math.ceil((options.width / size.width) * size.height);
        }
      }
      if (!options.width) {
        if (options.crop) {
          options.width = size.width;
        } else {
          options.width = Math.ceil((options.height / size.height) * size.width);
        }
      }

      if (options.fill === 'crop') {
        if (size.height < options.height || size.width < options.width) {
          gmfile = gmfile
            .geometry(options.width, options.height, '^')
            .borderColor(options.bgColor || '#FFFFFF')
            .border(options.width, options.height)
            .gravity(options.gravity)
            .crop(options.width, options.height);
        } else {
          gmfile = gmfile
            .geometry(options.width, options.height, '^')
            .gravity(options.gravity)
            .crop(options.width, options.height);
        }
      } else if (options.fill === 'cover') {
        gmfile = gmfile
          .geometry(options.width, options.height, '^');
      } else if (options.fill === 'box') {
        gmfile = gmfile
          .geometry(options.width, options.height)
          .borderColor(options.bgColor || '#FFFFFF')
          .border(options.width, options.height)
          .gravity(options.gravity)
          .crop(options.width, options.height);
      } else if (options.fill === 'contain') {
        gmfile = gmfile
          .geometry(options.width, options.height);
      } else {
        gmfile = gmfile
          .geometry(options.width, options.height, '!');
      }

    } else if (options.percentage) {
      gmfile = gmfile
        .geometry(options.percentage, null, '%');
    }

    if (options.format) {
      gmfile = gmfile
        .setFormat(options.format === 'ico' ? 'png' : options.format);
    }

    if (options.quality) {
      gmfile = gmfile.quality(Math.floor(options.quality));
    } else {
      gmfile = gmfile.quality(95);
    }

    if (options.samplingFactor) {
      gmfile = gmfile
        .samplingFactor(options.samplingFactor[0], options.samplingFactor[1]);
    }

    if (options.sharpen) {
      options.sharpen = (typeof options.sharpen === 'string') ? options.sharpen : '1.5x1+0.7+0.02';
      gmfile = gmfile.unsharp(options.sharpen);
    }

    if (options.flatten) {
      gmfile = gmfile.flatten();
    }

    if (options.interlace) {
      gmfile = gmfile.interlace('Line');
    }

    if (options.background) {
      gmfile = gmfile.background(options.background);
    }

    if (options.noProfile) {
      gmfile = gmfile.noProfile();
    }

    await fs.ensureDir(path.dirname(output));
    let result = await Promise.fromCallback((cb) => gmfile.toBuffer(cb));
    if (options.format === 'ico') result = await ico(result);
    await fs.writeFile(output, result);
    if (options.cache) await fs.writeFile(options.cache, result);

    return result;
  },
};

module.exports = exports = actions;
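For illustration, roughly how imgflow/index.js drives these actions when producing one responsive rendition; the paths here are hypothetical:

const actions = require('./actions');

await actions.image({
  input: 'pages/example/photo.jpg',
  output: 'dist/example/photo.1024w.jpeg',
  format: 'jpeg',
  width: 1024, // height is filled proportionally from the source aspect ratio
});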
gulp/imgflow/index.js (Normal file, 326 lines)
@@ -0,0 +1,326 @@
const path = require('path');
const glob = require('../lib/glob');
const { groupBy, sortBy, omitBy, uniqBy } = require('lodash');
const Promise = require('bluebird');
const fs = require('fs-extra');
const log = require('fancy-log');
const actions = require('./actions');
const getDimensions = require('../lib/dimensions');

const CWD = path.resolve(__dirname, '../..');
const PAGES = path.join(CWD, 'pages');
const SOURCE = path.resolve(CWD, 'pages/**/*.{jpeg,jpg,png,gif,mp4}');
const MANIFEST_PATH = path.resolve(CWD, 'if-manifest.json');
const REV_MANIFEST_PATH = path.resolve(CWD, 'rev-manifest.json');
const MEDIA_INDEX = path.resolve(CWD, 'twitter-media.json');
const CACHE = 'if-cache';
const revHash = require('rev-hash');
const revPath = require('rev-path');

const LOG = {
  new: true,
  update: true,
  skip: true,
  rebuild: true,
  cached: false,
  copy: false,
};

module.exports = exports = async function postImages ({ rev = false } = {}) {

  var manifest;
  try {
    manifest = JSON.parse(await fs.readFile(MANIFEST_PATH));
  } catch (e) {
    manifest = {};
  }

  await fs.ensureDir(path.resolve(CWD, CACHE));

  const allfiles = (await glob(SOURCE));
  const tasks = [];

  for (const filepath of allfiles) {
    const input = path.relative(CWD, filepath);
    const output = path.relative(PAGES, filepath).replace('/_images', '');
    const file = path.parse(output);
    // console.log(input, output);

    // is a titlecard image or a video
    if (file.name === 'titlecard' || file.ext === '.mp4') {
      tasks.push({
        input,
        output,
        action: actions.copy,
      });
      continue;
    }

    // is a file we've pre-sized and do not want processed
    if (file.name[0] === '_') {
      tasks.push({
        input,
        output: path.format({ ...file, base: file.base.substring(1) }),
        action: actions.copy,
      });
      continue;
    }

    const format = {
      '.jpeg': 'jpeg',
      '.jpg': 'jpeg',
      '.png': 'png',
      '.gif': 'gif',
    }[file.ext];

    if (!format) throw new Error('Got an unexpected format: ' + file.ext);

    const dimensions = await getDimensions(filepath);

    tasks.push({
      input: filepath,
      output: `${file.dir}/${file.name}.${format}`,
      format,
      action: actions.image,
    });

    for (const w of [ 2048, 1024, 768, 576, 300, 100 ]) {
      if (w > dimensions.width) continue;
      tasks.push({
        input: filepath,
        output: `${file.dir}/${file.name}.${w}w.${format}`,
        format,
        width: w,
        action: actions.image,
      });
    }

  }

  const filtered = await filter(manifest, tasks);
  await execute(manifest, filtered, rev);
};

exports.prod = function imagesProd () { return exports({ rev: true }); };
exports.twitter = async function twitterImages ({ rev = false } = {}) {
  await fs.ensureDir(path.resolve(CWD, CACHE));

  var manifest;
  try {
    manifest = JSON.parse(await fs.readFile(MANIFEST_PATH));
  } catch (e) {
    manifest = {};
  }

  var media;
  try {
    media = JSON.parse(await fs.readFile(MEDIA_INDEX));
  } catch (e) {
    media = [];
  }

  media = uniqBy(media, 'output');

  const tasks = media.map((m) => ({ ...m, action: actions.fetch }));
  const filtered = await filter(manifest, tasks);
  await execute(manifest, filtered, rev);
};

exports.twitter.prod = function twimagesProd () { return exports.twitter({ rev: true }); };

exports.favicon = async function favicon ({ rev = false } = {}) {
  await fs.ensureDir(path.resolve(CWD, CACHE));

  const input = path.resolve(CWD, 'favicon.png');

  var manifest;
  try {
    manifest = JSON.parse(await fs.readFile(MANIFEST_PATH));
  } catch (e) {
    manifest = {};
  }

  const tasks = [ 32, 57, 64, 76, 96, 114, 120, 128, 144, 152, 180, 192, 196, 228 ].map((width) => ({
    input,
    output: `favicon${width}.png`,
    format: 'png',
    width,
    action: actions.image,
  }));

  tasks.push({
    input,
    output: 'favicon.ico',
    format: 'ico',
    action: actions.image,
  });

  const filtered = await filter(manifest, tasks);
  await execute(manifest, filtered, rev);
};

exports.favicon.prod = function faviconProd () { return exports.favicon({ rev: true }); };
async function filter (manifest, tasks) {
  const statMap = new Map();
  async function stat (f) {
    if (statMap.has(f)) return statMap.get(f);

    const p = fs.stat(path.resolve(CWD, f))
      .catch(() => null)
      .then((stats) => (stats && Math.floor(stats.mtimeMs / 1000)));

    statMap.set(f, p);
    return p;
  }

  return Promise.filter(tasks, async (task) => {

    const local = task.input.slice(0, 4) !== 'http';
    const hash = task.action.name + '.' + revHash(task.input) + '|' + revHash(task.output);
    const cachePath = path.join(CACHE, `${hash}${path.extname(task.output)}`);
    const [ inTime, outTime, cachedTime ] = await Promise.all([
      local && stat(path.resolve(CWD, task.input)),
      stat(path.resolve(CWD, 'dist', task.output)),
      stat(path.resolve(CWD, cachePath)),
    ]);

    task.manifest = manifest[hash];
    task.hash = hash;
    task.cache = cachePath;

    // how did this happen?
    if (local && !inTime) {
      log.error('Input file could not be found?', task.input);
      return false;
    }

    // never seen this file before
    if (!task.manifest) {
      task.apply = {
        hash,
        input: task.input,
        output: task.output,
        mtime: inTime,
      };
      task.log = [ 'new', task.input, task.output, hash ];
      return true;
    }

    // file modification time does not match last read, rebuild
    if (local && inTime > task.manifest.mtime) {
      task.log = [ 'update', task.input, task.output ];
      task.apply = {
        mtime: inTime,
      };
      return true;
    }

    task.apply = {
      mtime: local ? inTime : Math.floor(Date.now() / 1000),
    };

    // target file exists, nothing to do
    if (outTime) {
      return false;
      // task.log = [ 'skip', task.input, task.output, inTime, task.manifest.mtime ];
      // task.action = null;
      // return true;
    }

    // file exists in the cache, change the task to a copy action
    if (cachedTime) {
      task.log = [ 'cached', task.input, task.output ];
      task.action = actions.copy;
      task.input = cachePath;
      return true;
    }

    // task is a file copy
    if (task.action === actions.copy) {
      task.log = [ 'copy', task.input, task.output ];
      return true;
    }

    // file does not exist in cache, build it
    task.log = [ 'rebuild', task.input, task.output ];
    return true;
  });
}

async function execute (manifest, tasks, rev) {
  const lastSeen = Math.floor(Date.now() / 1000);
  const revManifest = {};

  let writeCounter = 0;
  let lastWriteTime = 0;
  async function writeManifest (force) {
    if (!force && rev) return; // disable interim writes during prod builds.
    if (!force && ++writeCounter % 100) return;
    const now = Date.now();
    if (!force && now - lastWriteTime < 10000) return;
    lastWriteTime = now;
    await fs.writeFile(MANIFEST_PATH, JSON.stringify(manifest, null, 2));
  }

  await Promise.map(sortBy(tasks, [ 'input', 'output' ]), async (task) => {
    const output = path.resolve(CWD, 'dist', task.output);

    const result = task.action && await task.action({ ...task, output });
    const apply = task.apply || {};
    if (task.log && LOG[task.log[0]]) log.info(...task.log);
    apply.lastSeen = lastSeen;
    apply.lastSeenHuman = new Date();

    if (!result) log('Nothing happened?', task);

    const rhash = result && revHash(result);
    const hashedPath = revPath(task.output, rhash);
    apply.revHash = rhash;
    apply.revPath = hashedPath;

    if (rev && rhash) {
      const rOutPath = task.output;
      const rNewPath = hashedPath;

      revManifest[rOutPath] = rNewPath;

      await fs.copy(output, path.resolve(CWD, 'dist', hashedPath));
    }

    manifest[task.hash] = { ...manifest[task.hash], ...apply };
    await writeManifest();

  }, { concurrency: rev ? 20 : 10 });

  // filter unseen files from history
  // manifest = omitBy(manifest, (m) => m.lastSeen !== lastSeen);

  await writeManifest(true);

  if (rev) {
    let originalManifest = {};
    try {
      if (await fs.exists(REV_MANIFEST_PATH)) {
        originalManifest = JSON.parse(await fs.readFile(REV_MANIFEST_PATH));
      }
    } catch (e) {
      // do nothing
    }

    Object.assign(originalManifest, revManifest);

    await fs.writeFile(REV_MANIFEST_PATH, JSON.stringify(originalManifest, null, 2));
  }

}

if (require.main === module) {
  exports().catch(console.error).then(() => process.exit()); // eslint-disable-line
}
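A sketch of what one if-manifest.json entry holds after a build, with shortened, hypothetical values; the key is the action name joined with rev-hashes of the input and output paths, as computed in filter() above:

"image.4f2a1c|9b0e7d": {
  "hash": "image.4f2a1c|9b0e7d",
  "input": "pages/example/photo.jpg",
  "output": "example/photo.1024w.jpeg",
  "mtime": 1580000000,
  "lastSeen": 1580000100,
  "lastSeenHuman": "2020-01-26T00:55:00.000Z",
  "revHash": "c0ffee1234",
  "revPath": "example/photo.1024w-c0ffee1234.jpeg"
}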
gulp/index.js (Normal file, 113 lines)
@@ -0,0 +1,113 @@

const { series, parallel, watch } = require('gulp');

/** **************************************************************************************************************** **/

var { loadLayout, pages } = require('./contents');
var contentTask = series(loadLayout, pages);
exports.pages = series(loadLayout, pages);
exports.content = contentTask;

var images = require('./imgflow');
exports.twimages = images.twitter;
exports.images = images;
exports['images-prod'] = images.prod;
exports['twimages-prod'] = images.twitter.prod;
exports.favicon = images.favicon;

const filesTask = require('./files');
exports.files = filesTask;
exports['files-prod'] = filesTask.prod;

var scssTask = require('./scss');
exports.scss = scssTask;

var jsTask = require('./scripts');
exports.js = jsTask;

var jsRollupTask = require('./rollup');
exports.jsr = jsRollupTask;

var cleanTask = require('./clean');
exports.clean = cleanTask;

const pushToProd = require('./publish');
exports.push = pushToProd;

const cloudfront = require('./cloudfront');
exports.cloudfront = cloudfront;

/** **************************************************************************************************************** **/

exports.new = require('./new');

var buildTask = series(
  images.prod,
  images.favicon.prod,
  scssTask.prod,
  jsTask.prod,
  filesTask.prod,
  loadLayout.prod,
  pages.prod,
  images.twitter.prod,
);

var devBuildTask = series(
  parallel(
    images,
    images.favicon,
    scssTask,
    jsTask,
    filesTask,
  ),
  loadLayout,
  pages,
  images.twitter,
);

exports.dev = devBuildTask;
exports.prod = buildTask;
exports.publish = series(
  cleanTask,
  buildTask,
  pushToProd,
  cloudfront.prod,
);
exports.testpush = pushToProd.dryrun;

/** **************************************************************************************************************** **/

function watcher () {

  watch([
    'pages/**/*.{md,hbs,html}',
    'templates/*.{md,hbs,html}',
  ], series(contentTask, images.twitter));

  watch('pages/**/*.{jpeg,jpg,png,gif}', images);

  watch('scss/*.scss', scssTask);
  watch('js/*.js', jsTask);

  var forever = require('forever');
  var srv = new forever.Monitor('server.js');
  srv.start();
  forever.startServer(srv);
}

function server () {

  var forever = require('forever');
  var srv = new forever.Monitor('server.js');
  srv.start();
  forever.startServer(srv);

}

exports.watch = series(contentTask, watcher);
exports.uat = series(cleanTask, buildTask, server);

/** **************************************************************************************************************** **/

exports.default = series(cleanTask.dev, devBuildTask, watcher);
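Every export above is a runnable gulp task from the repo root; a sketch of the common invocations (task names from this file, gulp-cli assumed installed):

npx gulp            # default: clean dev artifacts, dev build, then watch
npx gulp prod       # full production build
npx gulp publish    # clean, build, push (gulp/publish.js, not shown here), invalidate CloudFront
npx gulp new        # presumably scaffolds a new post from gulp/_template.js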
gulp/lib/crass.js (Normal file, 31 lines)
@@ -0,0 +1,31 @@

const through = require('./through');
const crass = require('crass');
const PluginError = require('plugin-error');

module.exports = exports = function (options) {
  options = {
    pretty: false,
    o1: true,
    ...options,
  };

  return through(async (stream, file) => {
    if (file.isNull()) {
      stream.push(file);
      return;
    }

    try {
      var parsed = crass.parse(file.contents.toString());
      parsed = parsed.optimize({ O1: !!options.o1 });
      if (options.pretty) parsed = parsed.pretty();

      file.contents = Buffer.from(parsed.toString());
    } catch (err) {
      // `this` is not the stream inside this arrow function; emit on the stream instead
      stream.emit('error', new PluginError('gulp-crass', err));
    }

    stream.push(file);
  });
};
gulp/lib/debug.js (Normal file, 32 lines)
@@ -0,0 +1,32 @@

const through = require('./through');
const log = require('fancy-log');
const { get } = require('lodash');

module.exports = exports = function debug (...targets) {
  return through(async (stream, file) => {
    var data;
    const { path, relative, base, basename, extname } = file;

    if (targets.length === 1 && Array.isArray(targets[0])) {
      targets = targets[0];
    }

    if (targets.length) {
      data = targets.reduce((result, target) => {
        if (target === 'contents') {
          result.contents = file.contents.toString();
          return result;
        }

        result[target] = get(file, target);
        return result;
      }, {});
    } else {
      data = { ...file, path, relative, base, basename, extname };
    }
    log(data);
    stream.push(file);
  });
};
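Usage sketch in a pipeline; targets are lodash get() paths, and 'contents' is special-cased to stringify the file buffer:

src('pages/**/*.md')
  .pipe(debug('relative', 'meta.title', 'contents'))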
gulp/lib/dimensions.js (Normal file, 3 lines)
@@ -0,0 +1,3 @@

var { promisify } = require('util');
module.exports = exports = promisify(require('image-size'));
gulp/lib/filter.js (Normal file, 18 lines)
@@ -0,0 +1,18 @@

const filter = require('gulp-filter');

module.exports = exports = function filter2 (pattern, options) {
  if (pattern instanceof RegExp) {
    return filter((file) => pattern.test(file.path), options);
  }

  return filter(pattern, options);
};

exports.not = function notfilter2 (pattern, options) {
  if (pattern instanceof RegExp) {
    return filter((file) => !pattern.test(file.path), options);
  }

  throw new Error('filter.not only takes regular expressions');
};
gulp/lib/glob.js (Normal file, 4 lines)
@@ -0,0 +1,4 @@

var { promisify } = require('util');
const glob = require('glob');
module.exports = exports = promisify(glob);
gulp/lib/load.js (Normal file, 26 lines)
@@ -0,0 +1,26 @@

const through = require('./through');
const fs = require('fs-extra');
const log = require('fancy-log');
const parallelize = require('concurrent-transform');

module.exports = exports = function load () {
  return parallelize(through(async (stream, file) => {

    if (file.contents) {
      // file already has contents, ignore
      stream.push(file);
      return;
    }

    const exists = await fs.pathExists(file.path);
    // if (!exists) return;

    log('[loading]', file.path, exists);

    file.contents = await fs.readFile(file.path);

    stream.push(file);
  }), 20);
};
gulp/lib/markdown-raw-html.js (Normal file, 181 lines)
@@ -0,0 +1,181 @@

module.exports = exports = function (md, options) {

  options = {
    fence: '|||',
    ...options,
  };

  function debug (...args) {
    if (options.debug) console.log(...args); // eslint-disable-line
  }

  const fenceLen = options.fence.length;
  // const fenceFirst = options.fence.charCodeAt(0);

  function scanAhead (state, line, pos) {
    const position = state.src.indexOf(options.fence, pos);
    if (position === -1) {
      // there are no html blocks in this entire file
      state.discreteHtmlScan = {
        present: false,
      };
      return false;
    }

    while (position > state.eMarks[line]) {
      line++;
    }

    state.discreteHtmlScan = {
      present: true,
      position,
      line,
    };

    return true;
  }

  md.block.ruler.before('fence', 'raw', (state, startLine, lastLine) => {
    let pos = state.bMarks[startLine] + state.tShift[startLine];
    let endOfLine = state.eMarks[startLine];

    // if we have yet to do a scan of this file, perform one.
    if (!state.discreteHtmlScan && !scanAhead(state, startLine, pos)) {
      debug('First scan, nothing found');
      return false;
    }

    if (!state.discreteHtmlScan.present) {
      debug('Have scanned, did not find');
      return false;
    }

    // add one to the line here in case there is a line break in a paragraph.
    if (state.discreteHtmlScan.line > startLine + 1) {
      debug('Have scanned, found, but after this line', { startLine, targetLine: state.discreteHtmlScan.line });
      return false;
    }

    if (startLine > state.discreteHtmlScan.line) {
      // we dun fucked up
      debug('We somehow got ahead of ourselves', { startLine, line: state.discreteHtmlScan.line, lastLine, pos, endOfLine, tokens: state.tokens });
      throw new Error('markdown-it-discrete-html encountered a parsing error.');
    }

    // at this point we should be on a line that contains a fence mark
    debug({ l: 67, startLine, scan: state.discreteHtmlScan });

    let openIndex, closer, nextLine;
    openIndex = state.discreteHtmlScan.position;
    do {
      let token, closeIndex;
      const tokens = [];
      const preBlock = openIndex > pos && state.src.slice(pos, openIndex);
      debug({ l: 75, preBlock, startLine, lastLine });
      openIndex += fenceLen;
      pos = openIndex;

      if (preBlock && !!preBlock.trim()) {
        md.block.parse(preBlock, md, state.env, tokens);

        switch (tokens[tokens.length - 1].type) {
        case 'heading_close':
        case 'paragraph_close':
          closer = tokens.pop();
          // fallthrough
        default:
          state.tokens.push(...tokens);
        }
      }

      debug({ l: 92, tokens });

      // find terminating fence
      if (!scanAhead(state, startLine, pos)) {
        debug({ l: 96, remaining: state.src.slice(pos) });
        // console.error(state.src)
        throw new Error(`Could not find terminating "${options.fence}" for a raw html block.`);
      }

      closeIndex = state.discreteHtmlScan.position;
      nextLine = state.discreteHtmlScan.line;

      if (nextLine === startLine) nextLine++;
      endOfLine = state.eMarks[nextLine];

      const content = state.src.substring(openIndex, closeIndex);
      closeIndex += fenceLen;
      pos = closeIndex;

      if (content.trim()) {
        token = state.push(closer ? 'html_inline' : 'html_block', '', 0);
        token.map = [ startLine, nextLine ];
        token.content = content;
        token.block = true;
        debug({ l: 115, tokens: [ token ], nextLine, pos, endOfLine: state.eMarks[nextLine], len: state.src.length, remaining: state.src.slice(pos) }); // eslint-disable-line
      }

      if (pos === endOfLine) {
        // we have ended this line, nothing more to do here.
        if (closer) {
          state.tokens.push(closer);
          debug({ l: 122, tokens: [ closer ] });
        }
        state.discreteHtmlScan = null;
        state.line = nextLine + 1;
        return true;
      }

      // still more left in this line, see if there is another block
      if (scanAhead(state, nextLine, pos)) {
        // we found another block, but it isn't on this line, so break out.
        if (state.discreteHtmlScan.line > nextLine) {
          if (closer) {
            state.tokens.push(closer);
            debug({ l: 135, tokens: [ closer ] });
          }
          state.line = nextLine + 1;
          return true;
        }

        // next block is on this line, grab everything between here and there
        openIndex = state.discreteHtmlScan.position;
      } else {
        // no more blocks on this line, grab everything between here and the end of the line
        openIndex = endOfLine;
      }
      debug({ l: 147, pos, openIndex, remaining: state.src.slice(pos) });

      const postBlock = state.src.slice(pos, openIndex);
      token = null;
      if (postBlock.trim()) {
        token = state.push('inline', '', 0);
        token.content = postBlock;
        token.map = [ nextLine, nextLine ];
        token.children = [];
        tokens.push(token);
      }
      debug({ l: 158, tokens: [ token ], postBlock, pos, openIndex, closeIndex, endOfLine });

      pos = openIndex;
      startLine = nextLine + 1;
      endOfLine = state.eMarks[startLine];

      debug({ l: 164, pos, startLine, endOfLine, remaining: state.src.slice(pos) });
    } while (pos + fenceLen < endOfLine);

    if (closer) {
      state.tokens.push(closer);
      debug({ l: 169, tokens: [ closer ] });
    }

    openIndex += fenceLen;
    pos = openIndex;

    state.line = startLine;
    return true;
  });

};
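To illustrate the plugin's effect, a sketch of a markdown source using the ||| fence (the syntax comes from the code above; the content is hypothetical):

Regular **markdown** paragraph.

|||<div class="custom-widget">This HTML is emitted verbatim, untouched by markdown-it.</div>|||

Back to markdown.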
gulp/lib/markdown-token-filter.js (Normal file, 46 lines)
@@ -0,0 +1,46 @@

const { flatten } = require('lodash');

module.exports = exports = function (md) {
  md.core.ruler.push(
    'modify-token',
    (state) => {
      state.tokens = flatten(state.tokens.map(descend).filter(Boolean));
      return false;
    },
  );
};

function descend (token) {

  switch (token.type) {
  case 'link_open':
  case 'link_close':
  case 'html_block':
    return false;

  case 'heading_open':
    token.type = 'paragraph_open';
    token.tag = 'p';
    token.markup = '';
    return token;

  case 'heading_close':
    token.type = 'paragraph_close';
    token.tag = 'p';
    token.markup = '';
    return token;

  case 'image':
  case 'container':
    return token.children;

  default:

    if (token.children && token.children.length) {
      token.children = flatten(token.children.map(descend).filter(Boolean));
    }

    return token;
  }
}
gulp/lib/random.js (Normal file, 47 lines)
@@ -0,0 +1,47 @@
'use strict';

var crypto = require('crypto');
var uuid = require('uuid').v4;

// based on code from http://stackoverflow.com/a/25690754/110189
function randomString (length, chars) {
  if (!chars) {
    throw new Error('Argument \'chars\' is undefined');
  }

  var charsLength = chars.length;
  if (charsLength > 256) {
    throw new Error('Charset must be no longer than 256 characters');
  }

  var randomBytes = crypto.randomBytes(length);

  var result = new Array(length);

  var cursor = 0;
  for (var i = 0; i < length; i++) {
    cursor += randomBytes[i];
    result[i] = chars[cursor % charsLength];
  }

  return result.join('');
}

module.exports = exports = function (min, max) {
  if (Array.isArray(min)) return exports.from(min);
  if (typeof max === 'undefined') {
    if (min > 0) {
      max = min;
      min = 0;
    } else {
      max = 0;
    }
  }
  return Math.floor((Math.random() * (max - min + 1)) + min);
};

exports.alphanumeric = (length) => randomString(length, 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789');
exports.alpha = (length) => randomString(length, 'ABCDEFGHIJKLMNOPQRSTUVWXYZ');
exports.fromCharSet = randomString;
exports.from = (array) => array[exports(array.length - 1)];
exports.id = (length) => uuid().replace(/-/g, '').substr(0, length);
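A few call shapes the helper supports, shown as a sketch:

random(10);            // integer between 0 and 10 inclusive
random(5, 10);         // integer between 5 and 10 inclusive
random([ 'a', 'b' ]);  // random element of the array
random.alpha(6);       // e.g. 'QHTWZA'
random.id(8);          // first 8 hex characters of a fresh uuid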
gulp/lib/sort.js (Normal file, 27 lines)
@@ -0,0 +1,27 @@
const through = require('./through');
const sortBy = require('lodash/sortBy');

function sleep (ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

module.exports = exports = function (iteratees) {
  var files = [];

  return through(
    async (stream, file) => {
      if (file.isNull()) return;

      files.push(file);
    },
    async (stream) => {
      const queue = sortBy(files, iteratees);
      files = null;

      for (const file of queue) {
        stream.push(file);
        await sleep(100);
      }
    }
  );
};
gulp/lib/through.js (Normal file, 19 lines)
@@ -0,0 +1,19 @@

const log = require('fancy-log');
var through = require('through2');

module.exports = exports = function asyncthrough (...args) {
  const [ fn, donefn ] = args;

  args[0] = function (file, enc, next) {
    fn(this, file, enc).then(() => next(), (err) => { log.error(err, 'Error thrown'); next(err); });
  };

  if (donefn) {
    args[1] = function (next) {
      donefn(this).then(() => next(), (err) => { log.error(err, 'Error thrown'); next(err); });
    };
  }

  return through.obj(...args);
};
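Usage sketch, mirroring how the other gulp modules in this commit call it: the wrapped function receives the stream itself as its first argument, so pushing zero, one, or many files per input is natural, and rejected promises become stream errors. The transform helper here is hypothetical:

const asyncthrough = require('./through');

src('pages/**/*.md')
  .pipe(asyncthrough(async (stream, file) => {
    file.contents = Buffer.from(await transform(file.contents.toString()));
    stream.push(file); // omit the push to drop the file from the stream
  }));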
140
gulp/lib/tweetparse.js
Normal file
140
gulp/lib/tweetparse.js
Normal file
@@ -0,0 +1,140 @@
var twemoji = require('twemoji');
const { deepPick, has } = require('./util');

const schema = {
  id_str: true,
  created_at: true,
  user: {
    screen_name: true,
    avatar: true,
    name_html: true,
    verified: true,
    protected: true,
  },
  html: true,
  quoted_status: {
    user: {
      screen_name: true,
      avatar: true,
      name_html: true,
      verified: true,
      protected: true,
    },
  },
  entities: { media: [ {
    type: true,
    media_url_https: true,
    video_info: { variants: [ {
      url: true,
      content_type: true,
    } ] },
  } ] },
  media: true,
};

var entityProcessors = {
  hashtags (tags, tweet) {
    tags.forEach((tagObj) => {
      tweet.html = tweet.html.replace('#' + tagObj.text, `<a href="https://twitter.com/hashtag/${tagObj.text}" class="hashtag">#${tagObj.text}</a>`);
    });
  },

  symbols (/* symbols, tweet */) {

  },

  user_mentions (users, tweet) {
    users.forEach((userObj) => {
      var regex = new RegExp('@' + userObj.screen_name, 'gi');
      tweet.html = tweet.html.replace(regex, `<a href="https://twitter.com/${userObj.screen_name}" class="mention">@${userObj.screen_name}</a>`);
    });
  },

  urls (urls, tweet) {
    urls.forEach((urlObj) => {
      var quotedTweetHtml = '';
      var indices = urlObj.indices;
      var urlToReplace = (tweet.full_text || tweet.text).substring(indices[0], indices[1]);

      var finalText = quotedTweetHtml || urlObj.display_url.link(urlObj.expanded_url);
      tweet.html = tweet.html.replace(urlToReplace, finalText);
    });
  },

  media (media, tweet) {
    media.forEach((mediaObj) => {
      tweet.html = tweet.html.replace(mediaObj.url, '');
      return;

      // if (mediaObj.type === 'photo') {
      //   // Use HTTPS if available
      //   var src = mediaObj.media_url_https ? mediaObj.media_url_https : mediaObj.media_url;

      //   if (options &&
      //     options.photoSize &&
      //     mediaObj.sizes &&
      //     mediaObj.sizes[options.photoSize]) {
      //     // If specified size is available, patch image src to use it
      //     src = src + ':' + options.photoSize;
      //   }

      //   tweet.html = tweet.html.replace(mediaObj.url, `<img src="${src}" alt=""/>`);
      // } else if (mediaObj.type === 'video') {
      //   var source = '';
      //   mediaObj.video_info.variants.forEach((info) => {
      //     source += `<source src="${info.url}" type="${info.content_type}">`;
      //   });
      //   var video = `<video controls poster="${mediaObj.media_url}">${source}</video>`;
      //   tweet.html = tweet.html.replace(mediaObj.url, video);
      // }
    });
  },
};

module.exports = exports = function (tweets) {
  return Array.isArray(tweets) ? tweets.map(parseTweet) : parseTweet(tweets);

  function parseTweet (tweet) {
    // clone the tweet so we're not altering the original
    tweet = JSON.parse(JSON.stringify(tweet));

    tweet.user.avatar = {
      input: tweet.user.profile_image_url_https,
      output: 'tweets/' + tweet.user.screen_name + '.jpg',
    };

    tweet.media = [
      tweet.user.avatar,
    ];

    // Copy the text value to a new property, html. The final output will be set on this property.
    tweet.html = (tweet.full_text || tweet.text)
      .split(/(\r\n|\n\r|\r|\n)+/)
      .map((s) => s.trim() && '<p>' + s + '</p>')
      .filter(Boolean)
      .join('');

    if (tweet.quoted_status) {
      tweet.quoted_status = parseTweet(tweet.quoted_status);
    }

    if (has(tweet, 'entities.media') && has(tweet, 'extended_entities.media')) {
      tweet.entities.media = tweet.extended_entities.media;
      delete tweet.extended_entities;
    }

    // Process entities
    if (Object.getOwnPropertyNames(tweet.entities).length) {
      for (let [ entityType, entity ] of Object.entries(tweet.entities)) { // eslint-disable-line prefer-const
        entityProcessors[entityType](entity, tweet);
      }
    }

    // Process emoji
    tweet.html = twemoji.parse(tweet.html);
    tweet.user.name_html = twemoji.parse(tweet.user.name);

    return deepPick(tweet, schema);
  }
};
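The exported function accepts either a single tweet object from the Twitter REST API or an array of them, and returns the pruned shape described by `schema` above. A sketch, assuming `tweet.json` holds a hypothetical raw API payload:

const tweetparse = require('./gulp/lib/tweetparse');

const raw = require('./tweet.json'); // hypothetical raw API response
const parsed = tweetparse(raw);

parsed.html;        // paragraphs with linked hashtags, mentions, and twemoji images
parsed.user.avatar; // { input: <profile image url>, output: 'tweets/<screen_name>.jpg' }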
1480
gulp/lib/util.js
Normal file
File diff suppressed because it is too large
35
gulp/new.js
Normal file
@@ -0,0 +1,35 @@
const argv = require('minimist')(process.argv.slice(2));
const format = require('date-fns/format');
const parse = require('date-fns/parse');
const random = require('./lib/random');
const path = require('path');
const fs = require('fs-extra');
const log = require('fancy-log');
const template = require('./_template');

const ROOT = path.dirname(__dirname);

module.exports = exports = async function newPost () {
  var date = argv.date ? parse(argv.date, 'yyyy-MM-dd', new Date()) : new Date();

  // a date parsed from --date lands at midnight, so fill in the current time
  if (!date.getHours()) {
    const now = new Date();
    date.setHours(now.getHours());
    date.setMinutes(now.getMinutes());
  }

  var id = random.id().substr(-6).toUpperCase();
  var fname = format(date, 'yyyy-MM-dd.HHmm.') + id;

  var target = path.join(ROOT, 'posts', fname);

  await fs.ensureDir(target);

  var contents = template({ id, date });

  await fs.writeFile(path.join(target, 'index.md'), contents);

  log('Created new post at posts/' + fname);
};
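For reference, the folder name is the post's timestamp plus the six-character id, so a run against the hypothetical date below would produce (the id is invented for illustration):

const format = require('date-fns/format');

const date = new Date(2020, 0, 15, 9, 30);
format(date, 'yyyy-MM-dd.HHmm.') + '3FA2C1';
// -> '2020-01-15.0930.3FA2C1', written as posts/2020-01-15.0930.3FA2C1/index.md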
83
gulp/publish.js
Normal file
@@ -0,0 +1,83 @@
const { src } = require('gulp');
const awspublish = require('gulp-awspublish');
const awsrouter = require('gulp-awspublish-router');
const parallelize = require('concurrent-transform');
// const cloudfront = require('gulp-cloudfront-invalidate-aws-publish');
const debug = require('./lib/debug');

// const path = require('path');
// const ROOT = path.dirname(__dirname);
const DEST = 'dist';

var credentials = require('../aws.json');

const routes = {
  'p\\/.*\\.(?:jpeg|jpg|png|gif)$': {
    cacheTime: 86400, // one day on client
    sharedCacheTime: 2592000, // 30 days on server
  },

  '^(?:index|tags|drafts)\\.html$': {
    cacheTime: 60, // one minute on client
    sharedCacheTime: 60, // one minute on server
  },

  '^(?:sitemap|atom)\\.xml$': {
    cacheTime: 3600, // one hour on client
    sharedCacheTime: 86400, // one day on server
  },

  '^404\\.html$': {
    cacheTime: 2592000, // 30 days on client
    sharedCacheTime: 2592000, // 30 days on server
  },

  '\\.html$': {
    cacheTime: 3600, // 1 hour on client
    sharedCacheTime: 3600, // 1 hour on server
  },

  '\\.(?:js|css)$': {
    cacheTime: 604800, // one week on client
    sharedCacheTime: 2592000, // one month on server
  },

  // pass-through for anything that wasn't matched by routes above, to be uploaded with default options
  '^.+$': '$&',
};

module.exports = exports = function s3deploy () {
  var publisher = awspublish.create(credentials);

  return src(`${DEST}/**/*`)
    .pipe(awsrouter({
      cache: {
        gzip: true,
        cacheTime: 1800, // 30 minutes on client
        sharedCacheTime: 86400, // one day on server
      },

      routes,
    }))
    .pipe(parallelize(publisher.publish(), 10))
    .pipe(publisher.sync())
    .pipe(publisher.cache())
    .pipe(awspublish.reporter({
      states: [ 'create', 'update', 'delete' ],
    }));
};

exports.dryrun = function s3DryRun () {
  return src(`${DEST}/**/*`)
    .pipe(awsrouter({
      cache: {
        gzip: true,
        cacheTime: 1800, // 30 minutes on client
        sharedCacheTime: 86400, // one day on server
      },

      routes,
    }))
    .pipe(debug('s3'))
  ;
};
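aws.json itself is not part of this commit; judging from how it is consumed here and in gulp/cloudfront.js, it presumably combines S3 client options with the CloudFront distribution id. A hypothetical shape, with every value a placeholder:

{
  "accessKeyId": "AKIA...",
  "secretAccessKey": "...",
  "region": "us-east-1",
  "params": { "Bucket": "example-bucket" },
  "distribution": "E1EXAMPLE"
}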
72
gulp/rollup.js
Normal file
@@ -0,0 +1,72 @@
const path = require('path');
const { src, dest } = require('gulp');
const rollup = require('gulp-better-rollup');
const { string } = require('rollup-plugin-string');
const resolveNodeModules = require('rollup-plugin-node-resolve');
const commonJs = require('rollup-plugin-commonjs');
const json = require('rollup-plugin-json');
// const alias = require('rollup-plugin-alias');

const minify = require('gulp-minify');
const rev = require('gulp-rev');
const asyncthrough = require('./lib/through');

const ROOT = path.dirname(__dirname);
const DEST = 'dist/js';

function rollupPipe () {
  return src('js-rollup/*.js')
    .pipe(rollup({
      // There is no `input` option, as rollup integrates into the gulp pipeline
      plugins: [
        string({
          include: '**/*.html',
        }),
        resolveNodeModules(),
        commonJs(),
        json(),
      ],
      external: [ 'jquery', 'lodash', 'underscore' ],
    }, {
      // Rollup's `sourcemap` option is unsupported. Use the `gulp-sourcemaps` plugin instead
      format: 'iife',
      globals: {
        jquery: '$',
        lodash: '_',
        backbone: 'Backbone',
        underscore: '_',
      },
    }));
}

module.exports = exports = function rollupJS () {
  return rollupPipe()
    .pipe(dest(DEST));
};

exports.prod = function rollupJSForProd () {
  return rollupPipe()
    .pipe(minify({
      ext: { min: '.js' },
      noSource: true,
    }))
    .pipe(dest(DEST))
    .pipe(rev())
    .pipe(dest(DEST))
    .pipe(asyncthrough(async (stream, file) => {
      // Change rev's original base path back to the public root so that it uses the full
      // path as the original file name key in the manifest
      var base = path.resolve(ROOT, 'dist');
      file.revOrigBase = base;
      file.base = base;

      stream.push(file);
    }))
    .pipe(rev.manifest({
      merge: true, // Merge with the existing manifest if one exists
    }))
    .pipe(dest('.'))
  ;
};
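The base-path juggling above exists so the manifest keys keep their `js/` (or `css/`) prefix relative to the public root, rather than being bare file names. Hypothetical rev-manifest.json entries after a prod build (file names and hashes invented for illustration):

{
  "js/global.js": "js/global-d41d8cd9.js",
  "css/style.css": "css/style-98f13708.css"
}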
52
gulp/scripts.js
Normal file
@@ -0,0 +1,52 @@
const path = require('path');
const { src, dest } = require('gulp');
const minify = require('gulp-minify');
const rev = require('gulp-rev');
const concat = require('gulp-concat');
const merge = require('merge-stream');
const asyncthrough = require('./lib/through');

const ROOT = path.dirname(__dirname);
const DEST = 'dist/js';

module.exports = exports = function sourceJS () {

  return merge(
    // everything except the _-prefixed partials, which are bundled into global.js below
    src([ 'js/*.js', '!js/_*.js' ]),
    src([
      require.resolve('jquery'),
      require.resolve('magnific-popup'),
      require.resolve('popper.js/dist/umd/popper.js'),
      require.resolve('bootstrap/js/dist/util.js'),
      require.resolve('bootstrap/js/dist/dropdown.js'),
      'js/_*.js',
    ]).pipe(concat('global.js')),
  ).pipe(dest(DEST));
};

exports.prod = function sourceJSForProd () {
  return exports()
    .pipe(minify({
      ext: { min: '.js' },
      noSource: true,
    }))
    .pipe(dest(DEST))
    .pipe(rev())
    .pipe(dest(DEST))
    .pipe(asyncthrough(async (stream, file) => {
      // Change rev's original base path back to the public root so that it uses the full
      // path as the original file name key in the manifest
      var base = path.resolve(ROOT, 'dist');
      file.revOrigBase = base;
      file.base = base;

      stream.push(file);
    }))
    .pipe(rev.manifest({
      merge: true, // Merge with the existing manifest if one exists
    }))
    .pipe(dest('.'))
  ;
};
59
gulp/scss.js
Normal file
@@ -0,0 +1,59 @@
const path = require('path');
const { src, dest } = require('gulp');
const scss = require('gulp-sass');
const rev = require('gulp-rev');
const asyncthrough = require('./lib/through');
const crass = require('./lib/crass');
const concat = require('gulp-concat');
const merge = require('merge-stream');
const postcss = require('gulp-postcss');
const autoprefixer = require('autoprefixer');

// const minifyCSS = require('gulp-minify-css');

const ROOT = path.dirname(__dirname);
const DEST = 'dist/css';

module.exports = exports = function buildScss () {

  const scssStream = src([ 'scss/*.scss', '!scss/_*.scss' ])
    .pipe(scss({
      includePaths: [ path.join(ROOT, 'node_modules') ],
    }));

  return merge(
    scssStream,
    src([
      require.resolve('magnific-popup/dist/magnific-popup.css'),
    ]),
  )
    .pipe(concat('style.css'))
    .pipe(postcss([
      autoprefixer(),
    ]))
    .pipe(dest(DEST));
};

exports.prod = function buildScssForProd () {
  return exports()
    .pipe(crass())
    .pipe(dest(DEST))
    .pipe(rev())
    .pipe(dest(DEST))
    .pipe(asyncthrough(async (stream, file) => {
      // Change rev's original base path back to the public root so that it uses the full
      // path as the original file name key in the manifest
      var base = path.resolve(ROOT, 'dist');
      file.revOrigBase = base;
      file.base = base;

      stream.push(file);
    }))
    .pipe(rev.manifest({
      merge: true, // Merge with the existing manifest if one exists
    }))
    .pipe(dest('.'))
  ;
};
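None of these task modules self-register; a gulpfile presumably wires them together. A hypothetical sketch of that wiring (the task names and composition are assumptions, not part of this diff):

const { series, parallel } = require('gulp');
const scripts = require('./gulp/scripts');
const scss = require('./gulp/scss');
const publish = require('./gulp/publish');

exports.build = parallel(scripts, scss);
exports.publish = series(parallel(scripts.prod, scss.prod), publish);
exports['publish:dryrun'] = publish.dryrun;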