mirror of https://github.com/GenderDysphoria/GenderDysphoria.fyi.git synced 2025-11-25 20:42:40 +00:00

Site JS is now compiled inside of the content pipeline. No more gulp streams.

Jocelyn Badgley (Twipped)
2020-02-28 10:27:52 -08:00
parent 695637c3e7
commit 823f662743
13 changed files with 105 additions and 633 deletions
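For context, a condensed sketch of the new flow in gulp/content/index.js, assembled from the hunks below; the require paths for Cache and evaluate are assumptions, everything else is named in the diff. Each asset type (scss, svg, favicon, and now client scripts) returns an array of task descriptors, and those tasks run through the shared cache/evaluate step instead of a separate gulp stream:

const scss = require('./scss');
const svg = require('./svg');
const favicon = require('./favicon');
const scripts = require('./scripts');   // new: client JS task generator
const Cache = require('./cache');       // assumed path
const evaluate = require('./evaluate'); // assumed path

async function buildAssets (prod = false) {
  // Every asset builder now returns task descriptors rather than piping a stream.
  const tasks = await Promise.all([
    scss(prod),
    scripts(prod),
    svg(prod),
    favicon(prod),
  ]);

  // The flattened task list is executed against the shared build cache.
  const cache = new Cache({ prod });
  await cache.load();
  await evaluate(tasks.flat(), cache);
  await cache.save();
}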


@@ -12,6 +12,8 @@ const { resolve } = require('./resolve');
const favicon = require('./favicon');
const scss = require('./scss');
const svg = require('./svg');
const scripts = require('./scripts');
exports.everything = function (prod = false) {
const fn = async () => {
@@ -30,23 +32,20 @@ exports.everything = function (prod = false) {
const tasks = await Promise.all([
PublicFiles.tasks,
scss(prod),
scripts(prod),
svg(prod),
favicon(prod),
]);
async function crankTasks () {
if (!tasks.length) return;
const cache = new Cache({ prod });
await cache.load();
await evaluate(tasks.flat(), cache);
await cache.save();
}
await fs.writeFile(resolve('pages.json'), JSON.stringify(pages.map((p) => p.toJson()), null, 2));
await Promise.all([
fs.writeFile(resolve('pages.json'), JSON.stringify(pages.map((p) => p.toJson()), null, 2)),
pageWriter(pages, prod),
crankTasks(),
]);
await fs.ensureDir(resolve('dist'));
const cache = new Cache({ prod });
await cache.load();
await evaluate(tasks.flat(), cache);
await cache.save();
await pageWriter(pages, prod);
};
const ret = () => fn().catch((err) => { console.log(err.trace || err); throw err; });


@@ -2,7 +2,7 @@
const path = require('path');
const ROOT = path.resolve(__dirname, '../..');
const fs = require('fs-extra');
const { is: _is, re } = require('../lib/util');
const { is: _is } = require('../lib/util');
function is (...args) {
const fn = _is(...args);
@@ -135,10 +135,8 @@ exports.readFile = function readFile (fpath) {
exports.resolve = function resolve (...args) {
args = args.filter(Boolean);
let fpath = args.shift();
const fpath = args.shift();
if (!fpath) return ROOT;
if (fpath[0] === '/') throw new Error('Did you mean to resolve this? ' + fpath);
// if (fpath[0] === '/') fpath = fpath.slice(1);
return path.resolve(ROOT, fpath, ...args);
};

gulp/content/scripts.js (new file, 75 lines)

@@ -0,0 +1,75 @@
const glob = require('../lib/glob');
const { ROOT, readFile } = require('./resolve');
const actions = require('./actions');
const File = require('./file');
const Promise = require('bluebird');
const { minify } = require('terser');
module.exports = exports = async function scripts (prod) {
const globalFiles = await glob('js/_*.js', { cwd: ROOT, nodir: true });
globalFiles.unshift(
require.resolve('jquery'),
require.resolve('magnific-popup'),
require.resolve('popper.js/dist/umd/popper.js'),
require.resolve('bootstrap/js/dist/util.js'),
require.resolve('bootstrap/js/dist/dropdown.js'),
);
const globalScript = new ClientScript('js/global.js');
await globalScript.concat(globalFiles, prod);
const files = await Promise.map(glob('js/*.js', { cwd: ROOT, nodir: true }), async (filepath) => {
const f = new ClientScript(filepath);
if (f.preprocessed) return false;
await f.load(prod);
return f;
}).filter(Boolean);
const tasks = files.map((f) => f.tasks()).flat();
tasks.push(...globalScript.tasks());
return tasks;
};
class ClientScript extends File {
_dir (dir) {
dir = dir.split('/');
return dir;
}
async load (prod) {
let contents = (await readFile(this.input).catch(() => '')).toString('utf8');
if (prod) {
const { code, error } = minify(contents);
if (error) throw new Error(error);
contents = code;
}
this.content = contents;
}
async concat (files, prod) {
let contents = await Promise.map(files, readFile);
contents = contents.join('\n\n');
if (prod) {
const { code, error } = minify(contents);
if (error) throw new Error(error);
contents = code;
}
this.content = contents;
}
tasks () {
return [ {
input: this.input,
output: this.out,
content: this.content,
action: actions.write,
nocache: true,
} ];
}
}
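A hypothetical usage sketch for the new module (the require path mirrors the file location above; the task shape is what ClientScript.tasks() returns): calling scripts(true) produces terser-minified content, and the resolved array of write tasks is what the content pipeline hands to evaluate().

const scripts = require('./gulp/content/scripts');

scripts(true).then((tasks) => {
  // each task: { input, output, content, action: actions.write, nocache: true }
  console.log(tasks.map((t) => t.output));
});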


@@ -1,17 +1,12 @@
const { series, parallel, watch } = require('gulp');
const { series, watch } = require('gulp');
/** **************************************************************************************************************** **/
var content = require('./content');
const everything = content.everything();
everything.prod = content.everything(true);
exports.go = series(everything);
var jsTask = require('./scripts');
exports.js = jsTask;
const devBuildTask = content.everything();
const prodBuildTask = content.everything(true);
var cleanTask = require('./clean');
exports.clean = cleanTask;
@@ -24,18 +19,8 @@ exports.cloudfront = cloudfront;
/** **************************************************************************************************************** **/
var prodBuildTask = parallel(
jsTask.prod,
everything.prod,
);
var devBuildTask = parallel(
jsTask,
everything,
);
exports.dev = devBuildTask;
exports.prod = prodBuildTask;
exports.dev = series(devBuildTask);
exports.prod = series(prodBuildTask);
exports.publish = series(
cleanTask,
prodBuildTask,
@@ -52,14 +37,10 @@ function watcher () {
'public/**/*',
'templates/*.{md,hbs,html}',
'scss/*.scss',
], everything);
'js/*.js',
], devBuildTask);
watch('js/*.js', jsTask);
var forever = require('forever');
var srv = new forever.Monitor('server.js');
srv.start();
forever.startServer(srv);
server();
}
function server () {
@@ -71,7 +52,7 @@ function server () {
}
exports.watch = series(everything, watcher);
exports.watch = series(devBuildTask, watcher);
exports.uat = series(cleanTask, prodBuildTask, server);
/** **************************************************************************************************************** **/


@@ -1,31 +0,0 @@
const through = require('./through');
const crass = require('crass');
const PluginError = require('plugin-error');
module.exports = exports = function (options) {
options = {
pretty: false,
o1: true,
...options,
};
return through(async (stream, file) => {
if (file.isNull()) {
stream.push(file);
return;
}
try {
var parsed = crass.parse(file.contents.toString());
parsed = parsed.optimize({ O1: !!options.o1 });
if (options.pretty) parsed = parsed.pretty();
file.contents = Buffer.from(parsed.toString());
} catch (err) {
this.emit('error', new PluginError('gulp-crass', err));
}
stream.push(file);
});
};


@@ -1,32 +0,0 @@
const through = require('./through');
const log = require('fancy-log');
const { get } = require('lodash');
module.exports = exports = function debug (...targets) {
return through(async (stream, file) => {
var data;
const { path, relative, base, basename, extname } = file;
if (targets.length === 1 && Array.isArray(targets[0])) {
targets = targets[0];
}
if (targets.length) {
data = targets.reduce((result, target) => {
if (target === 'contents') {
result.contents = file.contents.toString();
return result;
}
result[target] = get(file, target);
return result;
}, {});
} else {
data = { ...file, path, relative, base, basename, extname };
}
log(data);
stream.push(file);
});
};


@@ -1,18 +0,0 @@
const filter = require('gulp-filter');
module.exports = exports = function filter2 (pattern, options) {
if (pattern instanceof RegExp) {
return filter((file) => pattern.test(file.path), options);
}
return filter(pattern, options);
};
exports.not = function notfilter2 (pattern, options) {
if (pattern instanceof RegExp) {
return filter((file) => !pattern.test(file.path), options);
}
throw new Error('filter.not only takes regular expressions');
};


@@ -1,27 +0,0 @@
const through = require('./through');
const sortBy = require('lodash/sortBy');
function sleep (ms) {
return new Promise((resolve) => setTimeout(resolve, ms));
}
module.exports = exports = function (iteratees) {
var files = [];
return through(
async (stream, file) => {
if (file.isNull()) return;
files.push(file);
},
async (stream) => {
const queue = sortBy(files, iteratees);
files = null;
for (const file of queue) {
stream.push(file);
await sleep(100);
}
},
);
};


@@ -1,19 +0,0 @@
const log = require('fancy-log');
var through = require('through2');
module.exports = exports = function asyncthrough (...args) {
const [ fn, donefn ] = args;
args[0] = function (file, enc, next) {
fn(this, file, enc).then(() => next(), (err) => { log.error(err, 'Error thrown'); next(err); });
};
if (donefn) {
args[1] = function (next) {
donefn(this).then(() => next(), (err) => { log.error(err, 'Error thrown'); next(err); });
};
}
return through.obj(...args);
};


@@ -2,12 +2,6 @@ const { src } = require('gulp');
const awspublish = require('gulp-awspublish');
const awsrouter = require('gulp-awspublish-router');
const parallelize = require('concurrent-transform');
// const cloudfront = require('gulp-cloudfront-invalidate-aws-publish');
const debug = require('./lib/debug');
// const path = require('path');
// const ROOT = path.dirname(__dirname);
const DEST = 'dist';
var credentials = require('../aws.json');
@@ -44,7 +38,7 @@ const routes = {
module.exports = exports = function s3deploy () {
var publisher = awspublish.create(credentials);
return src(`${DEST}/**/*`)
return src('dist/**/*')
.pipe(awsrouter({
cache: {
gzip: true,
@@ -61,18 +55,3 @@ module.exports = exports = function s3deploy () {
states: [ 'create', 'update', 'delete' ],
}));
};
exports.dryrun = function s3DryRun () {
return src(`${DEST}/**/*`)
.pipe(awsrouter({
cache: {
gzip: true,
cacheTime: 1800, // 30 minutes on client
sharedCacheTime: 86400, // one day on server
},
routes,
}))
.pipe(debug('s3'))
;
};


@@ -1,52 +0,0 @@
const path = require('path');
const { src, dest } = require('gulp');
const minify = require('gulp-minify');
const rev = require('gulp-rev');
const concat = require('gulp-concat');
const merge = require('merge-stream');
const asyncthrough = require('./lib/through');
const ROOT = path.dirname(__dirname);
const DEST = 'dist/js';
module.exports = exports = function sourceJS () {
return merge(
src([ 'js/*.js', 'js/_*.js' ]),
src([
require.resolve('jquery'),
require.resolve('magnific-popup'),
require.resolve('popper.js/dist/umd/popper.js'),
require.resolve('bootstrap/js/dist/util.js'),
require.resolve('bootstrap/js/dist/dropdown.js'),
'js/_*.js',
]).pipe(concat('global.js')),
).pipe(dest(DEST));
};
exports.prod = function sourceJSForProd () {
return exports()
.pipe(minify({
ext: { min: '.js' },
noSource: true,
}))
.pipe(dest(DEST))
.pipe(rev())
.pipe(dest(DEST))
.pipe(asyncthrough(async (stream, file) => {
// Change rev's original base path back to the public root so that it uses the full
// path as the original file name key in the manifest
var base = path.resolve(ROOT, 'dist');
file.revOrigBase = base;
file.base = base;
stream.push(file);
}))
.pipe(rev.manifest({
merge: true, // Merge with the existing manifest if one exists
}))
.pipe(dest('.'))
;
};