forked from github.com/blag
Compare commits
9 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 2d3bb0c0f3 | |
| | 01b203ff5c | |
| | 6f70f7ca93 | |
| | 7f832a1445 | |
| | aac2d70fed | |
| | dc6547290b | |
| | b077e22984 | |
| | af5825b412 | |
| | 7d69c37032 | |
@@ -14,6 +14,7 @@ blag is named after [the blag of the webcomic xkcd][blagxkcd].
 * Theming support using [Jinja2][] templates
 * Generation of Atom feeds for blog content
 * Fenced code blocks and syntax highlighting using [Pygments][]
+* Integrated devserver
 
 blag runs on Linux, Mac and Windows and requires Python >= 3.8
 
blag/blag.py (68 changes)

@@ -19,10 +19,11 @@ from jinja2 import Environment, ChoiceLoader, FileSystemLoader, PackageLoader
 import feedgenerator
 
 from blag.markdown import markdown_factory, convert_markdown
+from blag.devserver import serve
 
 logger = logging.getLogger(__name__)
 logging.basicConfig(
-    level=logging.DEBUG,
+    level=logging.INFO,
     format='%(asctime)s %(levelname)s %(name)s %(message)s',
 )
 
@@ -50,7 +51,10 @@ def parse_args(args=None):
     commands = parser.add_subparsers(dest='command')
     commands.required = True
 
-    build_parser = commands.add_parser('build')
+    build_parser = commands.add_parser(
+        'build',
+        help='Build website.',
+    )
     build_parser.set_defaults(func=build)
     build_parser.add_argument(
         '-i', '--input-dir',
@@ -73,9 +77,38 @@ def parse_args(args=None):
         help='Static directory (default: static)',
     )
 
-    quickstart_parser = commands.add_parser('quickstart')
+    quickstart_parser = commands.add_parser(
+        'quickstart',
+        help="Quickstart blag, creating necessary configuration.",
+    )
     quickstart_parser.set_defaults(func=quickstart)
 
+    serve_parser = commands.add_parser(
+        'serve',
+        help="Start development server.",
+    )
+    serve_parser.set_defaults(func=serve)
+    serve_parser.add_argument(
+        '-i', '--input-dir',
+        default='content',
+        help='Input directory (default: content)',
+    )
+    serve_parser.add_argument(
+        '-o', '--output-dir',
+        default='build',
+        help='Output directory (default: build)',
+    )
+    serve_parser.add_argument(
+        '-t', '--template-dir',
+        default='templates',
+        help='Template directory (default: templates)',
+    )
+    serve_parser.add_argument(
+        '-s', '--static-dir',
+        default='static',
+        help='Static directory (default: static)',
+    )
+
     return parser.parse_args(args)
 
 
@@ -205,12 +238,13 @@ def process_markdown(convertibles, input_dir, output_dir,
     articles, pages : List[Tuple[str, Dict]]
 
     """
+    logger.info("Converting Markdown files...")
     md = markdown_factory()
 
     articles = []
     pages = []
     for src, dst in convertibles:
-        logger.debug(f'Processing {src}')
+        logger.info(f'Processing {src}')
         with open(f'{input_dir}/{src}', 'r') as fh:
             body = fh.read()
 
@@ -243,6 +277,25 @@ def generate_feed(
     blog_description,
     blog_author,
 ):
     """Generate Atom feed.
 
+    Parameters
+    ----------
+    articles : list[list[str, dict]]
+        list of relative output path and article dictionary
+    output_dir : str
+        where the feed is stored
+    base_url : str
+        base url
+    blog_title : str
+        blog title
+    blog_description : str
+        blog description
+    blog_author : str
+        blog author
+
     """
     logger.info('Generating Atom feed.')
     feed = feedgenerator.Atom1Feed(
         link=base_url,
         title=blog_title,
@@ -251,11 +304,15 @@ def generate_feed(
     )
 
     for dst, context in articles:
+        # if article has a description, use that. otherwise fall back to
+        # the title
+        description = context.get('description', context['title'])
+
         feed.add_item(
             title=context['title'],
             author_name=blog_author,
             link=base_url + dst,
-            description=context['title'],
+            description=description,
             content=context['content'],
             pubdate=context['date'],
         )
@@ -277,6 +334,7 @@ def generate_archive(articles, template, output_dir):
 
 
 def generate_tags(articles, tags_template, tag_template, output_dir):
+    logger.info("Generating Tag-pages.")
     os.makedirs(f'{output_dir}/tags', exist_ok=True)
 
     # get tags number of occurrences
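The new `serve` sub-command plugs into the same `set_defaults(func=...)` dispatch pattern already used by `build` and `quickstart`. Below is a minimal, self-contained sketch of that pattern; the `build()`/`serve()` bodies and the `main` wiring are illustrative assumptions and are not part of this diff.

```python
# Standalone sketch of the argparse dispatch pattern used above.
# build()/serve() bodies and the __main__ wiring are placeholders, not blag's code.
import argparse


def build(args):
    print(f'building {args.input_dir} -> {args.output_dir}')


def serve(args):
    print(f'serving {args.output_dir} on http://localhost:8000')


def parse_args(args=None):
    parser = argparse.ArgumentParser()
    commands = parser.add_subparsers(dest='command')
    commands.required = True

    build_parser = commands.add_parser('build', help='Build website.')
    build_parser.set_defaults(func=build)

    serve_parser = commands.add_parser('serve', help='Start development server.')
    serve_parser.set_defaults(func=serve)

    for p in (build_parser, serve_parser):
        p.add_argument('-i', '--input-dir', default='content')
        p.add_argument('-o', '--output-dir', default='build')

    return parser.parse_args(args)


if __name__ == '__main__':
    args = parse_args()
    args.func(args)  # dispatches to build() or serve() based on the sub-command
```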
blag/devserver.py (new file, 60 lines)

@@ -0,0 +1,60 @@
+import os
+import logging
+import time
+import multiprocessing
+from http.server import SimpleHTTPRequestHandler, HTTPServer
+from functools import partial
+
+from blag import blag
+
+
+logger = logging.getLogger(__name__)
+
+
+def get_last_modified(dirs):
+    """Get the last modified time.
+
+    This method recursively goes through `dirs` and returns the most
+    recent modification time found.
+
+    Parameters
+    ----------
+    dirs : list[str]
+        list of directories to search
+
+    Returns
+    -------
+    int : most recent modification time found in `dirs`
+
+    """
+    last_mtime = 0
+
+    for dir in dirs:
+        for root, dirs, files in os.walk(dir):
+            for f in files:
+                mtime = os.stat(os.path.join(root, f)).st_mtime
+                if mtime > last_mtime:
+                    last_mtime = mtime
+
+    return last_mtime
+
+
+def autoreload(args):
+    dirs = [args.input_dir, args.template_dir, args.static_dir]
+    logger.info(f'Monitoring {dirs} for changes...')
+    last_mtime = get_last_modified(dirs)
+    while True:
+        mtime = get_last_modified(dirs)
+        if mtime > last_mtime:
+            last_mtime = mtime
+            logger.debug('Change detected, rebuilding...')
+            blag.build(args)
+        time.sleep(1)
+
+
+def serve(args):
+    httpd = HTTPServer(('', 8000), partial(SimpleHTTPRequestHandler,
+                                           directory=args.output_dir))
+    proc = multiprocessing.Process(target=autoreload, args=(args,))
+    proc.start()
+    httpd.serve_forever()
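`serve()` starts the rebuild loop in a separate process and then blocks in the HTTP server, while `autoreload()` polls `get_last_modified()` once per second and calls `blag.build()` whenever anything under the watched directories changes. A minimal sketch of driving it directly is shown below; the `Namespace` fields simply mirror the CLI defaults above, and invoking it this way is an assumption for illustration since `blag serve` is the intended entry point.

```python
# Hypothetical direct invocation of the devserver; normally the 'blag serve'
# sub-command builds this Namespace from the parsed CLI arguments.
from argparse import Namespace

from blag import devserver

args = Namespace(
    input_dir='content',
    output_dir='build',
    template_dir='templates',
    static_dir='static',
)

# Blocks: spawns the autoreload process and serves the output dir on port 8000.
devserver.serve(args)
```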
@@ -1 +1 @@
-__VERSION__ = '0.0.3'
+__VERSION__ = '0.0.4'
@@ -1,5 +1,5 @@
-twine==3.3.0
+twine==3.4.1
 wheel==0.36.2
-pytest==6.2.1
-pytest-cov==2.10.1
-flake8==3.8.4
+pytest==6.2.2
+pytest-cov==2.11.1
+flake8==3.9.0
@@ -1,4 +1,4 @@
-markdown==3.3.3
+markdown==3.3.4
 feedgenerator==1.9.1
-jinja2==2.11.2
-pygments==2.7.3
+jinja2==2.11.3
+pygments==2.8.1
@@ -1,5 +1,6 @@
 from tempfile import TemporaryDirectory
 import os
+from datetime import datetime
 
 import pytest
 
@@ -18,6 +19,76 @@ def test_generate_feed(outdir):
     assert os.path.exists(f'{outdir}/atom.xml')
 
 
+def test_feed(outdir):
+    articles = [
+        [
+            'dest1.html',
+            {
+                'title': 'title1',
+                'date': datetime(2019, 6, 6),
+                'content': 'content1',
+            }
+        ],
+        [
+            'dest2.html',
+            {
+                'title': 'title2',
+                'date': datetime(1980, 5, 9),
+                'content': 'content2',
+            }
+        ],
+
+    ]
+
+    blag.generate_feed(articles, outdir, 'https://example.com/', 'blog title',
+                       'blog description', 'blog author')
+    with open(f'{outdir}/atom.xml') as fh:
+        feed = fh.read()
+
+    assert '<title>blog title</title>' in feed
+    # enable when https://github.com/getpelican/feedgenerator/issues/22
+    # is fixed
+    # assert '<subtitle>blog description</subtitle>' in feed
+    assert '<author><name>blog author</name></author>' in feed
+
+    # article 1
+    assert '<title>title1</title>' in feed
+    assert '<summary type="html">title1' in feed
+    assert '<published>2019-06-06' in feed
+    assert '<content type="html">content1' in feed
+    assert '<link href="https://example.com/dest1.html"' in feed
+
+    # article 2
+    assert '<title>title2</title>' in feed
+    assert '<summary type="html">title2' in feed
+    assert '<published>1980-05-09' in feed
+    assert '<content type="html">content2' in feed
+    assert '<link href="https://example.com/dest2.html"' in feed
+
+
+def test_generate_feed_with_description(outdir):
+    # if a description is provided, it will be used as the summary in
+    # the feed, otherwise we simply use the title of the article
+    articles = [[
+        'dest.html',
+        {
+            'title': 'title',
+            'description': 'description',
+            'date': datetime(2019, 6, 6),
+            'content': 'content',
+        }
+    ]]
+    blag.generate_feed(articles, outdir, ' ', ' ', ' ', ' ')
+
+    with open(f'{outdir}/atom.xml') as fh:
+        feed = fh.read()
+
+    assert '<title>title</title>' in feed
+    assert '<summary type="html">description' in feed
+    assert '<published>2019-06-06' in feed
+    assert '<content type="html">content' in feed
+
+
 def test_parse_args_build():
     # test default args
     args = blag.parse_args(['build'])
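The assertions above rely on feedgenerator's Atom output mapping: `description` ends up in `<summary>` and `content` in `<content>`. A small standalone sketch of that API follows; the output file name and field values are illustrative only.

```python
# Standalone feedgenerator sketch mirroring what the tests assert;
# 'atom.xml' and the field values are illustrative.
from datetime import datetime

import feedgenerator

feed = feedgenerator.Atom1Feed(
    link='https://example.com/',
    title='blog title',
    description='blog description',
)
feed.add_item(
    title='title1',
    link='https://example.com/dest1.html',
    description='title1',  # blag falls back to the title when no description is set
    content='content1',
    pubdate=datetime(2019, 6, 6),
)

with open('atom.xml', 'w') as fh:
    feed.write(fh, 'utf-8')
```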
tests/test_devserver.py (new file, 30 lines)

@@ -0,0 +1,30 @@
+import time
+
+import pytest
+
+from tempfile import TemporaryDirectory
+from blag import devserver
+
+
+@pytest.fixture
+def tempdir():
+    with TemporaryDirectory() as dir:
+        yield dir
+
+
+def test_get_last_modified(tempdir):
+    # take initial time
+    t1 = devserver.get_last_modified([tempdir])
+
+    # wait a bit, create a file and measure again
+    time.sleep(0.1)
+    with open(f'{tempdir}/test', 'w') as fh:
+        fh.write('boo')
+    t2 = devserver.get_last_modified([tempdir])
+
+    # wait a bit and take time again
+    time.sleep(0.1)
+    t3 = devserver.get_last_modified([tempdir])
+
+    assert t2 > t1
+    assert t2 == t3