reformatted code

Author: Bastian Venthur
Date: 2023-06-16 10:25:21 +02:00
Parent: f6c5eaf375
Commit: 0349bd3359
11 changed files with 381 additions and 380 deletions

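The diff below is purely mechanical: string literals switch from single to double quotes, imports are regrouped and sorted alphabetically (standard library first, then third-party, then the local blag package), and the multi-line jinja2 import is collapsed onto one line. This matches what Black and isort produce with their default settings, though that is an inference from the style; the commit message only says "reformatted code". If that assumption holds, the same result can be reproduced by running "black ." and "isort ." in the repository root. A minimal sketch of the import convention being applied, using names that appear in the first file of the diff:

    # Standard-library imports, alphabetical
    import argparse
    import logging
    import os

    # Third-party imports
    import feedgenerator
    from jinja2 import Environment, FileSystemLoader, Template, TemplateNotFound

    # Local package imports
    import blag
    from blag.markdown import convert_markdown, markdown_factory
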
View File

@@ -6,32 +6,28 @@
# remove when we don't support py38 anymore
from __future__ import annotations
from typing import Any
import argparse
import configparser
import logging
import os
import shutil
import logging
import configparser
import sys
from typing import Any
from jinja2 import (
Environment,
FileSystemLoader,
Template,
TemplateNotFound,
)
import feedgenerator
from jinja2 import Environment, FileSystemLoader, Template, TemplateNotFound
import blag
from blag.markdown import markdown_factory, convert_markdown
from blag.devserver import serve
from blag.version import __VERSION__
from blag.markdown import convert_markdown, markdown_factory
from blag.quickstart import quickstart
from blag.version import __VERSION__
logger = logging.getLogger(__name__)
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s %(levelname)s %(name)s %(message)s',
format="%(asctime)s %(levelname)s %(name)s %(message)s",
)
@@ -70,84 +66,84 @@ def parse_args(args: list[str] | None = None) -> argparse.Namespace:
"""
parser = argparse.ArgumentParser()
parser.add_argument(
'--version',
action='version',
version='%(prog)s ' + __VERSION__,
"--version",
action="version",
version="%(prog)s " + __VERSION__,
)
parser.add_argument(
'-v',
'--verbose',
action='store_true',
help='Verbose output.',
"-v",
"--verbose",
action="store_true",
help="Verbose output.",
)
commands = parser.add_subparsers(dest='command')
commands = parser.add_subparsers(dest="command")
commands.required = True
build_parser = commands.add_parser(
'build',
help='Build website.',
"build",
help="Build website.",
)
build_parser.set_defaults(func=build)
build_parser.add_argument(
'-i',
'--input-dir',
default='content',
help='Input directory (default: content)',
"-i",
"--input-dir",
default="content",
help="Input directory (default: content)",
)
build_parser.add_argument(
'-o',
'--output-dir',
default='build',
help='Ouptut directory (default: build)',
"-o",
"--output-dir",
default="build",
help="Ouptut directory (default: build)",
)
build_parser.add_argument(
'-t',
'--template-dir',
default='templates',
help='Template directory (default: templates)',
"-t",
"--template-dir",
default="templates",
help="Template directory (default: templates)",
)
build_parser.add_argument(
'-s',
'--static-dir',
default='static',
help='Static directory (default: static)',
"-s",
"--static-dir",
default="static",
help="Static directory (default: static)",
)
quickstart_parser = commands.add_parser(
'quickstart',
"quickstart",
help="Quickstart blag, creating necessary configuration.",
)
quickstart_parser.set_defaults(func=quickstart)
serve_parser = commands.add_parser(
'serve',
"serve",
help="Start development server.",
)
serve_parser.set_defaults(func=serve)
serve_parser.add_argument(
'-i',
'--input-dir',
default='content',
help='Input directory (default: content)',
"-i",
"--input-dir",
default="content",
help="Input directory (default: content)",
)
serve_parser.add_argument(
'-o',
'--output-dir',
default='build',
help='Ouptut directory (default: build)',
"-o",
"--output-dir",
default="build",
help="Ouptut directory (default: build)",
)
serve_parser.add_argument(
'-t',
'--template-dir',
default='templates',
help='Template directory (default: templates)',
"-t",
"--template-dir",
default="templates",
help="Template directory (default: templates)",
)
serve_parser.add_argument(
'-s',
'--static-dir',
default='static',
help='Static directory (default: static)',
"-s",
"--static-dir",
default="static",
help="Static directory (default: static)",
)
return parser.parse_args(args)
@@ -170,18 +166,18 @@ def get_config(configfile: str) -> configparser.SectionProxy:
config = configparser.ConfigParser()
config.read(configfile)
# check for the mandatory options
for value in 'base_url', 'title', 'description', 'author':
for value in "base_url", "title", "description", "author":
try:
config['main'][value]
config["main"][value]
except Exception:
print(f'{value} is missing in {configfile}!')
print(f"{value} is missing in {configfile}!")
sys.exit(1)
if not config['main']['base_url'].endswith('/'):
logger.warning('base_url does not end with a slash, adding it.')
config['main']['base_url'] += '/'
if not config["main"]["base_url"].endswith("/"):
logger.warning("base_url does not end with a slash, adding it.")
config["main"]["base_url"] += "/"
return config['main']
return config["main"]
def environment_factory(
@@ -222,51 +218,51 @@ def build(args: argparse.Namespace) -> None:
args
"""
os.makedirs(f'{args.output_dir}', exist_ok=True)
os.makedirs(f"{args.output_dir}", exist_ok=True)
convertibles = []
for root, dirnames, filenames in os.walk(args.input_dir):
for filename in filenames:
rel_src = os.path.relpath(
f'{root}/{filename}', start=args.input_dir
f"{root}/{filename}", start=args.input_dir
)
# all non-markdown files are just copied over, the markdown
# files are converted to html
if rel_src.endswith('.md'):
if rel_src.endswith(".md"):
rel_dst = rel_src
rel_dst = rel_dst[:-3] + '.html'
rel_dst = rel_dst[:-3] + ".html"
convertibles.append((rel_src, rel_dst))
else:
shutil.copy(
f'{args.input_dir}/{rel_src}',
f'{args.output_dir}/{rel_src}',
f"{args.input_dir}/{rel_src}",
f"{args.output_dir}/{rel_src}",
)
for dirname in dirnames:
# all directories are copied into the output directory
path = os.path.relpath(f'{root}/{dirname}', start=args.input_dir)
os.makedirs(f'{args.output_dir}/{path}', exist_ok=True)
path = os.path.relpath(f"{root}/{dirname}", start=args.input_dir)
os.makedirs(f"{args.output_dir}/{path}", exist_ok=True)
# copy static files over
logger.info('Copying static files.')
logger.info("Copying static files.")
if os.path.exists(args.static_dir):
shutil.copytree(args.static_dir, args.output_dir, dirs_exist_ok=True)
config = get_config('config.ini')
config = get_config("config.ini")
env = environment_factory(args.template_dir, dict(site=config))
try:
page_template = env.get_template('page.html')
article_template = env.get_template('article.html')
index_template = env.get_template('index.html')
archive_template = env.get_template('archive.html')
tags_template = env.get_template('tags.html')
tag_template = env.get_template('tag.html')
page_template = env.get_template("page.html")
article_template = env.get_template("article.html")
index_template = env.get_template("index.html")
archive_template = env.get_template("archive.html")
tags_template = env.get_template("tags.html")
tag_template = env.get_template("tag.html")
except TemplateNotFound as exc:
tmpl = os.path.join(blag.__path__[0], 'templates')
tmpl = os.path.join(blag.__path__[0], "templates")
logger.error(
f'Template "{exc.name}" not found in {args.template_dir}! '
'Consider running `blag quickstart` or copying the '
f'missing template from {tmpl}.'
"Consider running `blag quickstart` or copying the "
f"missing template from {tmpl}."
)
sys.exit(1)
@@ -282,10 +278,10 @@ def build(args: argparse.Namespace) -> None:
generate_feed(
articles,
args.output_dir,
base_url=config['base_url'],
blog_title=config['title'],
blog_description=config['description'],
blog_author=config['author'],
base_url=config["base_url"],
blog_title=config["title"],
blog_description=config["description"],
blog_author=config["author"],
)
generate_index(articles, index_template, args.output_dir)
generate_archive(articles, archive_template, args.output_dir)
@@ -330,9 +326,9 @@ def process_markdown(
articles = []
pages = []
for src, dst in convertibles:
logger.debug(f'Processing {src}')
logger.debug(f"Processing {src}")
with open(f'{input_dir}/{src}', 'r') as fh:
with open(f"{input_dir}/{src}", "r") as fh:
body = fh.read()
content, meta = convert_markdown(md, body)
@@ -342,17 +338,17 @@ def process_markdown(
# if markdown has date in meta, we treat it as a blog article,
# everything else are just pages
if meta and 'date' in meta:
if meta and "date" in meta:
articles.append((dst, context))
result = article_template.render(context)
else:
pages.append((dst, context))
result = page_template.render(context)
with open(f'{output_dir}/{dst}', 'w') as fh_dest:
with open(f"{output_dir}/{dst}", "w") as fh_dest:
fh_dest.write(result)
# sort articles by date, descending
articles = sorted(articles, key=lambda x: x[1]['date'], reverse=True)
articles = sorted(articles, key=lambda x: x[1]["date"], reverse=True)
return articles, pages
@@ -382,30 +378,30 @@ def generate_feed(
blog author
"""
logger.info('Generating Atom feed.')
logger.info("Generating Atom feed.")
feed = feedgenerator.Atom1Feed(
link=base_url,
title=blog_title,
description=blog_description,
feed_url=base_url + 'atom.xml',
feed_url=base_url + "atom.xml",
)
for dst, context in articles:
# if article has a description, use that. otherwise fall back to
# the title
description = context.get('description', context['title'])
description = context.get("description", context["title"])
feed.add_item(
title=context['title'],
title=context["title"],
author_name=blog_author,
link=base_url + dst,
description=description,
content=context['content'],
pubdate=context['date'],
content=context["content"],
pubdate=context["date"],
)
with open(f'{output_dir}/atom.xml', 'w') as fh:
feed.write(fh, encoding='utf8')
with open(f"{output_dir}/atom.xml", "w") as fh:
feed.write(fh, encoding="utf8")
def generate_index(
@@ -429,11 +425,11 @@ def generate_index(
archive = []
for dst, context in articles:
entry = context.copy()
entry['dst'] = dst
entry["dst"] = dst
archive.append(entry)
result = template.render(dict(archive=archive))
with open(f'{output_dir}/index.html', 'w') as fh:
with open(f"{output_dir}/index.html", "w") as fh:
fh.write(result)
@@ -458,11 +454,11 @@ def generate_archive(
archive = []
for dst, context in articles:
entry = context.copy()
entry['dst'] = dst
entry["dst"] = dst
archive.append(entry)
result = template.render(dict(archive=archive))
with open(f'{output_dir}/archive.html', 'w') as fh:
with open(f"{output_dir}/archive.html", "w") as fh:
fh.write(result)
@@ -484,11 +480,11 @@ def generate_tags(
"""
logger.info("Generating Tag-pages.")
os.makedirs(f'{output_dir}/tags', exist_ok=True)
os.makedirs(f"{output_dir}/tags", exist_ok=True)
# get tags number of occurrences
all_tags: dict[str, int] = {}
for _, context in articles:
tags: list[str] = context.get('tags', [])
tags: list[str] = context.get("tags", [])
for tag in tags:
all_tags[tag] = all_tags.get(tag, 0) + 1
# sort by occurrence
@@ -497,25 +493,25 @@ def generate_tags(
)
result = tags_template.render(dict(tags=taglist))
with open(f'{output_dir}/tags/index.html', 'w') as fh:
with open(f"{output_dir}/tags/index.html", "w") as fh:
fh.write(result)
# get tags and archive per tag
all_tags2: dict[str, list[dict[str, Any]]] = {}
for dst, context in articles:
tags = context.get('tags', [])
tags = context.get("tags", [])
for tag in tags:
archive: list[dict[str, Any]] = all_tags2.get(tag, [])
entry = context.copy()
entry['dst'] = dst
entry["dst"] = dst
archive.append(entry)
all_tags2[tag] = archive
for tag, archive in all_tags2.items():
result = tag_template.render(dict(archive=archive, tag=tag))
with open(f'{output_dir}/tags/{tag}.html', 'w') as fh:
with open(f"{output_dir}/tags/{tag}.html", "w") as fh:
fh.write(result)
if __name__ == '__main__':
if __name__ == "__main__":
main()

View File

@@ -8,18 +8,18 @@ site if necessary.
# remove when we don't support py38 anymore
from __future__ import annotations
from typing import NoReturn
import os
import logging
import time
import multiprocessing
from http.server import SimpleHTTPRequestHandler, HTTPServer
from functools import partial
import argparse
import logging
import multiprocessing
import os
import time
from functools import partial
from http.server import HTTPServer, SimpleHTTPRequestHandler
from typing import NoReturn
from blag import blag
logger = logging.getLogger(__name__)
@@ -69,7 +69,7 @@ def autoreload(args: argparse.Namespace) -> NoReturn:
"""
dirs = [args.input_dir, args.template_dir, args.static_dir]
logger.info(f'Monitoring {dirs} for changes...')
logger.info(f"Monitoring {dirs} for changes...")
# make sure we trigger the rebuild immediately when we enter the
# loop to avoid serving stale contents
last_mtime = 0.0
@@ -77,7 +77,7 @@ def autoreload(args: argparse.Namespace) -> NoReturn:
mtime = get_last_modified(dirs)
if mtime > last_mtime:
last_mtime = mtime
logger.info('Change detected, rebuilding...')
logger.info("Change detected, rebuilding...")
blag.build(args)
time.sleep(1)
@@ -92,7 +92,7 @@ def serve(args: argparse.Namespace) -> None:
"""
httpd = HTTPServer(
('', 8000),
("", 8000),
partial(SimpleHTTPRequestHandler, directory=args.output_dir),
)
proc = multiprocessing.Process(target=autoreload, args=(args,))

View File

@@ -7,8 +7,9 @@ processing.
# remove when we don't support py38 anymore
from __future__ import annotations
from datetime import datetime
import logging
from datetime import datetime
from urllib.parse import urlsplit, urlunsplit
from xml.etree.ElementTree import Element
@@ -16,7 +17,6 @@ from markdown import Markdown
from markdown.extensions import Extension
from markdown.treeprocessors import Treeprocessor
logger = logging.getLogger(__name__)
@@ -33,13 +33,13 @@ def markdown_factory() -> Markdown:
"""
md = Markdown(
extensions=[
'meta',
'fenced_code',
'codehilite',
'smarty',
"meta",
"fenced_code",
"codehilite",
"smarty",
MarkdownLinkExtension(),
],
output_format='html',
output_format="html",
)
return md
@@ -75,20 +75,20 @@ def convert_markdown(
# markdowns metadata consists as list of strings -- one item per
# line. let's convert into single strings.
for key, value in meta.items():
value = '\n'.join(value)
value = "\n".join(value)
meta[key] = value
# convert known metadata
# date: datetime
if 'date' in meta:
meta['date'] = datetime.fromisoformat(meta['date'])
meta['date'] = meta['date'].astimezone()
if "date" in meta:
meta["date"] = datetime.fromisoformat(meta["date"])
meta["date"] = meta["date"].astimezone()
# tags: list[str] and lower case
if 'tags' in meta:
tags = meta['tags'].split(',')
if "tags" in meta:
tags = meta["tags"].split(",")
tags = [t.lower() for t in tags]
tags = [t.strip() for t in tags]
meta['tags'] = tags
meta["tags"] = tags
return content, meta
@@ -98,25 +98,25 @@ class MarkdownLinkTreeprocessor(Treeprocessor):
def run(self, root: Element) -> Element:
for element in root.iter():
if element.tag == 'a':
url = element.get('href')
if element.tag == "a":
url = element.get("href")
# element.get could also return None, we haven't seen this so
# far, so lets wait if we raise this
assert url is not None
url = str(url)
converted = self.convert(url)
element.set('href', converted)
element.set("href", converted)
return root
def convert(self, url: str) -> str:
scheme, netloc, path, query, fragment = urlsplit(url)
logger.debug(
f'{url}: {scheme=} {netloc=} {path=} {query=} {fragment=}'
f"{url}: {scheme=} {netloc=} {path=} {query=} {fragment=}"
)
if scheme or netloc or not path:
return url
if path.endswith('.md'):
path = path[:-3] + '.html'
if path.endswith(".md"):
path = path[:-3] + ".html"
url = urlunsplit((scheme, netloc, path, query, fragment))
return url
@@ -128,6 +128,6 @@ class MarkdownLinkExtension(Extension):
def extendMarkdown(self, md: Markdown) -> None:
md.treeprocessors.register(
MarkdownLinkTreeprocessor(md),
'mdlink',
"mdlink",
0,
)

View File

@@ -4,10 +4,11 @@
# remove when we don't support py38 anymore
from __future__ import annotations
import configparser
import argparse
import shutil
import configparser
import os
import shutil
import blag
@@ -47,7 +48,7 @@ def copy_default_theme() -> None:
"""
print("Copying default theme...")
for dir_ in 'templates', 'content', 'static':
for dir_ in "templates", "content", "static":
print(f" Copying {dir_}...")
try:
shutil.copytree(
@@ -89,13 +90,13 @@ def quickstart(args: argparse.Namespace | None) -> None:
)
config = configparser.ConfigParser()
config['main'] = {
'base_url': base_url,
'title': title,
'description': description,
'author': author,
config["main"] = {
"base_url": base_url,
"title": title,
"description": description,
"author": author,
}
with open('config.ini', 'w') as fh:
with open("config.ini", "w") as fh:
config.write(fh)
copy_default_theme()

View File

@@ -1 +1 @@
__VERSION__ = '1.5.0'
__VERSION__ = "1.5.0"

View File

@@ -1,9 +1,10 @@
# remove when we don't support py38 anymore
from __future__ import annotations
from argparse import Namespace
from typing import Iterator, Callable
from tempfile import TemporaryDirectory
import os
from argparse import Namespace
from tempfile import TemporaryDirectory
from typing import Callable, Iterator
import pytest
from jinja2 import Environment, Template
@@ -14,43 +15,43 @@ from blag import blag, quickstart
@pytest.fixture
def environment(cleandir: str) -> Iterator[Environment]:
site = {
'base_url': 'site base_url',
'title': 'site title',
'description': 'site description',
'author': 'site author',
"base_url": "site base_url",
"title": "site title",
"description": "site description",
"author": "site author",
}
env = blag.environment_factory('templates', globals_=dict(site=site))
env = blag.environment_factory("templates", globals_=dict(site=site))
yield env
@pytest.fixture
def page_template(environment: Environment) -> Iterator[Template]:
yield environment.get_template('page.html')
yield environment.get_template("page.html")
@pytest.fixture
def article_template(environment: Environment) -> Iterator[Template]:
yield environment.get_template('article.html')
yield environment.get_template("article.html")
@pytest.fixture
def index_template(environment: Environment) -> Iterator[Template]:
yield environment.get_template('index.html')
yield environment.get_template("index.html")
@pytest.fixture
def archive_template(environment: Environment) -> Iterator[Template]:
yield environment.get_template('archive.html')
yield environment.get_template("archive.html")
@pytest.fixture
def tags_template(environment: Environment) -> Iterator[Template]:
yield environment.get_template('tags.html')
yield environment.get_template("tags.html")
@pytest.fixture
def tag_template(environment: Environment) -> Iterator[Template]:
yield environment.get_template('tag.html')
yield environment.get_template("tag.html")
@pytest.fixture
@@ -65,8 +66,8 @@ author = a. u. thor
"""
with TemporaryDirectory() as dir:
os.mkdir(f'{dir}/build')
with open(f'{dir}/config.ini', 'w') as fh:
os.mkdir(f"{dir}/build")
with open(f"{dir}/config.ini", "w") as fh:
fh.write(config)
# change directory
old_cwd = os.getcwd()
@@ -79,11 +80,10 @@ author = a. u. thor
@pytest.fixture
def args(cleandir: Callable[[], Iterator[str]]) -> Iterator[Namespace]:
args = Namespace(
input_dir='content',
output_dir='build',
static_dir='static',
template_dir='templates',
input_dir="content",
output_dir="build",
static_dir="static",
template_dir="templates",
)
yield args

View File

@@ -1,73 +1,73 @@
# remove when we don't support py38 anymore
from __future__ import annotations
from tempfile import TemporaryDirectory
import os
from datetime import datetime
from typing import Any
from argparse import Namespace
from datetime import datetime
from tempfile import TemporaryDirectory
from typing import Any
import pytest
from pytest import CaptureFixture, LogCaptureFixture
from jinja2 import Template
from pytest import CaptureFixture, LogCaptureFixture
from blag import __VERSION__
from blag import blag
from blag import __VERSION__, blag
def test_generate_feed(cleandir: str) -> None:
articles: list[tuple[str, dict[str, Any]]] = []
blag.generate_feed(articles, 'build', ' ', ' ', ' ', ' ')
assert os.path.exists('build/atom.xml')
blag.generate_feed(articles, "build", " ", " ", " ", " ")
assert os.path.exists("build/atom.xml")
def test_feed(cleandir: str) -> None:
articles: list[tuple[str, dict[str, Any]]] = [
(
'dest1.html',
"dest1.html",
{
'title': 'title1',
'date': datetime(2019, 6, 6),
'content': 'content1',
"title": "title1",
"date": datetime(2019, 6, 6),
"content": "content1",
},
),
(
'dest2.html',
"dest2.html",
{
'title': 'title2',
'date': datetime(1980, 5, 9),
'content': 'content2',
"title": "title2",
"date": datetime(1980, 5, 9),
"content": "content2",
},
),
]
blag.generate_feed(
articles,
'build',
'https://example.com/',
'blog title',
'blog description',
'blog author',
"build",
"https://example.com/",
"blog title",
"blog description",
"blog author",
)
with open('build/atom.xml') as fh:
with open("build/atom.xml") as fh:
feed = fh.read()
assert '<title>blog title</title>' in feed
assert "<title>blog title</title>" in feed
# enable when https://github.com/getpelican/feedgenerator/issues/22
# is fixed
# assert '<subtitle>blog description</subtitle>' in feed
assert '<author><name>blog author</name></author>' in feed
assert "<author><name>blog author</name></author>" in feed
# article 1
assert '<title>title1</title>' in feed
assert "<title>title1</title>" in feed
assert '<summary type="html">title1' in feed
assert '<published>2019-06-06' in feed
assert "<published>2019-06-06" in feed
assert '<content type="html">content1' in feed
assert '<link href="https://example.com/dest1.html"' in feed
# article 2
assert '<title>title2</title>' in feed
assert "<title>title2</title>" in feed
assert '<summary type="html">title2' in feed
assert '<published>1980-05-09' in feed
assert "<published>1980-05-09" in feed
assert '<content type="html">content2' in feed
assert '<link href="https://example.com/dest2.html"' in feed
@@ -77,57 +77,57 @@ def test_generate_feed_with_description(cleandir: str) -> None:
# the feed, otherwise we simply use the title of the article
articles: list[tuple[str, dict[str, Any]]] = [
(
'dest.html',
"dest.html",
{
'title': 'title',
'description': 'description',
'date': datetime(2019, 6, 6),
'content': 'content',
"title": "title",
"description": "description",
"date": datetime(2019, 6, 6),
"content": "content",
},
)
]
blag.generate_feed(articles, 'build', ' ', ' ', ' ', ' ')
blag.generate_feed(articles, "build", " ", " ", " ", " ")
with open('build/atom.xml') as fh:
with open("build/atom.xml") as fh:
feed = fh.read()
assert '<title>title</title>' in feed
assert "<title>title</title>" in feed
assert '<summary type="html">description' in feed
assert '<published>2019-06-06' in feed
assert "<published>2019-06-06" in feed
assert '<content type="html">content' in feed
def test_parse_args_build() -> None:
# test default args
args = blag.parse_args(['build'])
assert args.input_dir == 'content'
assert args.output_dir == 'build'
assert args.template_dir == 'templates'
assert args.static_dir == 'static'
args = blag.parse_args(["build"])
assert args.input_dir == "content"
assert args.output_dir == "build"
assert args.template_dir == "templates"
assert args.static_dir == "static"
# input dir
args = blag.parse_args(['build', '-i', 'foo'])
assert args.input_dir == 'foo'
args = blag.parse_args(['build', '--input-dir', 'foo'])
assert args.input_dir == 'foo'
args = blag.parse_args(["build", "-i", "foo"])
assert args.input_dir == "foo"
args = blag.parse_args(["build", "--input-dir", "foo"])
assert args.input_dir == "foo"
# output dir
args = blag.parse_args(['build', '-o', 'foo'])
assert args.output_dir == 'foo'
args = blag.parse_args(['build', '--output-dir', 'foo'])
assert args.output_dir == 'foo'
args = blag.parse_args(["build", "-o", "foo"])
assert args.output_dir == "foo"
args = blag.parse_args(["build", "--output-dir", "foo"])
assert args.output_dir == "foo"
# template dir
args = blag.parse_args(['build', '-t', 'foo'])
assert args.template_dir == 'foo'
args = blag.parse_args(['build', '--template-dir', 'foo'])
assert args.template_dir == 'foo'
args = blag.parse_args(["build", "-t", "foo"])
assert args.template_dir == "foo"
args = blag.parse_args(["build", "--template-dir", "foo"])
assert args.template_dir == "foo"
# static dir
args = blag.parse_args(['build', '-s', 'foo'])
assert args.static_dir == 'foo'
args = blag.parse_args(['build', '--static-dir', 'foo'])
assert args.static_dir == 'foo'
args = blag.parse_args(["build", "-s", "foo"])
assert args.static_dir == "foo"
args = blag.parse_args(["build", "--static-dir", "foo"])
assert args.static_dir == "foo"
def test_get_config() -> None:
@@ -140,24 +140,24 @@ author = a. u. thor
"""
# happy path
with TemporaryDirectory() as dir:
configfile = f'{dir}/config.ini'
with open(configfile, 'w') as fh:
configfile = f"{dir}/config.ini"
with open(configfile, "w") as fh:
fh.write(config)
config_parsed = blag.get_config(configfile)
assert config_parsed['base_url'] == 'https://example.com/'
assert config_parsed['title'] == 'title'
assert config_parsed['description'] == 'description'
assert config_parsed['author'] == 'a. u. thor'
assert config_parsed["base_url"] == "https://example.com/"
assert config_parsed["title"] == "title"
assert config_parsed["description"] == "description"
assert config_parsed["author"] == "a. u. thor"
# a missing required config causes a sys.exit
for x in 'base_url', 'title', 'description', 'author':
config2 = '\n'.join(
for x in "base_url", "title", "description", "author":
config2 = "\n".join(
[line for line in config.splitlines() if not line.startswith(x)]
)
with TemporaryDirectory() as dir:
configfile = f'{dir}/config.ini'
with open(configfile, 'w') as fh:
configfile = f"{dir}/config.ini"
with open(configfile, "w") as fh:
fh.write(config2)
with pytest.raises(SystemExit):
config_parsed = blag.get_config(configfile)
@@ -171,19 +171,19 @@ description = description
author = a. u. thor
"""
with TemporaryDirectory() as dir:
configfile = f'{dir}/config.ini'
with open(configfile, 'w') as fh:
configfile = f"{dir}/config.ini"
with open(configfile, "w") as fh:
fh.write(config)
config_parsed = blag.get_config(configfile)
assert config_parsed['base_url'] == 'https://example.com/'
assert config_parsed["base_url"] == "https://example.com/"
def test_environment_factory(cleandir: str) -> None:
globals_: dict[str, object] = {'foo': 'bar', 'test': 'me'}
globals_: dict[str, object] = {"foo": "bar", "test": "me"}
env = blag.environment_factory("templates", globals_=globals_)
assert env.globals['foo'] == 'bar'
assert env.globals['test'] == 'me'
assert env.globals["foo"] == "bar"
assert env.globals["test"] == "me"
def test_process_markdown(
@@ -216,12 +216,12 @@ foo bar
convertibles = []
for i, txt in enumerate((page1, article1, article2)):
with open(f'content/{str(i)}', 'w') as fh:
with open(f"content/{str(i)}", "w") as fh:
fh.write(txt)
convertibles.append((str(i), str(i)))
articles, pages = blag.process_markdown(
convertibles, 'content', 'build', page_template, article_template
convertibles, "content", "build", page_template, article_template
)
assert isinstance(articles, list)
@@ -229,14 +229,14 @@ foo bar
for dst, context in articles:
assert isinstance(dst, str)
assert isinstance(context, dict)
assert 'content' in context
assert "content" in context
assert isinstance(pages, list)
assert len(pages) == 1
for dst, context in pages:
assert isinstance(dst, str)
assert isinstance(context, dict)
assert 'content' in context
assert "content" in context
def test_build(args: Namespace) -> None:
@@ -268,63 +268,63 @@ foo bar
# write some convertibles
convertibles = []
for i, txt in enumerate((page1, article1, article2)):
with open(f'{args.input_dir}/{str(i)}.md', 'w') as fh:
with open(f"{args.input_dir}/{str(i)}.md", "w") as fh:
fh.write(txt)
convertibles.append((str(i), str(i)))
# some static files
with open(f'{args.static_dir}/test', 'w') as fh:
fh.write('hello')
with open(f"{args.static_dir}/test", "w") as fh:
fh.write("hello")
os.mkdir(f'{args.input_dir}/testdir')
with open(f'{args.input_dir}/testdir/test', 'w') as fh:
fh.write('hello')
os.mkdir(f"{args.input_dir}/testdir")
with open(f"{args.input_dir}/testdir/test", "w") as fh:
fh.write("hello")
blag.build(args)
# test existence of the three converted files
for i in range(3):
assert os.path.exists(f'{args.output_dir}/{i}.html')
assert os.path.exists(f"{args.output_dir}/{i}.html")
# ... static file
assert os.path.exists(f'{args.output_dir}/test')
assert os.path.exists(f"{args.output_dir}/test")
# ... directory
assert os.path.exists(f'{args.output_dir}/testdir/test')
assert os.path.exists(f"{args.output_dir}/testdir/test")
# ... feed
assert os.path.exists(f'{args.output_dir}/atom.xml')
assert os.path.exists(f"{args.output_dir}/atom.xml")
# ... index
assert os.path.exists(f'{args.output_dir}/index.html')
assert os.path.exists(f"{args.output_dir}/index.html")
# ... archive
assert os.path.exists(f'{args.output_dir}/archive.html')
assert os.path.exists(f"{args.output_dir}/archive.html")
# ... tags
assert os.path.exists(f'{args.output_dir}/tags/index.html')
assert os.path.exists(f'{args.output_dir}/tags/foo.html')
assert os.path.exists(f'{args.output_dir}/tags/bar.html')
assert os.path.exists(f"{args.output_dir}/tags/index.html")
assert os.path.exists(f"{args.output_dir}/tags/foo.html")
assert os.path.exists(f"{args.output_dir}/tags/bar.html")
@pytest.mark.parametrize(
'template',
"template",
[
'page.html',
'article.html',
'index.html',
'archive.html',
'tags.html',
'tag.html',
]
"page.html",
"article.html",
"index.html",
"archive.html",
"tags.html",
"tag.html",
],
)
def test_missing_template_raises(template: str, args: Namespace) -> None:
os.remove(f'templates/{template}')
os.remove(f"templates/{template}")
with pytest.raises(SystemExit):
blag.build(args)
def test_main(cleandir: str) -> None:
blag.main(['build'])
blag.main(["build"])
def test_cli_version(capsys: CaptureFixture[str]) -> None:
with pytest.raises(SystemExit) as ex:
blag.main(['--version'])
blag.main(["--version"])
# normal system exit
assert ex.value.code == 0
# proper version reported
@@ -333,8 +333,8 @@ def test_cli_version(capsys: CaptureFixture[str]) -> None:
def test_cli_verbose(cleandir: str, caplog: LogCaptureFixture) -> None:
blag.main(['build'])
assert 'DEBUG' not in caplog.text
blag.main(["build"])
assert "DEBUG" not in caplog.text
blag.main(['--verbose', 'build'])
assert 'DEBUG' in caplog.text
blag.main(["--verbose", "build"])
assert "DEBUG" in caplog.text

View File

@@ -1,7 +1,8 @@
# remove when we don't support py38 anymore
from __future__ import annotations
import time
import threading
import time
from argparse import Namespace
import pytest
@@ -11,17 +12,17 @@ from blag import devserver
def test_get_last_modified(cleandir: str) -> None:
# take initial time
t1 = devserver.get_last_modified(['content'])
t1 = devserver.get_last_modified(["content"])
# wait a bit, create a file and measure again
time.sleep(0.1)
with open('content/test', 'w') as fh:
fh.write('boo')
t2 = devserver.get_last_modified(['content'])
with open("content/test", "w") as fh:
fh.write("boo")
t2 = devserver.get_last_modified(["content"])
# wait a bit and take time again
time.sleep(0.1)
t3 = devserver.get_last_modified(['content'])
t3 = devserver.get_last_modified(["content"])
assert t2 > t1
assert t2 == t3
@@ -29,20 +30,20 @@ def test_get_last_modified(cleandir: str) -> None:
def test_autoreload_builds_immediately(args: Namespace) -> None:
# create a dummy file that can be build
with open('content/test.md', 'w') as fh:
fh.write('boo')
with open("content/test.md", "w") as fh:
fh.write("boo")
t = threading.Thread(
target=devserver.autoreload,
args=(args,),
daemon=True,
)
t0 = devserver.get_last_modified(['build'])
t0 = devserver.get_last_modified(["build"])
t.start()
# try for 5 seconds...
for i in range(5):
time.sleep(1)
t1 = devserver.get_last_modified(['build'])
t1 = devserver.get_last_modified(["build"])
print(t1)
if t1 > t0:
break
@@ -60,16 +61,16 @@ def test_autoreload(args: Namespace) -> None:
)
t.start()
t0 = devserver.get_last_modified(['build'])
t0 = devserver.get_last_modified(["build"])
# create a dummy file that can be build
with open('content/test.md', 'w') as fh:
fh.write('boo')
with open("content/test.md", "w") as fh:
fh.write("boo")
# try for 5 seconds to see if we rebuild once...
for i in range(5):
time.sleep(1)
t1 = devserver.get_last_modified(['build'])
t1 = devserver.get_last_modified(["build"])
if t1 > t0:
break
assert t1 > t0

View File

@@ -1,10 +1,11 @@
# remove when we don't support py38 anymore
from __future__ import annotations
from datetime import datetime
from typing import Any
import pytest
import markdown
import pytest
from blag.markdown import convert_markdown, markdown_factory
@@ -13,23 +14,23 @@ from blag.markdown import convert_markdown, markdown_factory
"input_, expected",
[
# inline
('[test](test.md)', 'test.html'),
('[test](test.md "test")', 'test.html'),
('[test](a/test.md)', 'a/test.html'),
('[test](a/test.md "test")', 'a/test.html'),
('[test](/test.md)', '/test.html'),
('[test](/test.md "test")', '/test.html'),
('[test](/a/test.md)', '/a/test.html'),
('[test](/a/test.md "test")', '/a/test.html'),
("[test](test.md)", "test.html"),
('[test](test.md "test")', "test.html"),
("[test](a/test.md)", "a/test.html"),
('[test](a/test.md "test")', "a/test.html"),
("[test](/test.md)", "/test.html"),
('[test](/test.md "test")', "/test.html"),
("[test](/a/test.md)", "/a/test.html"),
('[test](/a/test.md "test")', "/a/test.html"),
# reference
('[test][]\n[test]: test.md ' '', 'test.html'),
('[test][]\n[test]: test.md "test"', 'test.html'),
('[test][]\n[test]: a/test.md', 'a/test.html'),
('[test][]\n[test]: a/test.md "test"', 'a/test.html'),
('[test][]\n[test]: /test.md', '/test.html'),
('[test][]\n[test]: /test.md "test"', '/test.html'),
('[test][]\n[test]: /a/test.md', '/a/test.html'),
('[test][]\n[test]: /a/test.md "test"', '/a/test.html'),
("[test][]\n[test]: test.md " "", "test.html"),
('[test][]\n[test]: test.md "test"', "test.html"),
("[test][]\n[test]: a/test.md", "a/test.html"),
('[test][]\n[test]: a/test.md "test"', "a/test.html"),
("[test][]\n[test]: /test.md", "/test.html"),
('[test][]\n[test]: /test.md "test"', "/test.html"),
("[test][]\n[test]: /a/test.md", "/a/test.html"),
('[test][]\n[test]: /a/test.md "test"', "/a/test.html"),
],
)
def test_convert_markdown_links(input_: str, expected: str) -> None:
@@ -42,11 +43,11 @@ def test_convert_markdown_links(input_: str, expected: str) -> None:
"input_, expected",
[
# scheme
('[test](https://)', 'https://'),
("[test](https://)", "https://"),
# netloc
('[test](//test.md)', '//test.md'),
("[test](//test.md)", "//test.md"),
# no path
('[test]()', ''),
("[test]()", ""),
],
)
def test_dont_convert_normal_links(input_: str, expected: str) -> None:
@@ -58,13 +59,13 @@ def test_dont_convert_normal_links(input_: str, expected: str) -> None:
@pytest.mark.parametrize(
"input_, expected",
[
('foo: bar', {'foo': 'bar'}),
('foo: those are several words', {'foo': 'those are several words'}),
('tags: this, is, a, test\n', {'tags': ['this', 'is', 'a', 'test']}),
('tags: this, IS, a, test', {'tags': ['this', 'is', 'a', 'test']}),
("foo: bar", {"foo": "bar"}),
("foo: those are several words", {"foo": "those are several words"}),
("tags: this, is, a, test\n", {"tags": ["this", "is", "a", "test"]}),
("tags: this, IS, a, test", {"tags": ["this", "is", "a", "test"]}),
(
'date: 2020-01-01 12:10',
{'date': datetime(2020, 1, 1, 12, 10).astimezone()},
"date: 2020-01-01 12:10",
{"date": datetime(2020, 1, 1, 12, 10).astimezone()},
),
],
)
@@ -88,9 +89,9 @@ this --- is -- a test ...
"""
html, meta = convert_markdown(md, md1)
assert 'mdash' in html
assert 'ndash' in html
assert 'hellip' in html
assert "mdash" in html
assert "ndash" in html
assert "hellip" in html
def test_smarty_code() -> None:
@@ -102,6 +103,6 @@ this --- is -- a test ...
```
"""
html, meta = convert_markdown(md, md1)
assert 'mdash' not in html
assert 'ndash' not in html
assert 'hellip' not in html
assert "mdash" not in html
assert "ndash" not in html
assert "hellip" not in html

View File

@@ -1,5 +1,6 @@
# remove when we don't support py38 anymore
from __future__ import annotations
import os
from pytest import MonkeyPatch
@@ -8,26 +9,26 @@ from blag.quickstart import get_input, quickstart
def test_get_input_default_answer(monkeypatch: MonkeyPatch) -> None:
monkeypatch.setattr('builtins.input', lambda x: '')
monkeypatch.setattr("builtins.input", lambda x: "")
answer = get_input("foo", "bar")
assert answer == 'bar'
assert answer == "bar"
def test_get_input(monkeypatch: MonkeyPatch) -> None:
monkeypatch.setattr('builtins.input', lambda x: 'baz')
monkeypatch.setattr("builtins.input", lambda x: "baz")
answer = get_input("foo", "bar")
assert answer == 'baz'
assert answer == "baz"
def test_quickstart(cleandir: str, monkeypatch: MonkeyPatch) -> None:
monkeypatch.setattr('builtins.input', lambda x: 'foo')
monkeypatch.setattr("builtins.input", lambda x: "foo")
quickstart(None)
with open('config.ini', 'r') as fh:
with open("config.ini", "r") as fh:
data = fh.read()
assert 'base_url = foo' in data
assert 'title = foo' in data
assert 'description = foo' in data
assert 'author = foo' in data
assert "base_url = foo" in data
assert "title = foo" in data
assert "description = foo" in data
assert "author = foo" in data
for template in (
"archive.html",
@@ -38,7 +39,7 @@ def test_quickstart(cleandir: str, monkeypatch: MonkeyPatch) -> None:
"tag.html",
"tags.html",
):
assert os.path.exists(f'templates/{template}')
assert os.path.exists(f"templates/{template}")
for directory in "build", "content", "static":
assert os.path.exists(directory)

View File

@@ -1,5 +1,6 @@
# remove when we don't support py38 anymore
from __future__ import annotations
import datetime
from jinja2 import Template
@@ -7,91 +8,91 @@ from jinja2 import Template
def test_page(page_template: Template) -> None:
ctx = {
'content': 'this is the content',
'title': 'this is the title',
"content": "this is the content",
"title": "this is the title",
}
result = page_template.render(ctx)
assert 'this is the content' in result
assert 'this is the title' in result
assert "this is the content" in result
assert "this is the title" in result
def test_article(article_template: Template) -> None:
ctx = {
'content': 'this is the content',
'title': 'this is the title',
'date': datetime.datetime(1980, 5, 9),
"content": "this is the content",
"title": "this is the title",
"date": datetime.datetime(1980, 5, 9),
}
result = article_template.render(ctx)
assert 'this is the content' in result
assert 'this is the title' in result
assert '1980-05-09' in result
assert "this is the content" in result
assert "this is the title" in result
assert "1980-05-09" in result
def test_index(index_template: Template) -> None:
entry = {
'title': 'this is a title',
'dst': 'https://example.com/link',
'date': datetime.datetime(1980, 5, 9),
"title": "this is a title",
"dst": "https://example.com/link",
"date": datetime.datetime(1980, 5, 9),
}
archive = [entry]
ctx = {
'archive': archive,
"archive": archive,
}
result = index_template.render(ctx)
assert 'site title' in result
assert "site title" in result
assert 'this is a title' in result
assert '1980-05-09' in result
assert 'https://example.com/link' in result
assert "this is a title" in result
assert "1980-05-09" in result
assert "https://example.com/link" in result
assert '/archive.html' in result
assert "/archive.html" in result
def test_archive(archive_template: Template) -> None:
entry = {
'title': 'this is a title',
'dst': 'https://example.com/link',
'date': datetime.datetime(1980, 5, 9),
"title": "this is a title",
"dst": "https://example.com/link",
"date": datetime.datetime(1980, 5, 9),
}
archive = [entry]
ctx = {
'archive': archive,
"archive": archive,
}
result = archive_template.render(ctx)
assert 'Archive' in result
assert "Archive" in result
assert 'this is a title' in result
assert '1980-05-09' in result
assert 'https://example.com/link' in result
assert "this is a title" in result
assert "1980-05-09" in result
assert "https://example.com/link" in result
def test_tags(tags_template: Template) -> None:
tags = [('foo', 42)]
tags = [("foo", 42)]
ctx = {
'tags': tags,
"tags": tags,
}
result = tags_template.render(ctx)
assert 'Tags' in result
assert "Tags" in result
assert 'foo.html' in result
assert 'foo' in result
assert '42' in result
assert "foo.html" in result
assert "foo" in result
assert "42" in result
def test_tag(tag_template: Template) -> None:
entry = {
'title': 'this is a title',
'dst': 'https://example.com/link',
'date': datetime.datetime(1980, 5, 9),
"title": "this is a title",
"dst": "https://example.com/link",
"date": datetime.datetime(1980, 5, 9),
}
archive = [entry]
ctx = {
'tag': 'foo',
'archive': archive,
"tag": "foo",
"archive": archive,
}
result = tag_template.render(ctx)
assert 'foo' in result
assert "foo" in result
assert 'this is a title' in result
assert '1980-05-09' in result
assert 'https://example.com/link' in result
assert "this is a title" in result
assert "1980-05-09" in result
assert "https://example.com/link" in result