mirror of https://github.com/venthur/blag.git synced 2025-11-25 20:52:43 +00:00

reformatted code

Author: Bastian Venthur
Date:   2023-06-16 10:25:21 +02:00
Commit: 0349bd3359 (parent f6c5eaf375)

11 changed files with 381 additions and 380 deletions
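
The change is purely cosmetic: string literals switch from single to double quotes and imports are re-sorted, neither of which affects runtime behavior. The commit does not name the formatter; the resulting style matches what black and isort would produce, but that is an assumption. A minimal sketch of why the quote swap is behavior-neutral:

    # Quote style does not affect the compiled string literal, so the
    # single-to-double-quote rewrite in this commit changes no behavior.
    assert 'templates' == "templates"
    assert '%(asctime)s %(levelname)s %(name)s %(message)s' == (
        "%(asctime)s %(levelname)s %(name)s %(message)s"
    )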

View File

@@ -6,32 +6,28 @@
# remove when we don't support py38 anymore # remove when we don't support py38 anymore
from __future__ import annotations from __future__ import annotations
from typing import Any
import argparse import argparse
import configparser
import logging
import os import os
import shutil import shutil
import logging
import configparser
import sys import sys
from typing import Any
from jinja2 import (
Environment,
FileSystemLoader,
Template,
TemplateNotFound,
)
import feedgenerator import feedgenerator
from jinja2 import Environment, FileSystemLoader, Template, TemplateNotFound
import blag import blag
from blag.markdown import markdown_factory, convert_markdown
from blag.devserver import serve from blag.devserver import serve
from blag.version import __VERSION__ from blag.markdown import convert_markdown, markdown_factory
from blag.quickstart import quickstart from blag.quickstart import quickstart
from blag.version import __VERSION__
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
logging.basicConfig( logging.basicConfig(
level=logging.INFO, level=logging.INFO,
format='%(asctime)s %(levelname)s %(name)s %(message)s', format="%(asctime)s %(levelname)s %(name)s %(message)s",
) )
@@ -70,84 +66,84 @@ def parse_args(args: list[str] | None = None) -> argparse.Namespace:
""" """
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument( parser.add_argument(
'--version', "--version",
action='version', action="version",
version='%(prog)s ' + __VERSION__, version="%(prog)s " + __VERSION__,
) )
parser.add_argument( parser.add_argument(
'-v', "-v",
'--verbose', "--verbose",
action='store_true', action="store_true",
help='Verbose output.', help="Verbose output.",
) )
commands = parser.add_subparsers(dest='command') commands = parser.add_subparsers(dest="command")
commands.required = True commands.required = True
build_parser = commands.add_parser( build_parser = commands.add_parser(
'build', "build",
help='Build website.', help="Build website.",
) )
build_parser.set_defaults(func=build) build_parser.set_defaults(func=build)
build_parser.add_argument( build_parser.add_argument(
'-i', "-i",
'--input-dir', "--input-dir",
default='content', default="content",
help='Input directory (default: content)', help="Input directory (default: content)",
) )
build_parser.add_argument( build_parser.add_argument(
'-o', "-o",
'--output-dir', "--output-dir",
default='build', default="build",
help='Ouptut directory (default: build)', help="Ouptut directory (default: build)",
) )
build_parser.add_argument( build_parser.add_argument(
'-t', "-t",
'--template-dir', "--template-dir",
default='templates', default="templates",
help='Template directory (default: templates)', help="Template directory (default: templates)",
) )
build_parser.add_argument( build_parser.add_argument(
'-s', "-s",
'--static-dir', "--static-dir",
default='static', default="static",
help='Static directory (default: static)', help="Static directory (default: static)",
) )
quickstart_parser = commands.add_parser( quickstart_parser = commands.add_parser(
'quickstart', "quickstart",
help="Quickstart blag, creating necessary configuration.", help="Quickstart blag, creating necessary configuration.",
) )
quickstart_parser.set_defaults(func=quickstart) quickstart_parser.set_defaults(func=quickstart)
serve_parser = commands.add_parser( serve_parser = commands.add_parser(
'serve', "serve",
help="Start development server.", help="Start development server.",
) )
serve_parser.set_defaults(func=serve) serve_parser.set_defaults(func=serve)
serve_parser.add_argument( serve_parser.add_argument(
'-i', "-i",
'--input-dir', "--input-dir",
default='content', default="content",
help='Input directory (default: content)', help="Input directory (default: content)",
) )
serve_parser.add_argument( serve_parser.add_argument(
'-o', "-o",
'--output-dir', "--output-dir",
default='build', default="build",
help='Ouptut directory (default: build)', help="Ouptut directory (default: build)",
) )
serve_parser.add_argument( serve_parser.add_argument(
'-t', "-t",
'--template-dir', "--template-dir",
default='templates', default="templates",
help='Template directory (default: templates)', help="Template directory (default: templates)",
) )
serve_parser.add_argument( serve_parser.add_argument(
'-s', "-s",
'--static-dir', "--static-dir",
default='static', default="static",
help='Static directory (default: static)', help="Static directory (default: static)",
) )
return parser.parse_args(args) return parser.parse_args(args)
@@ -170,18 +166,18 @@ def get_config(configfile: str) -> configparser.SectionProxy:
config = configparser.ConfigParser() config = configparser.ConfigParser()
config.read(configfile) config.read(configfile)
# check for the mandatory options # check for the mandatory options
for value in 'base_url', 'title', 'description', 'author': for value in "base_url", "title", "description", "author":
try: try:
config['main'][value] config["main"][value]
except Exception: except Exception:
print(f'{value} is missing in {configfile}!') print(f"{value} is missing in {configfile}!")
sys.exit(1) sys.exit(1)
if not config['main']['base_url'].endswith('/'): if not config["main"]["base_url"].endswith("/"):
logger.warning('base_url does not end with a slash, adding it.') logger.warning("base_url does not end with a slash, adding it.")
config['main']['base_url'] += '/' config["main"]["base_url"] += "/"
return config['main'] return config["main"]
def environment_factory( def environment_factory(
@@ -222,51 +218,51 @@ def build(args: argparse.Namespace) -> None:
args args
""" """
os.makedirs(f'{args.output_dir}', exist_ok=True) os.makedirs(f"{args.output_dir}", exist_ok=True)
convertibles = [] convertibles = []
for root, dirnames, filenames in os.walk(args.input_dir): for root, dirnames, filenames in os.walk(args.input_dir):
for filename in filenames: for filename in filenames:
rel_src = os.path.relpath( rel_src = os.path.relpath(
f'{root}/{filename}', start=args.input_dir f"{root}/{filename}", start=args.input_dir
) )
# all non-markdown files are just copied over, the markdown # all non-markdown files are just copied over, the markdown
# files are converted to html # files are converted to html
if rel_src.endswith('.md'): if rel_src.endswith(".md"):
rel_dst = rel_src rel_dst = rel_src
rel_dst = rel_dst[:-3] + '.html' rel_dst = rel_dst[:-3] + ".html"
convertibles.append((rel_src, rel_dst)) convertibles.append((rel_src, rel_dst))
else: else:
shutil.copy( shutil.copy(
f'{args.input_dir}/{rel_src}', f"{args.input_dir}/{rel_src}",
f'{args.output_dir}/{rel_src}', f"{args.output_dir}/{rel_src}",
) )
for dirname in dirnames: for dirname in dirnames:
# all directories are copied into the output directory # all directories are copied into the output directory
path = os.path.relpath(f'{root}/{dirname}', start=args.input_dir) path = os.path.relpath(f"{root}/{dirname}", start=args.input_dir)
os.makedirs(f'{args.output_dir}/{path}', exist_ok=True) os.makedirs(f"{args.output_dir}/{path}", exist_ok=True)
# copy static files over # copy static files over
logger.info('Copying static files.') logger.info("Copying static files.")
if os.path.exists(args.static_dir): if os.path.exists(args.static_dir):
shutil.copytree(args.static_dir, args.output_dir, dirs_exist_ok=True) shutil.copytree(args.static_dir, args.output_dir, dirs_exist_ok=True)
config = get_config('config.ini') config = get_config("config.ini")
env = environment_factory(args.template_dir, dict(site=config)) env = environment_factory(args.template_dir, dict(site=config))
try: try:
page_template = env.get_template('page.html') page_template = env.get_template("page.html")
article_template = env.get_template('article.html') article_template = env.get_template("article.html")
index_template = env.get_template('index.html') index_template = env.get_template("index.html")
archive_template = env.get_template('archive.html') archive_template = env.get_template("archive.html")
tags_template = env.get_template('tags.html') tags_template = env.get_template("tags.html")
tag_template = env.get_template('tag.html') tag_template = env.get_template("tag.html")
except TemplateNotFound as exc: except TemplateNotFound as exc:
tmpl = os.path.join(blag.__path__[0], 'templates') tmpl = os.path.join(blag.__path__[0], "templates")
logger.error( logger.error(
f'Template "{exc.name}" not found in {args.template_dir}! ' f'Template "{exc.name}" not found in {args.template_dir}! '
'Consider running `blag quickstart` or copying the ' "Consider running `blag quickstart` or copying the "
f'missing template from {tmpl}.' f"missing template from {tmpl}."
) )
sys.exit(1) sys.exit(1)
@@ -282,10 +278,10 @@ def build(args: argparse.Namespace) -> None:
generate_feed( generate_feed(
articles, articles,
args.output_dir, args.output_dir,
base_url=config['base_url'], base_url=config["base_url"],
blog_title=config['title'], blog_title=config["title"],
blog_description=config['description'], blog_description=config["description"],
blog_author=config['author'], blog_author=config["author"],
) )
generate_index(articles, index_template, args.output_dir) generate_index(articles, index_template, args.output_dir)
generate_archive(articles, archive_template, args.output_dir) generate_archive(articles, archive_template, args.output_dir)
@@ -330,9 +326,9 @@ def process_markdown(
articles = [] articles = []
pages = [] pages = []
for src, dst in convertibles: for src, dst in convertibles:
logger.debug(f'Processing {src}') logger.debug(f"Processing {src}")
with open(f'{input_dir}/{src}', 'r') as fh: with open(f"{input_dir}/{src}", "r") as fh:
body = fh.read() body = fh.read()
content, meta = convert_markdown(md, body) content, meta = convert_markdown(md, body)
@@ -342,17 +338,17 @@ def process_markdown(
# if markdown has date in meta, we treat it as a blog article, # if markdown has date in meta, we treat it as a blog article,
# everything else are just pages # everything else are just pages
if meta and 'date' in meta: if meta and "date" in meta:
articles.append((dst, context)) articles.append((dst, context))
result = article_template.render(context) result = article_template.render(context)
else: else:
pages.append((dst, context)) pages.append((dst, context))
result = page_template.render(context) result = page_template.render(context)
with open(f'{output_dir}/{dst}', 'w') as fh_dest: with open(f"{output_dir}/{dst}", "w") as fh_dest:
fh_dest.write(result) fh_dest.write(result)
# sort articles by date, descending # sort articles by date, descending
articles = sorted(articles, key=lambda x: x[1]['date'], reverse=True) articles = sorted(articles, key=lambda x: x[1]["date"], reverse=True)
return articles, pages return articles, pages
@@ -382,30 +378,30 @@ def generate_feed(
blog author blog author
""" """
logger.info('Generating Atom feed.') logger.info("Generating Atom feed.")
feed = feedgenerator.Atom1Feed( feed = feedgenerator.Atom1Feed(
link=base_url, link=base_url,
title=blog_title, title=blog_title,
description=blog_description, description=blog_description,
feed_url=base_url + 'atom.xml', feed_url=base_url + "atom.xml",
) )
for dst, context in articles: for dst, context in articles:
# if article has a description, use that. otherwise fall back to # if article has a description, use that. otherwise fall back to
# the title # the title
description = context.get('description', context['title']) description = context.get("description", context["title"])
feed.add_item( feed.add_item(
title=context['title'], title=context["title"],
author_name=blog_author, author_name=blog_author,
link=base_url + dst, link=base_url + dst,
description=description, description=description,
content=context['content'], content=context["content"],
pubdate=context['date'], pubdate=context["date"],
) )
with open(f'{output_dir}/atom.xml', 'w') as fh: with open(f"{output_dir}/atom.xml", "w") as fh:
feed.write(fh, encoding='utf8') feed.write(fh, encoding="utf8")
def generate_index( def generate_index(
@@ -429,11 +425,11 @@ def generate_index(
archive = [] archive = []
for dst, context in articles: for dst, context in articles:
entry = context.copy() entry = context.copy()
entry['dst'] = dst entry["dst"] = dst
archive.append(entry) archive.append(entry)
result = template.render(dict(archive=archive)) result = template.render(dict(archive=archive))
with open(f'{output_dir}/index.html', 'w') as fh: with open(f"{output_dir}/index.html", "w") as fh:
fh.write(result) fh.write(result)
@@ -458,11 +454,11 @@ def generate_archive(
archive = [] archive = []
for dst, context in articles: for dst, context in articles:
entry = context.copy() entry = context.copy()
entry['dst'] = dst entry["dst"] = dst
archive.append(entry) archive.append(entry)
result = template.render(dict(archive=archive)) result = template.render(dict(archive=archive))
with open(f'{output_dir}/archive.html', 'w') as fh: with open(f"{output_dir}/archive.html", "w") as fh:
fh.write(result) fh.write(result)
@@ -484,11 +480,11 @@ def generate_tags(
""" """
logger.info("Generating Tag-pages.") logger.info("Generating Tag-pages.")
os.makedirs(f'{output_dir}/tags', exist_ok=True) os.makedirs(f"{output_dir}/tags", exist_ok=True)
# get tags number of occurrences # get tags number of occurrences
all_tags: dict[str, int] = {} all_tags: dict[str, int] = {}
for _, context in articles: for _, context in articles:
tags: list[str] = context.get('tags', []) tags: list[str] = context.get("tags", [])
for tag in tags: for tag in tags:
all_tags[tag] = all_tags.get(tag, 0) + 1 all_tags[tag] = all_tags.get(tag, 0) + 1
# sort by occurrence # sort by occurrence
@@ -497,25 +493,25 @@ def generate_tags(
) )
result = tags_template.render(dict(tags=taglist)) result = tags_template.render(dict(tags=taglist))
with open(f'{output_dir}/tags/index.html', 'w') as fh: with open(f"{output_dir}/tags/index.html", "w") as fh:
fh.write(result) fh.write(result)
# get tags and archive per tag # get tags and archive per tag
all_tags2: dict[str, list[dict[str, Any]]] = {} all_tags2: dict[str, list[dict[str, Any]]] = {}
for dst, context in articles: for dst, context in articles:
tags = context.get('tags', []) tags = context.get("tags", [])
for tag in tags: for tag in tags:
archive: list[dict[str, Any]] = all_tags2.get(tag, []) archive: list[dict[str, Any]] = all_tags2.get(tag, [])
entry = context.copy() entry = context.copy()
entry['dst'] = dst entry["dst"] = dst
archive.append(entry) archive.append(entry)
all_tags2[tag] = archive all_tags2[tag] = archive
for tag, archive in all_tags2.items(): for tag, archive in all_tags2.items():
result = tag_template.render(dict(archive=archive, tag=tag)) result = tag_template.render(dict(archive=archive, tag=tag))
with open(f'{output_dir}/tags/{tag}.html', 'w') as fh: with open(f"{output_dir}/tags/{tag}.html", "w") as fh:
fh.write(result) fh.write(result)
if __name__ == '__main__': if __name__ == "__main__":
main() main()

View File

@@ -8,18 +8,18 @@ site if necessary.
# remove when we don't support py38 anymore # remove when we don't support py38 anymore
from __future__ import annotations from __future__ import annotations
from typing import NoReturn
import os
import logging
import time
import multiprocessing
from http.server import SimpleHTTPRequestHandler, HTTPServer
from functools import partial
import argparse import argparse
import logging
import multiprocessing
import os
import time
from functools import partial
from http.server import HTTPServer, SimpleHTTPRequestHandler
from typing import NoReturn
from blag import blag from blag import blag
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -69,7 +69,7 @@ def autoreload(args: argparse.Namespace) -> NoReturn:
""" """
dirs = [args.input_dir, args.template_dir, args.static_dir] dirs = [args.input_dir, args.template_dir, args.static_dir]
logger.info(f'Monitoring {dirs} for changes...') logger.info(f"Monitoring {dirs} for changes...")
# make sure we trigger the rebuild immediately when we enter the # make sure we trigger the rebuild immediately when we enter the
# loop to avoid serving stale contents # loop to avoid serving stale contents
last_mtime = 0.0 last_mtime = 0.0
@@ -77,7 +77,7 @@ def autoreload(args: argparse.Namespace) -> NoReturn:
mtime = get_last_modified(dirs) mtime = get_last_modified(dirs)
if mtime > last_mtime: if mtime > last_mtime:
last_mtime = mtime last_mtime = mtime
logger.info('Change detected, rebuilding...') logger.info("Change detected, rebuilding...")
blag.build(args) blag.build(args)
time.sleep(1) time.sleep(1)
@@ -92,7 +92,7 @@ def serve(args: argparse.Namespace) -> None:
""" """
httpd = HTTPServer( httpd = HTTPServer(
('', 8000), ("", 8000),
partial(SimpleHTTPRequestHandler, directory=args.output_dir), partial(SimpleHTTPRequestHandler, directory=args.output_dir),
) )
proc = multiprocessing.Process(target=autoreload, args=(args,)) proc = multiprocessing.Process(target=autoreload, args=(args,))

View File

@@ -7,8 +7,9 @@ processing.
# remove when we don't support py38 anymore # remove when we don't support py38 anymore
from __future__ import annotations from __future__ import annotations
from datetime import datetime
import logging import logging
from datetime import datetime
from urllib.parse import urlsplit, urlunsplit from urllib.parse import urlsplit, urlunsplit
from xml.etree.ElementTree import Element from xml.etree.ElementTree import Element
@@ -16,7 +17,6 @@ from markdown import Markdown
from markdown.extensions import Extension from markdown.extensions import Extension
from markdown.treeprocessors import Treeprocessor from markdown.treeprocessors import Treeprocessor
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -33,13 +33,13 @@ def markdown_factory() -> Markdown:
""" """
md = Markdown( md = Markdown(
extensions=[ extensions=[
'meta', "meta",
'fenced_code', "fenced_code",
'codehilite', "codehilite",
'smarty', "smarty",
MarkdownLinkExtension(), MarkdownLinkExtension(),
], ],
output_format='html', output_format="html",
) )
return md return md
@@ -75,20 +75,20 @@ def convert_markdown(
# markdowns metadata consists as list of strings -- one item per # markdowns metadata consists as list of strings -- one item per
# line. let's convert into single strings. # line. let's convert into single strings.
for key, value in meta.items(): for key, value in meta.items():
value = '\n'.join(value) value = "\n".join(value)
meta[key] = value meta[key] = value
# convert known metadata # convert known metadata
# date: datetime # date: datetime
if 'date' in meta: if "date" in meta:
meta['date'] = datetime.fromisoformat(meta['date']) meta["date"] = datetime.fromisoformat(meta["date"])
meta['date'] = meta['date'].astimezone() meta["date"] = meta["date"].astimezone()
# tags: list[str] and lower case # tags: list[str] and lower case
if 'tags' in meta: if "tags" in meta:
tags = meta['tags'].split(',') tags = meta["tags"].split(",")
tags = [t.lower() for t in tags] tags = [t.lower() for t in tags]
tags = [t.strip() for t in tags] tags = [t.strip() for t in tags]
meta['tags'] = tags meta["tags"] = tags
return content, meta return content, meta
@@ -98,25 +98,25 @@ class MarkdownLinkTreeprocessor(Treeprocessor):
def run(self, root: Element) -> Element: def run(self, root: Element) -> Element:
for element in root.iter(): for element in root.iter():
if element.tag == 'a': if element.tag == "a":
url = element.get('href') url = element.get("href")
# element.get could also return None, we haven't seen this so # element.get could also return None, we haven't seen this so
# far, so lets wait if we raise this # far, so lets wait if we raise this
assert url is not None assert url is not None
url = str(url) url = str(url)
converted = self.convert(url) converted = self.convert(url)
element.set('href', converted) element.set("href", converted)
return root return root
def convert(self, url: str) -> str: def convert(self, url: str) -> str:
scheme, netloc, path, query, fragment = urlsplit(url) scheme, netloc, path, query, fragment = urlsplit(url)
logger.debug( logger.debug(
f'{url}: {scheme=} {netloc=} {path=} {query=} {fragment=}' f"{url}: {scheme=} {netloc=} {path=} {query=} {fragment=}"
) )
if scheme or netloc or not path: if scheme or netloc or not path:
return url return url
if path.endswith('.md'): if path.endswith(".md"):
path = path[:-3] + '.html' path = path[:-3] + ".html"
url = urlunsplit((scheme, netloc, path, query, fragment)) url = urlunsplit((scheme, netloc, path, query, fragment))
return url return url
@@ -128,6 +128,6 @@ class MarkdownLinkExtension(Extension):
def extendMarkdown(self, md: Markdown) -> None: def extendMarkdown(self, md: Markdown) -> None:
md.treeprocessors.register( md.treeprocessors.register(
MarkdownLinkTreeprocessor(md), MarkdownLinkTreeprocessor(md),
'mdlink', "mdlink",
0, 0,
) )

View File

@@ -4,10 +4,11 @@
# remove when we don't support py38 anymore # remove when we don't support py38 anymore
from __future__ import annotations from __future__ import annotations
import configparser
import argparse import argparse
import shutil import configparser
import os import os
import shutil
import blag import blag
@@ -47,7 +48,7 @@ def copy_default_theme() -> None:
""" """
print("Copying default theme...") print("Copying default theme...")
for dir_ in 'templates', 'content', 'static': for dir_ in "templates", "content", "static":
print(f" Copying {dir_}...") print(f" Copying {dir_}...")
try: try:
shutil.copytree( shutil.copytree(
@@ -89,13 +90,13 @@ def quickstart(args: argparse.Namespace | None) -> None:
) )
config = configparser.ConfigParser() config = configparser.ConfigParser()
config['main'] = { config["main"] = {
'base_url': base_url, "base_url": base_url,
'title': title, "title": title,
'description': description, "description": description,
'author': author, "author": author,
} }
with open('config.ini', 'w') as fh: with open("config.ini", "w") as fh:
config.write(fh) config.write(fh)
copy_default_theme() copy_default_theme()

View File

@@ -1 +1 @@
-__VERSION__ = '1.5.0'
+__VERSION__ = "1.5.0"

View File

@@ -1,9 +1,10 @@
# remove when we don't support py38 anymore # remove when we don't support py38 anymore
from __future__ import annotations from __future__ import annotations
from argparse import Namespace
from typing import Iterator, Callable
from tempfile import TemporaryDirectory
import os import os
from argparse import Namespace
from tempfile import TemporaryDirectory
from typing import Callable, Iterator
import pytest import pytest
from jinja2 import Environment, Template from jinja2 import Environment, Template
@@ -14,43 +15,43 @@ from blag import blag, quickstart
@pytest.fixture @pytest.fixture
def environment(cleandir: str) -> Iterator[Environment]: def environment(cleandir: str) -> Iterator[Environment]:
site = { site = {
'base_url': 'site base_url', "base_url": "site base_url",
'title': 'site title', "title": "site title",
'description': 'site description', "description": "site description",
'author': 'site author', "author": "site author",
} }
env = blag.environment_factory('templates', globals_=dict(site=site)) env = blag.environment_factory("templates", globals_=dict(site=site))
yield env yield env
@pytest.fixture @pytest.fixture
def page_template(environment: Environment) -> Iterator[Template]: def page_template(environment: Environment) -> Iterator[Template]:
yield environment.get_template('page.html') yield environment.get_template("page.html")
@pytest.fixture @pytest.fixture
def article_template(environment: Environment) -> Iterator[Template]: def article_template(environment: Environment) -> Iterator[Template]:
yield environment.get_template('article.html') yield environment.get_template("article.html")
@pytest.fixture @pytest.fixture
def index_template(environment: Environment) -> Iterator[Template]: def index_template(environment: Environment) -> Iterator[Template]:
yield environment.get_template('index.html') yield environment.get_template("index.html")
@pytest.fixture @pytest.fixture
def archive_template(environment: Environment) -> Iterator[Template]: def archive_template(environment: Environment) -> Iterator[Template]:
yield environment.get_template('archive.html') yield environment.get_template("archive.html")
@pytest.fixture @pytest.fixture
def tags_template(environment: Environment) -> Iterator[Template]: def tags_template(environment: Environment) -> Iterator[Template]:
yield environment.get_template('tags.html') yield environment.get_template("tags.html")
@pytest.fixture @pytest.fixture
def tag_template(environment: Environment) -> Iterator[Template]: def tag_template(environment: Environment) -> Iterator[Template]:
yield environment.get_template('tag.html') yield environment.get_template("tag.html")
@pytest.fixture @pytest.fixture
@@ -65,8 +66,8 @@ author = a. u. thor
""" """
with TemporaryDirectory() as dir: with TemporaryDirectory() as dir:
os.mkdir(f'{dir}/build') os.mkdir(f"{dir}/build")
with open(f'{dir}/config.ini', 'w') as fh: with open(f"{dir}/config.ini", "w") as fh:
fh.write(config) fh.write(config)
# change directory # change directory
old_cwd = os.getcwd() old_cwd = os.getcwd()
@@ -79,11 +80,10 @@ author = a. u. thor
@pytest.fixture @pytest.fixture
def args(cleandir: Callable[[], Iterator[str]]) -> Iterator[Namespace]: def args(cleandir: Callable[[], Iterator[str]]) -> Iterator[Namespace]:
args = Namespace( args = Namespace(
input_dir='content', input_dir="content",
output_dir='build', output_dir="build",
static_dir='static', static_dir="static",
template_dir='templates', template_dir="templates",
) )
yield args yield args

View File

@@ -1,73 +1,73 @@
# remove when we don't support py38 anymore # remove when we don't support py38 anymore
from __future__ import annotations from __future__ import annotations
from tempfile import TemporaryDirectory
import os import os
from datetime import datetime
from typing import Any
from argparse import Namespace from argparse import Namespace
from datetime import datetime
from tempfile import TemporaryDirectory
from typing import Any
import pytest import pytest
from pytest import CaptureFixture, LogCaptureFixture
from jinja2 import Template from jinja2 import Template
from pytest import CaptureFixture, LogCaptureFixture
from blag import __VERSION__ from blag import __VERSION__, blag
from blag import blag
def test_generate_feed(cleandir: str) -> None: def test_generate_feed(cleandir: str) -> None:
articles: list[tuple[str, dict[str, Any]]] = [] articles: list[tuple[str, dict[str, Any]]] = []
blag.generate_feed(articles, 'build', ' ', ' ', ' ', ' ') blag.generate_feed(articles, "build", " ", " ", " ", " ")
assert os.path.exists('build/atom.xml') assert os.path.exists("build/atom.xml")
def test_feed(cleandir: str) -> None: def test_feed(cleandir: str) -> None:
articles: list[tuple[str, dict[str, Any]]] = [ articles: list[tuple[str, dict[str, Any]]] = [
( (
'dest1.html', "dest1.html",
{ {
'title': 'title1', "title": "title1",
'date': datetime(2019, 6, 6), "date": datetime(2019, 6, 6),
'content': 'content1', "content": "content1",
}, },
), ),
( (
'dest2.html', "dest2.html",
{ {
'title': 'title2', "title": "title2",
'date': datetime(1980, 5, 9), "date": datetime(1980, 5, 9),
'content': 'content2', "content": "content2",
}, },
), ),
] ]
blag.generate_feed( blag.generate_feed(
articles, articles,
'build', "build",
'https://example.com/', "https://example.com/",
'blog title', "blog title",
'blog description', "blog description",
'blog author', "blog author",
) )
with open('build/atom.xml') as fh: with open("build/atom.xml") as fh:
feed = fh.read() feed = fh.read()
assert '<title>blog title</title>' in feed assert "<title>blog title</title>" in feed
# enable when https://github.com/getpelican/feedgenerator/issues/22 # enable when https://github.com/getpelican/feedgenerator/issues/22
# is fixed # is fixed
# assert '<subtitle>blog description</subtitle>' in feed # assert '<subtitle>blog description</subtitle>' in feed
assert '<author><name>blog author</name></author>' in feed assert "<author><name>blog author</name></author>" in feed
# article 1 # article 1
assert '<title>title1</title>' in feed assert "<title>title1</title>" in feed
assert '<summary type="html">title1' in feed assert '<summary type="html">title1' in feed
assert '<published>2019-06-06' in feed assert "<published>2019-06-06" in feed
assert '<content type="html">content1' in feed assert '<content type="html">content1' in feed
assert '<link href="https://example.com/dest1.html"' in feed assert '<link href="https://example.com/dest1.html"' in feed
# article 2 # article 2
assert '<title>title2</title>' in feed assert "<title>title2</title>" in feed
assert '<summary type="html">title2' in feed assert '<summary type="html">title2' in feed
assert '<published>1980-05-09' in feed assert "<published>1980-05-09" in feed
assert '<content type="html">content2' in feed assert '<content type="html">content2' in feed
assert '<link href="https://example.com/dest2.html"' in feed assert '<link href="https://example.com/dest2.html"' in feed
@@ -77,57 +77,57 @@ def test_generate_feed_with_description(cleandir: str) -> None:
# the feed, otherwise we simply use the title of the article # the feed, otherwise we simply use the title of the article
articles: list[tuple[str, dict[str, Any]]] = [ articles: list[tuple[str, dict[str, Any]]] = [
( (
'dest.html', "dest.html",
{ {
'title': 'title', "title": "title",
'description': 'description', "description": "description",
'date': datetime(2019, 6, 6), "date": datetime(2019, 6, 6),
'content': 'content', "content": "content",
}, },
) )
] ]
blag.generate_feed(articles, 'build', ' ', ' ', ' ', ' ') blag.generate_feed(articles, "build", " ", " ", " ", " ")
with open('build/atom.xml') as fh: with open("build/atom.xml") as fh:
feed = fh.read() feed = fh.read()
assert '<title>title</title>' in feed assert "<title>title</title>" in feed
assert '<summary type="html">description' in feed assert '<summary type="html">description' in feed
assert '<published>2019-06-06' in feed assert "<published>2019-06-06" in feed
assert '<content type="html">content' in feed assert '<content type="html">content' in feed
def test_parse_args_build() -> None: def test_parse_args_build() -> None:
# test default args # test default args
args = blag.parse_args(['build']) args = blag.parse_args(["build"])
assert args.input_dir == 'content' assert args.input_dir == "content"
assert args.output_dir == 'build' assert args.output_dir == "build"
assert args.template_dir == 'templates' assert args.template_dir == "templates"
assert args.static_dir == 'static' assert args.static_dir == "static"
# input dir # input dir
args = blag.parse_args(['build', '-i', 'foo']) args = blag.parse_args(["build", "-i", "foo"])
assert args.input_dir == 'foo' assert args.input_dir == "foo"
args = blag.parse_args(['build', '--input-dir', 'foo']) args = blag.parse_args(["build", "--input-dir", "foo"])
assert args.input_dir == 'foo' assert args.input_dir == "foo"
# output dir # output dir
args = blag.parse_args(['build', '-o', 'foo']) args = blag.parse_args(["build", "-o", "foo"])
assert args.output_dir == 'foo' assert args.output_dir == "foo"
args = blag.parse_args(['build', '--output-dir', 'foo']) args = blag.parse_args(["build", "--output-dir", "foo"])
assert args.output_dir == 'foo' assert args.output_dir == "foo"
# template dir # template dir
args = blag.parse_args(['build', '-t', 'foo']) args = blag.parse_args(["build", "-t", "foo"])
assert args.template_dir == 'foo' assert args.template_dir == "foo"
args = blag.parse_args(['build', '--template-dir', 'foo']) args = blag.parse_args(["build", "--template-dir", "foo"])
assert args.template_dir == 'foo' assert args.template_dir == "foo"
# static dir # static dir
args = blag.parse_args(['build', '-s', 'foo']) args = blag.parse_args(["build", "-s", "foo"])
assert args.static_dir == 'foo' assert args.static_dir == "foo"
args = blag.parse_args(['build', '--static-dir', 'foo']) args = blag.parse_args(["build", "--static-dir", "foo"])
assert args.static_dir == 'foo' assert args.static_dir == "foo"
def test_get_config() -> None: def test_get_config() -> None:
@@ -140,24 +140,24 @@ author = a. u. thor
""" """
# happy path # happy path
with TemporaryDirectory() as dir: with TemporaryDirectory() as dir:
configfile = f'{dir}/config.ini' configfile = f"{dir}/config.ini"
with open(configfile, 'w') as fh: with open(configfile, "w") as fh:
fh.write(config) fh.write(config)
config_parsed = blag.get_config(configfile) config_parsed = blag.get_config(configfile)
assert config_parsed['base_url'] == 'https://example.com/' assert config_parsed["base_url"] == "https://example.com/"
assert config_parsed['title'] == 'title' assert config_parsed["title"] == "title"
assert config_parsed['description'] == 'description' assert config_parsed["description"] == "description"
assert config_parsed['author'] == 'a. u. thor' assert config_parsed["author"] == "a. u. thor"
# a missing required config causes a sys.exit # a missing required config causes a sys.exit
for x in 'base_url', 'title', 'description', 'author': for x in "base_url", "title", "description", "author":
config2 = '\n'.join( config2 = "\n".join(
[line for line in config.splitlines() if not line.startswith(x)] [line for line in config.splitlines() if not line.startswith(x)]
) )
with TemporaryDirectory() as dir: with TemporaryDirectory() as dir:
configfile = f'{dir}/config.ini' configfile = f"{dir}/config.ini"
with open(configfile, 'w') as fh: with open(configfile, "w") as fh:
fh.write(config2) fh.write(config2)
with pytest.raises(SystemExit): with pytest.raises(SystemExit):
config_parsed = blag.get_config(configfile) config_parsed = blag.get_config(configfile)
@@ -171,19 +171,19 @@ description = description
author = a. u. thor author = a. u. thor
""" """
with TemporaryDirectory() as dir: with TemporaryDirectory() as dir:
configfile = f'{dir}/config.ini' configfile = f"{dir}/config.ini"
with open(configfile, 'w') as fh: with open(configfile, "w") as fh:
fh.write(config) fh.write(config)
config_parsed = blag.get_config(configfile) config_parsed = blag.get_config(configfile)
assert config_parsed['base_url'] == 'https://example.com/' assert config_parsed["base_url"] == "https://example.com/"
def test_environment_factory(cleandir: str) -> None: def test_environment_factory(cleandir: str) -> None:
globals_: dict[str, object] = {'foo': 'bar', 'test': 'me'} globals_: dict[str, object] = {"foo": "bar", "test": "me"}
env = blag.environment_factory("templates", globals_=globals_) env = blag.environment_factory("templates", globals_=globals_)
assert env.globals['foo'] == 'bar' assert env.globals["foo"] == "bar"
assert env.globals['test'] == 'me' assert env.globals["test"] == "me"
def test_process_markdown( def test_process_markdown(
@@ -216,12 +216,12 @@ foo bar
convertibles = [] convertibles = []
for i, txt in enumerate((page1, article1, article2)): for i, txt in enumerate((page1, article1, article2)):
with open(f'content/{str(i)}', 'w') as fh: with open(f"content/{str(i)}", "w") as fh:
fh.write(txt) fh.write(txt)
convertibles.append((str(i), str(i))) convertibles.append((str(i), str(i)))
articles, pages = blag.process_markdown( articles, pages = blag.process_markdown(
convertibles, 'content', 'build', page_template, article_template convertibles, "content", "build", page_template, article_template
) )
assert isinstance(articles, list) assert isinstance(articles, list)
@@ -229,14 +229,14 @@ foo bar
for dst, context in articles: for dst, context in articles:
assert isinstance(dst, str) assert isinstance(dst, str)
assert isinstance(context, dict) assert isinstance(context, dict)
assert 'content' in context assert "content" in context
assert isinstance(pages, list) assert isinstance(pages, list)
assert len(pages) == 1 assert len(pages) == 1
for dst, context in pages: for dst, context in pages:
assert isinstance(dst, str) assert isinstance(dst, str)
assert isinstance(context, dict) assert isinstance(context, dict)
assert 'content' in context assert "content" in context
def test_build(args: Namespace) -> None: def test_build(args: Namespace) -> None:
@@ -268,63 +268,63 @@ foo bar
# write some convertibles # write some convertibles
convertibles = [] convertibles = []
for i, txt in enumerate((page1, article1, article2)): for i, txt in enumerate((page1, article1, article2)):
with open(f'{args.input_dir}/{str(i)}.md', 'w') as fh: with open(f"{args.input_dir}/{str(i)}.md", "w") as fh:
fh.write(txt) fh.write(txt)
convertibles.append((str(i), str(i))) convertibles.append((str(i), str(i)))
# some static files # some static files
with open(f'{args.static_dir}/test', 'w') as fh: with open(f"{args.static_dir}/test", "w") as fh:
fh.write('hello') fh.write("hello")
os.mkdir(f'{args.input_dir}/testdir') os.mkdir(f"{args.input_dir}/testdir")
with open(f'{args.input_dir}/testdir/test', 'w') as fh: with open(f"{args.input_dir}/testdir/test", "w") as fh:
fh.write('hello') fh.write("hello")
blag.build(args) blag.build(args)
# test existence of the three converted files # test existence of the three converted files
for i in range(3): for i in range(3):
assert os.path.exists(f'{args.output_dir}/{i}.html') assert os.path.exists(f"{args.output_dir}/{i}.html")
# ... static file # ... static file
assert os.path.exists(f'{args.output_dir}/test') assert os.path.exists(f"{args.output_dir}/test")
# ... directory # ... directory
assert os.path.exists(f'{args.output_dir}/testdir/test') assert os.path.exists(f"{args.output_dir}/testdir/test")
# ... feed # ... feed
assert os.path.exists(f'{args.output_dir}/atom.xml') assert os.path.exists(f"{args.output_dir}/atom.xml")
# ... index # ... index
assert os.path.exists(f'{args.output_dir}/index.html') assert os.path.exists(f"{args.output_dir}/index.html")
# ... archive # ... archive
assert os.path.exists(f'{args.output_dir}/archive.html') assert os.path.exists(f"{args.output_dir}/archive.html")
# ... tags # ... tags
assert os.path.exists(f'{args.output_dir}/tags/index.html') assert os.path.exists(f"{args.output_dir}/tags/index.html")
assert os.path.exists(f'{args.output_dir}/tags/foo.html') assert os.path.exists(f"{args.output_dir}/tags/foo.html")
assert os.path.exists(f'{args.output_dir}/tags/bar.html') assert os.path.exists(f"{args.output_dir}/tags/bar.html")
@pytest.mark.parametrize( @pytest.mark.parametrize(
'template', "template",
[ [
'page.html', "page.html",
'article.html', "article.html",
'index.html', "index.html",
'archive.html', "archive.html",
'tags.html', "tags.html",
'tag.html', "tag.html",
] ],
) )
def test_missing_template_raises(template: str, args: Namespace) -> None: def test_missing_template_raises(template: str, args: Namespace) -> None:
os.remove(f'templates/{template}') os.remove(f"templates/{template}")
with pytest.raises(SystemExit): with pytest.raises(SystemExit):
blag.build(args) blag.build(args)
def test_main(cleandir: str) -> None: def test_main(cleandir: str) -> None:
blag.main(['build']) blag.main(["build"])
def test_cli_version(capsys: CaptureFixture[str]) -> None: def test_cli_version(capsys: CaptureFixture[str]) -> None:
with pytest.raises(SystemExit) as ex: with pytest.raises(SystemExit) as ex:
blag.main(['--version']) blag.main(["--version"])
# normal system exit # normal system exit
assert ex.value.code == 0 assert ex.value.code == 0
# proper version reported # proper version reported
@@ -333,8 +333,8 @@ def test_cli_version(capsys: CaptureFixture[str]) -> None:
def test_cli_verbose(cleandir: str, caplog: LogCaptureFixture) -> None: def test_cli_verbose(cleandir: str, caplog: LogCaptureFixture) -> None:
blag.main(['build']) blag.main(["build"])
assert 'DEBUG' not in caplog.text assert "DEBUG" not in caplog.text
blag.main(['--verbose', 'build']) blag.main(["--verbose", "build"])
assert 'DEBUG' in caplog.text assert "DEBUG" in caplog.text

View File

@@ -1,7 +1,8 @@
# remove when we don't support py38 anymore # remove when we don't support py38 anymore
from __future__ import annotations from __future__ import annotations
import time
import threading import threading
import time
from argparse import Namespace from argparse import Namespace
import pytest import pytest
@@ -11,17 +12,17 @@ from blag import devserver
def test_get_last_modified(cleandir: str) -> None: def test_get_last_modified(cleandir: str) -> None:
# take initial time # take initial time
t1 = devserver.get_last_modified(['content']) t1 = devserver.get_last_modified(["content"])
# wait a bit, create a file and measure again # wait a bit, create a file and measure again
time.sleep(0.1) time.sleep(0.1)
with open('content/test', 'w') as fh: with open("content/test", "w") as fh:
fh.write('boo') fh.write("boo")
t2 = devserver.get_last_modified(['content']) t2 = devserver.get_last_modified(["content"])
# wait a bit and take time again # wait a bit and take time again
time.sleep(0.1) time.sleep(0.1)
t3 = devserver.get_last_modified(['content']) t3 = devserver.get_last_modified(["content"])
assert t2 > t1 assert t2 > t1
assert t2 == t3 assert t2 == t3
@@ -29,20 +30,20 @@ def test_get_last_modified(cleandir: str) -> None:
def test_autoreload_builds_immediately(args: Namespace) -> None: def test_autoreload_builds_immediately(args: Namespace) -> None:
# create a dummy file that can be build # create a dummy file that can be build
with open('content/test.md', 'w') as fh: with open("content/test.md", "w") as fh:
fh.write('boo') fh.write("boo")
t = threading.Thread( t = threading.Thread(
target=devserver.autoreload, target=devserver.autoreload,
args=(args,), args=(args,),
daemon=True, daemon=True,
) )
t0 = devserver.get_last_modified(['build']) t0 = devserver.get_last_modified(["build"])
t.start() t.start()
# try for 5 seconds... # try for 5 seconds...
for i in range(5): for i in range(5):
time.sleep(1) time.sleep(1)
t1 = devserver.get_last_modified(['build']) t1 = devserver.get_last_modified(["build"])
print(t1) print(t1)
if t1 > t0: if t1 > t0:
break break
@@ -60,16 +61,16 @@ def test_autoreload(args: Namespace) -> None:
) )
t.start() t.start()
t0 = devserver.get_last_modified(['build']) t0 = devserver.get_last_modified(["build"])
# create a dummy file that can be build # create a dummy file that can be build
with open('content/test.md', 'w') as fh: with open("content/test.md", "w") as fh:
fh.write('boo') fh.write("boo")
# try for 5 seconds to see if we rebuild once... # try for 5 seconds to see if we rebuild once...
for i in range(5): for i in range(5):
time.sleep(1) time.sleep(1)
t1 = devserver.get_last_modified(['build']) t1 = devserver.get_last_modified(["build"])
if t1 > t0: if t1 > t0:
break break
assert t1 > t0 assert t1 > t0

View File

@@ -1,10 +1,11 @@
# remove when we don't support py38 anymore # remove when we don't support py38 anymore
from __future__ import annotations from __future__ import annotations
from datetime import datetime from datetime import datetime
from typing import Any from typing import Any
import pytest
import markdown import markdown
import pytest
from blag.markdown import convert_markdown, markdown_factory from blag.markdown import convert_markdown, markdown_factory
@@ -13,23 +14,23 @@ from blag.markdown import convert_markdown, markdown_factory
"input_, expected", "input_, expected",
[ [
# inline # inline
('[test](test.md)', 'test.html'), ("[test](test.md)", "test.html"),
('[test](test.md "test")', 'test.html'), ('[test](test.md "test")', "test.html"),
('[test](a/test.md)', 'a/test.html'), ("[test](a/test.md)", "a/test.html"),
('[test](a/test.md "test")', 'a/test.html'), ('[test](a/test.md "test")', "a/test.html"),
('[test](/test.md)', '/test.html'), ("[test](/test.md)", "/test.html"),
('[test](/test.md "test")', '/test.html'), ('[test](/test.md "test")', "/test.html"),
('[test](/a/test.md)', '/a/test.html'), ("[test](/a/test.md)", "/a/test.html"),
('[test](/a/test.md "test")', '/a/test.html'), ('[test](/a/test.md "test")', "/a/test.html"),
# reference # reference
('[test][]\n[test]: test.md ' '', 'test.html'), ("[test][]\n[test]: test.md " "", "test.html"),
('[test][]\n[test]: test.md "test"', 'test.html'), ('[test][]\n[test]: test.md "test"', "test.html"),
('[test][]\n[test]: a/test.md', 'a/test.html'), ("[test][]\n[test]: a/test.md", "a/test.html"),
('[test][]\n[test]: a/test.md "test"', 'a/test.html'), ('[test][]\n[test]: a/test.md "test"', "a/test.html"),
('[test][]\n[test]: /test.md', '/test.html'), ("[test][]\n[test]: /test.md", "/test.html"),
('[test][]\n[test]: /test.md "test"', '/test.html'), ('[test][]\n[test]: /test.md "test"', "/test.html"),
('[test][]\n[test]: /a/test.md', '/a/test.html'), ("[test][]\n[test]: /a/test.md", "/a/test.html"),
('[test][]\n[test]: /a/test.md "test"', '/a/test.html'), ('[test][]\n[test]: /a/test.md "test"', "/a/test.html"),
], ],
) )
def test_convert_markdown_links(input_: str, expected: str) -> None: def test_convert_markdown_links(input_: str, expected: str) -> None:
@@ -42,11 +43,11 @@ def test_convert_markdown_links(input_: str, expected: str) -> None:
"input_, expected", "input_, expected",
[ [
# scheme # scheme
('[test](https://)', 'https://'), ("[test](https://)", "https://"),
# netloc # netloc
('[test](//test.md)', '//test.md'), ("[test](//test.md)", "//test.md"),
# no path # no path
('[test]()', ''), ("[test]()", ""),
], ],
) )
def test_dont_convert_normal_links(input_: str, expected: str) -> None: def test_dont_convert_normal_links(input_: str, expected: str) -> None:
@@ -58,13 +59,13 @@ def test_dont_convert_normal_links(input_: str, expected: str) -> None:
@pytest.mark.parametrize( @pytest.mark.parametrize(
"input_, expected", "input_, expected",
[ [
('foo: bar', {'foo': 'bar'}), ("foo: bar", {"foo": "bar"}),
('foo: those are several words', {'foo': 'those are several words'}), ("foo: those are several words", {"foo": "those are several words"}),
('tags: this, is, a, test\n', {'tags': ['this', 'is', 'a', 'test']}), ("tags: this, is, a, test\n", {"tags": ["this", "is", "a", "test"]}),
('tags: this, IS, a, test', {'tags': ['this', 'is', 'a', 'test']}), ("tags: this, IS, a, test", {"tags": ["this", "is", "a", "test"]}),
( (
'date: 2020-01-01 12:10', "date: 2020-01-01 12:10",
{'date': datetime(2020, 1, 1, 12, 10).astimezone()}, {"date": datetime(2020, 1, 1, 12, 10).astimezone()},
), ),
], ],
) )
@@ -88,9 +89,9 @@ this --- is -- a test ...
""" """
html, meta = convert_markdown(md, md1) html, meta = convert_markdown(md, md1)
assert 'mdash' in html assert "mdash" in html
assert 'ndash' in html assert "ndash" in html
assert 'hellip' in html assert "hellip" in html
def test_smarty_code() -> None: def test_smarty_code() -> None:
@@ -102,6 +103,6 @@ this --- is -- a test ...
``` ```
""" """
html, meta = convert_markdown(md, md1) html, meta = convert_markdown(md, md1)
assert 'mdash' not in html assert "mdash" not in html
assert 'ndash' not in html assert "ndash" not in html
assert 'hellip' not in html assert "hellip" not in html

View File

@@ -1,5 +1,6 @@
# remove when we don't support py38 anymore # remove when we don't support py38 anymore
from __future__ import annotations from __future__ import annotations
import os import os
from pytest import MonkeyPatch from pytest import MonkeyPatch
@@ -8,26 +9,26 @@ from blag.quickstart import get_input, quickstart
def test_get_input_default_answer(monkeypatch: MonkeyPatch) -> None: def test_get_input_default_answer(monkeypatch: MonkeyPatch) -> None:
monkeypatch.setattr('builtins.input', lambda x: '') monkeypatch.setattr("builtins.input", lambda x: "")
answer = get_input("foo", "bar") answer = get_input("foo", "bar")
assert answer == 'bar' assert answer == "bar"
def test_get_input(monkeypatch: MonkeyPatch) -> None: def test_get_input(monkeypatch: MonkeyPatch) -> None:
monkeypatch.setattr('builtins.input', lambda x: 'baz') monkeypatch.setattr("builtins.input", lambda x: "baz")
answer = get_input("foo", "bar") answer = get_input("foo", "bar")
assert answer == 'baz' assert answer == "baz"
def test_quickstart(cleandir: str, monkeypatch: MonkeyPatch) -> None: def test_quickstart(cleandir: str, monkeypatch: MonkeyPatch) -> None:
monkeypatch.setattr('builtins.input', lambda x: 'foo') monkeypatch.setattr("builtins.input", lambda x: "foo")
quickstart(None) quickstart(None)
with open('config.ini', 'r') as fh: with open("config.ini", "r") as fh:
data = fh.read() data = fh.read()
assert 'base_url = foo' in data assert "base_url = foo" in data
assert 'title = foo' in data assert "title = foo" in data
assert 'description = foo' in data assert "description = foo" in data
assert 'author = foo' in data assert "author = foo" in data
for template in ( for template in (
"archive.html", "archive.html",
@@ -38,7 +39,7 @@ def test_quickstart(cleandir: str, monkeypatch: MonkeyPatch) -> None:
"tag.html", "tag.html",
"tags.html", "tags.html",
): ):
assert os.path.exists(f'templates/{template}') assert os.path.exists(f"templates/{template}")
for directory in "build", "content", "static": for directory in "build", "content", "static":
assert os.path.exists(directory) assert os.path.exists(directory)

View File

@@ -1,5 +1,6 @@
# remove when we don't support py38 anymore # remove when we don't support py38 anymore
from __future__ import annotations from __future__ import annotations
import datetime import datetime
from jinja2 import Template from jinja2 import Template
@@ -7,91 +8,91 @@ from jinja2 import Template
def test_page(page_template: Template) -> None: def test_page(page_template: Template) -> None:
ctx = { ctx = {
'content': 'this is the content', "content": "this is the content",
'title': 'this is the title', "title": "this is the title",
} }
result = page_template.render(ctx) result = page_template.render(ctx)
assert 'this is the content' in result assert "this is the content" in result
assert 'this is the title' in result assert "this is the title" in result
def test_article(article_template: Template) -> None: def test_article(article_template: Template) -> None:
ctx = { ctx = {
'content': 'this is the content', "content": "this is the content",
'title': 'this is the title', "title": "this is the title",
'date': datetime.datetime(1980, 5, 9), "date": datetime.datetime(1980, 5, 9),
} }
result = article_template.render(ctx) result = article_template.render(ctx)
assert 'this is the content' in result assert "this is the content" in result
assert 'this is the title' in result assert "this is the title" in result
assert '1980-05-09' in result assert "1980-05-09" in result
def test_index(index_template: Template) -> None: def test_index(index_template: Template) -> None:
entry = { entry = {
'title': 'this is a title', "title": "this is a title",
'dst': 'https://example.com/link', "dst": "https://example.com/link",
'date': datetime.datetime(1980, 5, 9), "date": datetime.datetime(1980, 5, 9),
} }
archive = [entry] archive = [entry]
ctx = { ctx = {
'archive': archive, "archive": archive,
} }
result = index_template.render(ctx) result = index_template.render(ctx)
assert 'site title' in result assert "site title" in result
assert 'this is a title' in result assert "this is a title" in result
assert '1980-05-09' in result assert "1980-05-09" in result
assert 'https://example.com/link' in result assert "https://example.com/link" in result
assert '/archive.html' in result assert "/archive.html" in result
def test_archive(archive_template: Template) -> None: def test_archive(archive_template: Template) -> None:
entry = { entry = {
'title': 'this is a title', "title": "this is a title",
'dst': 'https://example.com/link', "dst": "https://example.com/link",
'date': datetime.datetime(1980, 5, 9), "date": datetime.datetime(1980, 5, 9),
} }
archive = [entry] archive = [entry]
ctx = { ctx = {
'archive': archive, "archive": archive,
} }
result = archive_template.render(ctx) result = archive_template.render(ctx)
assert 'Archive' in result assert "Archive" in result
assert 'this is a title' in result assert "this is a title" in result
assert '1980-05-09' in result assert "1980-05-09" in result
assert 'https://example.com/link' in result assert "https://example.com/link" in result
def test_tags(tags_template: Template) -> None: def test_tags(tags_template: Template) -> None:
tags = [('foo', 42)] tags = [("foo", 42)]
ctx = { ctx = {
'tags': tags, "tags": tags,
} }
result = tags_template.render(ctx) result = tags_template.render(ctx)
assert 'Tags' in result assert "Tags" in result
assert 'foo.html' in result assert "foo.html" in result
assert 'foo' in result assert "foo" in result
assert '42' in result assert "42" in result
def test_tag(tag_template: Template) -> None: def test_tag(tag_template: Template) -> None:
entry = { entry = {
'title': 'this is a title', "title": "this is a title",
'dst': 'https://example.com/link', "dst": "https://example.com/link",
'date': datetime.datetime(1980, 5, 9), "date": datetime.datetime(1980, 5, 9),
} }
archive = [entry] archive = [entry]
ctx = { ctx = {
'tag': 'foo', "tag": "foo",
'archive': archive, "archive": archive,
} }
result = tag_template.render(ctx) result = tag_template.render(ctx)
assert 'foo' in result assert "foo" in result
assert 'this is a title' in result assert "this is a title" in result
assert '1980-05-09' in result assert "1980-05-09" in result
assert 'https://example.com/link' in result assert "https://example.com/link" in result