mirror of https://github.com/venthur/blag.git synced 2025-11-25 20:52:43 +00:00

Re-formatted the code

Bastian Venthur
2022-09-01 12:41:25 +02:00
parent 7b6b219cdf
commit 87d619cc1c
9 changed files with 167 additions and 135 deletions

View File

@@ -1 +1 @@
-from blag.version import __VERSION__ as __VERSION__  # noqa
+from blag.version import __VERSION__ as __VERSION__  # noqa

View File

@@ -30,8 +30,8 @@ from blag.quickstart import quickstart
 logger = logging.getLogger(__name__)
 logging.basicConfig(
     level=logging.INFO,
     format='%(asctime)s %(levelname)s %(name)s %(message)s',
 )
@@ -72,10 +72,11 @@ def parse_args(args: list[str] | None = None) -> argparse.Namespace:
     parser.add_argument(
         '--version',
         action='version',
-        version='%(prog)s '+__VERSION__,
+        version='%(prog)s ' + __VERSION__,
     )
     parser.add_argument(
-        '-v', '--verbose',
+        '-v',
+        '--verbose',
         action='store_true',
         help='Verbose output.',
     )
@@ -84,61 +85,69 @@ def parse_args(args: list[str] | None = None) -> argparse.Namespace:
     commands.required = True
     build_parser = commands.add_parser(
         'build',
         help='Build website.',
     )
     build_parser.set_defaults(func=build)
     build_parser.add_argument(
-        '-i', '--input-dir',
+        '-i',
+        '--input-dir',
         default='content',
         help='Input directory (default: content)',
     )
     build_parser.add_argument(
-        '-o', '--output-dir',
+        '-o',
+        '--output-dir',
         default='build',
         help='Ouptut directory (default: build)',
     )
     build_parser.add_argument(
-        '-t', '--template-dir',
+        '-t',
+        '--template-dir',
         default='templates',
         help='Template directory (default: templates)',
     )
     build_parser.add_argument(
-        '-s', '--static-dir',
+        '-s',
+        '--static-dir',
         default='static',
         help='Static directory (default: static)',
     )
     quickstart_parser = commands.add_parser(
         'quickstart',
         help="Quickstart blag, creating necessary configuration.",
     )
     quickstart_parser.set_defaults(func=quickstart)
     serve_parser = commands.add_parser(
         'serve',
         help="Start development server.",
     )
     serve_parser.set_defaults(func=serve)
     serve_parser.add_argument(
-        '-i', '--input-dir',
+        '-i',
+        '--input-dir',
         default='content',
         help='Input directory (default: content)',
     )
     serve_parser.add_argument(
-        '-o', '--output-dir',
+        '-o',
+        '--output-dir',
         default='build',
         help='Ouptut directory (default: build)',
     )
     serve_parser.add_argument(
-        '-t', '--template-dir',
+        '-t',
+        '--template-dir',
         default='templates',
         help='Template directory (default: templates)',
     )
     serve_parser.add_argument(
-        '-s', '--static-dir',
+        '-s',
+        '--static-dir',
         default='static',
         help='Static directory (default: static)',
     )
     return parser.parse_args(args)
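
For reference, a minimal sketch of how the subcommand interface defined above is exercised. The subcommands and options come straight from this hunk; the import path and the sample invocation are assumptions, not part of the diff:

# Sketch only: the module path is an assumption and may differ in this
# version of blag; the subcommands and flags are as defined in the hunk above.
from blag.blag import parse_args

args = parse_args(['build', '-i', 'content', '-o', 'build'])
print(args.input_dir, args.output_dir)  # -> content build
args.func(args)  # set_defaults(func=build) routes this call to build()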
@@ -224,8 +233,9 @@ def build(args: argparse.Namespace) -> None:
     convertibles = []
     for root, dirnames, filenames in os.walk(args.input_dir):
         for filename in filenames:
-            rel_src = os.path.relpath(f'{root}/{filename}',
-                                      start=args.input_dir)
+            rel_src = os.path.relpath(
+                f'{root}/{filename}', start=args.input_dir
+            )
             # all non-markdown files are just copied over, the markdown
             # files are converted to html
             if rel_src.endswith('.md'):
@@ -233,8 +243,10 @@ def build(args: argparse.Namespace) -> None:
                 rel_dst = rel_dst[:-3] + '.html'
                 convertibles.append((rel_src, rel_dst))
             else:
-                shutil.copy(f'{args.input_dir}/{rel_src}',
-                            f'{args.output_dir}/{rel_src}')
+                shutil.copy(
+                    f'{args.input_dir}/{rel_src}',
+                    f'{args.output_dir}/{rel_src}',
+                )
         for dirname in dirnames:
             # all directories are copied into the output directory
             path = os.path.relpath(f'{root}/{dirname}', start=args.input_dir)
@@ -264,7 +276,8 @@ def build(args: argparse.Namespace) -> None:
     )
     generate_feed(
-        articles, args.output_dir,
+        articles,
+        args.output_dir,
         base_url=config['base_url'],
         blog_title=config['title'],
         blog_description=config['description'],
@@ -364,10 +377,10 @@ def generate_feed(
     """
     logger.info('Generating Atom feed.')
     feed = feedgenerator.Atom1Feed(
         link=base_url,
         title=blog_title,
         description=blog_description,
         feed_url=base_url + 'atom.xml',
     )
     for dst, context in articles:
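
The feed object created above comes from the feedgenerator package. A minimal sketch of how such a feed is typically populated and written out, assuming feedgenerator's Atom1Feed API; the values are placeholders, not taken from blag:

# Sketch, assuming the feedgenerator package; values are illustrative only.
import feedgenerator

feed = feedgenerator.Atom1Feed(
    link='https://example.com/',
    title='example blog',
    description='an example feed',
    feed_url='https://example.com/atom.xml',
)
feed.add_item(
    title='first post',
    link='https://example.com/first-post.html',
    description='summary of the first post',
)
with open('atom.xml', 'w') as fh:
    feed.write(fh, 'utf-8')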

View File

@@ -91,8 +91,10 @@ def serve(args: argparse.Namespace) -> None:
         contains the input-, template- and static dir

     """
-    httpd = HTTPServer(('', 8000), partial(SimpleHTTPRequestHandler,
-                                           directory=args.output_dir))
+    httpd = HTTPServer(
+        ('', 8000),
+        partial(SimpleHTTPRequestHandler, directory=args.output_dir),
+    )
     proc = multiprocessing.Process(target=autoreload, args=(args,))
     proc.start()
     logger.info("\n\n Devserver Started -- visit http://localhost:8000\n")
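
The reformatted call binds the served directory with functools.partial; the same standard-library pattern in isolation, with the port and directory hard-coded for illustration:

# Standard-library sketch of the pattern above: serve a directory on port
# 8000 using SimpleHTTPRequestHandler bound via functools.partial.
from functools import partial
from http.server import HTTPServer, SimpleHTTPRequestHandler

httpd = HTTPServer(
    ('', 8000),
    partial(SimpleHTTPRequestHandler, directory='build'),
)
httpd.serve_forever()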

View File

@@ -33,8 +33,11 @@ def markdown_factory() -> Markdown:
     """
     md = Markdown(
         extensions=[
-            'meta', 'fenced_code', 'codehilite', 'smarty',
-            MarkdownLinkExtension()
+            'meta',
+            'fenced_code',
+            'codehilite',
+            'smarty',
+            MarkdownLinkExtension(),
         ],
         output_format='html',
     )
@@ -91,9 +94,7 @@ def convert_markdown(
 class MarkdownLinkTreeprocessor(Treeprocessor):
-    """Converts relative links to .md files to .html
-    """
+    """Converts relative links to .md files to .html"""

     def run(self, root: Element) -> Element:
         for element in root.iter():
@@ -112,7 +113,7 @@ class MarkdownLinkTreeprocessor(Treeprocessor):
         logger.debug(
             f'{url}: {scheme=} {netloc=} {path=} {query=} {fragment=}'
         )
-        if (scheme or netloc or not path):
+        if scheme or netloc or not path:
             return url
         if path.endswith('.md'):
             path = path[:-3] + '.html'
@@ -122,10 +123,11 @@ class MarkdownLinkExtension(Extension):
 class MarkdownLinkExtension(Extension):
-    """markdown.extension that converts relative .md- to .html-links.
-    """
+    """markdown.extension that converts relative .md- to .html-links."""

     def extendMarkdown(self, md: Markdown) -> None:
         md.treeprocessors.register(
-            MarkdownLinkTreeprocessor(md), 'mdlink', 0,
+            MarkdownLinkTreeprocessor(md),
+            'mdlink',
+            0,
         )
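
Taken together, the factory and the link extension above are used roughly like this; the sample document is made up, but the imports match the test module further down:

# Usage sketch of the reformatted helpers; the input document is illustrative.
from blag.markdown import convert_markdown, markdown_factory

md = markdown_factory()
html, meta = convert_markdown(md, '[about](about.md)')
# MarkdownLinkTreeprocessor rewrites the relative .md link to .html
assert 'about.html' in html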

View File

@@ -64,10 +64,10 @@ def quickstart(args: argparse.Namespace | None) -> None:
     config = configparser.ConfigParser()
     config['main'] = {
         'base_url': base_url,
         'title': title,
         'description': description,
         'author': author,
     }
     with open('config.ini', 'w') as fh:
         config.write(fh)
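
The hunk above writes the quickstart answers into config.ini via configparser; reading it back is symmetric. A small sketch, with the section and keys taken from the hunk and the filename as used above:

# Sketch: read back the config.ini written by quickstart with configparser.
import configparser

config = configparser.ConfigParser()
config.read('config.ini')
print(config['main']['base_url'], config['main']['author'])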

View File

@@ -50,9 +50,7 @@ def tag_template(environment: Environment) -> Iterator[Template]:
 @pytest.fixture
 def cleandir() -> Iterator[str]:
-    """Create a temporary workind directory and cwd.
-
-    """
+    """Create a temporary workind directory and cwd."""
     config = """
 [main]
 base_url = https://example.com/
@@ -78,9 +76,9 @@ author = a. u. thor
 def args(cleandir: Callable[[], Iterator[str]]) -> Iterator[Namespace]:
     args = Namespace(
         input_dir='content',
         output_dir='build',
         static_dir='static',
         template_dir='templates',
     )
     yield args

View File

@@ -28,7 +28,7 @@ def test_feed(cleandir: str) -> None:
                 'title': 'title1',
                 'date': datetime(2019, 6, 6),
                 'content': 'content1',
-            }
+            },
         ),
         (
             'dest2.html',
@@ -36,12 +36,18 @@ def test_feed(cleandir: str) -> None:
                 'title': 'title2',
                 'date': datetime(1980, 5, 9),
                 'content': 'content2',
-            }
+            },
         ),
     ]

-    blag.generate_feed(articles, 'build', 'https://example.com/',
-                       'blog title', 'blog description', 'blog author')
+    blag.generate_feed(
+        articles,
+        'build',
+        'https://example.com/',
+        'blog title',
+        'blog description',
+        'blog author',
+    )
     with open('build/atom.xml') as fh:
         feed = fh.read()
@@ -69,15 +75,17 @@ def test_feed(cleandir: str) -> None:
 def test_generate_feed_with_description(cleandir: str) -> None:
     # if a description is provided, it will be used as the summary in
     # the feed, otherwise we simply use the title of the article
-    articles: list[tuple[str, dict[str, Any]]] = [(
-        'dest.html',
-        {
-            'title': 'title',
-            'description': 'description',
-            'date': datetime(2019, 6, 6),
-            'content': 'content',
-        }
-    )]
+    articles: list[tuple[str, dict[str, Any]]] = [
+        (
+            'dest.html',
+            {
+                'title': 'title',
+                'description': 'description',
+                'date': datetime(2019, 6, 6),
+                'content': 'content',
+            },
+        )
+    ]

     blag.generate_feed(articles, 'build', ' ', ' ', ' ', ' ')
     with open('build/atom.xml') as fh:
@@ -144,10 +152,9 @@ author = a. u. thor
     # a missing required config causes a sys.exit
     for x in 'base_url', 'title', 'description', 'author':
-        config2 = '\n'.join([line
-                             for line
-                             in config.splitlines()
-                             if not line.startswith(x)])
+        config2 = '\n'.join(
+            [line for line in config.splitlines() if not line.startswith(x)]
+        )
         with TemporaryDirectory() as dir:
             configfile = f'{dir}/config.ini'
             with open(configfile, 'w') as fh:
@@ -173,10 +180,7 @@ author = a. u. thor
 def test_environment_factory() -> None:
-    globals_: dict[str, object] = {
-        'foo': 'bar',
-        'test': 'me'
-    }
+    globals_: dict[str, object] = {'foo': 'bar', 'test': 'me'}
     env = blag.environment_factory(globals_=globals_)
     assert env.globals['foo'] == 'bar'
     assert env.globals['test'] == 'me'
@@ -217,11 +221,7 @@ foo bar
         convertibles.append((str(i), str(i)))

     articles, pages = blag.process_markdown(
-        convertibles,
-        'content',
-        'build',
-        page_template,
-        article_template
+        convertibles, 'content', 'build', page_template, article_template
     )

     assert isinstance(articles, list)

View File

@@ -32,9 +32,11 @@ def test_autoreload_builds_immediately(args: Namespace) -> None:
     with open('content/test.md', 'w') as fh:
         fh.write('boo')

-    t = threading.Thread(target=devserver.autoreload,
-                         args=(args, ),
-                         daemon=True,)
+    t = threading.Thread(
+        target=devserver.autoreload,
+        args=(args,),
+        daemon=True,
+    )
     t0 = devserver.get_last_modified(['build'])
     t.start()
     # try for 5 seconds...
@@ -47,11 +49,15 @@ def test_autoreload_builds_immediately(args: Namespace) -> None:
     assert t1 > t0


-@pytest.mark.filterwarnings("ignore::pytest.PytestUnhandledThreadExceptionWarning")  # noqa
+@pytest.mark.filterwarnings(
+    "ignore::pytest.PytestUnhandledThreadExceptionWarning"
+)
 def test_autoreload(args: Namespace) -> None:
-    t = threading.Thread(target=devserver.autoreload,
-                         args=(args, ),
-                         daemon=True,)
+    t = threading.Thread(
+        target=devserver.autoreload,
+        args=(args,),
+        daemon=True,
+    )
     t.start()
     t0 = devserver.get_last_modified(['build'])

View File

@@ -9,54 +9,65 @@ import markdown
 from blag.markdown import convert_markdown, markdown_factory


-@pytest.mark.parametrize("input_, expected", [
-    # inline
-    ('[test](test.md)', 'test.html'),
-    ('[test](test.md "test")', 'test.html'),
-    ('[test](a/test.md)', 'a/test.html'),
-    ('[test](a/test.md "test")', 'a/test.html'),
-    ('[test](/test.md)', '/test.html'),
-    ('[test](/test.md "test")', '/test.html'),
-    ('[test](/a/test.md)', '/a/test.html'),
-    ('[test](/a/test.md "test")', '/a/test.html'),
-    # reference
-    ('[test][]\n[test]: test.md ''', 'test.html'),
-    ('[test][]\n[test]: test.md "test"', 'test.html'),
-    ('[test][]\n[test]: a/test.md', 'a/test.html'),
-    ('[test][]\n[test]: a/test.md "test"', 'a/test.html'),
-    ('[test][]\n[test]: /test.md', '/test.html'),
-    ('[test][]\n[test]: /test.md "test"', '/test.html'),
-    ('[test][]\n[test]: /a/test.md', '/a/test.html'),
-    ('[test][]\n[test]: /a/test.md "test"', '/a/test.html'),
-])
+@pytest.mark.parametrize(
+    "input_, expected",
+    [
+        # inline
+        ('[test](test.md)', 'test.html'),
+        ('[test](test.md "test")', 'test.html'),
+        ('[test](a/test.md)', 'a/test.html'),
+        ('[test](a/test.md "test")', 'a/test.html'),
+        ('[test](/test.md)', '/test.html'),
+        ('[test](/test.md "test")', '/test.html'),
+        ('[test](/a/test.md)', '/a/test.html'),
+        ('[test](/a/test.md "test")', '/a/test.html'),
+        # reference
+        ('[test][]\n[test]: test.md ' '', 'test.html'),
+        ('[test][]\n[test]: test.md "test"', 'test.html'),
+        ('[test][]\n[test]: a/test.md', 'a/test.html'),
+        ('[test][]\n[test]: a/test.md "test"', 'a/test.html'),
+        ('[test][]\n[test]: /test.md', '/test.html'),
+        ('[test][]\n[test]: /test.md "test"', '/test.html'),
+        ('[test][]\n[test]: /a/test.md', '/a/test.html'),
+        ('[test][]\n[test]: /a/test.md "test"', '/a/test.html'),
+    ],
+)
 def test_convert_markdown_links(input_: str, expected: str) -> None:
     md = markdown_factory()
     html, _ = convert_markdown(md, input_)
     assert expected in html


-@pytest.mark.parametrize("input_, expected", [
-    # scheme
-    ('[test](https://)', 'https://'),
-    # netloc
-    ('[test](//test.md)', '//test.md'),
-    # no path
-    ('[test]()', ''),
-])
+@pytest.mark.parametrize(
+    "input_, expected",
+    [
+        # scheme
+        ('[test](https://)', 'https://'),
+        # netloc
+        ('[test](//test.md)', '//test.md'),
+        # no path
+        ('[test]()', ''),
+    ],
+)
 def test_dont_convert_normal_links(input_: str, expected: str) -> None:
     md = markdown_factory()
     html, _ = convert_markdown(md, input_)
     assert expected in html


-@pytest.mark.parametrize("input_, expected", [
-    ('foo: bar', {'foo': 'bar'}),
-    ('foo: those are several words', {'foo': 'those are several words'}),
-    ('tags: this, is, a, test\n', {'tags': ['this', 'is', 'a', 'test']}),
-    ('tags: this, IS, a, test', {'tags': ['this', 'is', 'a', 'test']}),
-    ('date: 2020-01-01 12:10', {'date':
-        datetime(2020, 1, 1, 12, 10).astimezone()}),
-])
+@pytest.mark.parametrize(
+    "input_, expected",
+    [
+        ('foo: bar', {'foo': 'bar'}),
+        ('foo: those are several words', {'foo': 'those are several words'}),
+        ('tags: this, is, a, test\n', {'tags': ['this', 'is', 'a', 'test']}),
+        ('tags: this, IS, a, test', {'tags': ['this', 'is', 'a', 'test']}),
+        (
+            'date: 2020-01-01 12:10',
+            {'date': datetime(2020, 1, 1, 12, 10).astimezone()},
+        ),
+    ],
+)
 def test_convert_metadata(input_: str, expected: dict[str, Any]) -> None:
     md = markdown_factory()
     _, meta = convert_markdown(md, input_)