atom feed!

This commit is contained in:
2025-07-26 14:20:22 +02:00
parent aec0177c6f
commit fee2a36453
11 changed files with 291 additions and 37 deletions

View File

@@ -294,8 +294,10 @@ document.addEventListener('scroll', (event) => {
#let people = (
alex: (
nick: "alex_s168",
name: "Alexander Nutz",
url: "https://alex.vxcc.dev",
badge: "https://alex.vxcc.dev/res/badge.png",
mail: "nutz.alexander@vxcc.dev",
),
ote: (
nick: "otesunki",
@@ -309,7 +311,7 @@ document.addEventListener('scroll', (event) => {
),
)
#metadata(json.encode(people)) <meta-people>
#metadata(people) <meta-people>
#let person(p) = {
flink(p.url, p.nick)

View File

@@ -22,16 +22,18 @@ rule typst
rule git_inp
command = git log -1 --format="--input git_rev=%H --input git_commit_date=\\\"%ad\\\"" --date="format:%d. %B %Y %H:%M" -- $in > $out.temp && \
cmp -s $out.temp $out || mv $out.temp $out; \
rm -f $out.temp
cmp -s $out.temp $out || mv $out.temp $out; \
git log -1 --format="%cI" -- $in > $out.iso.temp && \
cmp -s $out.iso.temp $out || mv $out.iso.temp $out.iso; \
rm -f $out.temp $out.iso.temp
restat = 1
rule badges_list
command = typst query $in "<meta-people>" --root . --input query=true --field value --one | jq -r . | jq -r 'to_entries[] | [.key,.value.badge] | @tsv' > $out
command = typst query $in "<meta-people>" --root . --input query=true --field value --one | jq -r 'to_entries[] | [.key,.value.badge] | @tsv' > $out
build build/badges.txt: badges_list common.typ
rule curl
command = curl $url > $out
command = curl $curlflags $url > $out
rule cp
command = cp $flags $in $out
@@ -44,7 +46,10 @@ rule runclean
build clean : runclean
rule ttf2woff
command = fonttools ttLib.woff2 compress $in -o $out
command = fonttools ttLib.woff2 compress $in -o $out 2>/dev/null
rule python
command = python $in
rule python_capture
command = python $in > $out
@@ -54,18 +59,16 @@ rule minhtml
build build.ninja: regen | config.py build/badges.txt res pages
build build/deploy/coffee.js : python_capture gen_coffee_js.py
rule cargo_release_bin
command = (cd $in && cargo build --release) && cp $in/target/release/$file $out
pool = console
build build/coffee_server : cargo_release_bin coffee
file = coffee
rule expect_img_size
command = eval "[ $$(ffprobe -v error -select_streams v:0 -show_entries stream=width,height -of csv=s=x:p=0 $in) = $size ]" && touch $out
rule touch
command = touch $out
rule ffmpeg_compress
command = ffmpeg -y -i $in -compression_level 100 $out -hide_banner -loglevel error
@@ -73,11 +76,27 @@ rule pngquant
command = pngquant $in -o $out --force --quality $quality
"""
gen += """
build build/deploy/coffee.js : python_capture gen_coffee_js.py
build build/coffee_server : cargo_release_bin coffee
file = coffee
"""
web_targets.append("build/deploy/coffee.js")
web_targets.append("build/coffee_server")
pages = [x for x in os.listdir("./pages/")]
fonts = [x for x in os.listdir("./fonts/")]
gen += """
build build/pages.typ build/pages.json : python pages.gen.py | pages.in.typ
build gen_typst: phony build/pages.typ | """+ " ".join(f"build/{x}.git_rev.txt.iso" for x in pages) +"""
"""
gen += """
build build/deploy/atom.xml : python gen_feed.py | build/pages.json """ + " ".join(f"build/{x}.nano.html" for x in pages) + """
"""
web_targets.append("build/deploy/atom.xml")
variants = [
{
@@ -101,11 +120,11 @@ variants = [
for page in pages:
gr = "build/" + page + ".git_rev.txt"
gen += "\n"
gen += "build "+gr+" : git_inp pages/" + page + " | build/git_rev.txt"
gen += "build "+gr+" | "+gr+".iso : git_inp pages/" + page + " | build/git_rev.txt\n"
for var in variants:
tg = "build/" + page + var["suffix"]
gen += "\n"
gen += "build "+tg+" : typst " + "pages/" + page + " | "+gr+"\n"
gen += "build "+tg+" : typst " + "pages/" + page + " | "+gr+" || gen_typst\n"
gen += " flags = " + var["args"] + " $$(cat "+gr+")\n"
if tg.endswith(".html"):
gen += "\n"
@@ -142,11 +161,13 @@ if os.path.isfile("build/badges.txt"):
else:
gen += f"curl |@ {val}\n"
gen += " url = "+url+"\n"
gen += " curlflags = -k"
gen += "\n"
gen += f"build {val} : expect_img_size {tg}\n"
gen += f" size = 88x31"
fonts = [x for x in os.listdir("./fonts/")]
for font in fonts:
font = font.replace(".ttf", "")
tg = f"build/deploy/res/{font}.woff2"

58
gen_feed.py Normal file
View File

@@ -0,0 +1,58 @@
# Generate the Atom feed (build/deploy/atom.xml) from the article index
# (build/pages.json) plus the pre-rendered .nano.html page bodies.
import sys, json
from feedgen.feed import FeedGenerator
fg = FeedGenerator()
articles = 0
with open("build/pages.json", "r") as f:
    articles = json.load(f)
# keep only pages explicitly marked for inclusion in the feed
articles = [x for x in articles if x["in-feed"] == True]
# feed-level author list, deduplicated by nick; name falls back to nick,
# email/uri are optional per-author fields
all_authors = {}
for article in articles:
    authors = article["authors"]
    for author in authors:
        nick = author["nick"]
        name = author.get("name", nick)
        mail = author.get("mail", None)
        url = author.get("url", None)
        out = {"name":name}
        if mail:
            out["email"] = mail
        if url:
            out["uri"] = url
        all_authors[nick] = out
fg.author([v for _,v in all_authors.items()])
fg.id("https://alex.vxcc.dev")
fg.title("alex168's block")  # NOTE(review): likely a typo for "alex_s168's blog" — confirm
fg.subtitle("alex_s168's blog")
fg.icon("https://vxcc.dev/alex/res/favicon.png")  # NOTE(review): mixes vxcc.dev/alex with alex.vxcc.dev used above — confirm intended domain
fg.language("en-US")
fg.link(href="https://alex.vxcc.dev/atom.xml", rel="self")
fg.link(href="https://alex.vxcc.dev/", rel="alternate")
# articles list has the newest entry first (see pages.in.typ); iterate
# reversed — presumably so feedgen ends up emitting newest first; verify
for article in reversed(articles):
    page = article["page"]
    url = article["url"]
    title = article["title"]
    summary = article["summary"]
    modified = article["modified"]
    authors = article["authors"]
    # full entry content is the minified standalone HTML render of the page
    content = None
    with open(f"./build/{page}.nano.html", "r") as f:
        content = f.read()
    fe = fg.add_entry()
    fe.id(f"https://vxcc.dev/alex/{url}")  # NOTE(review): different base URL than fg.id — confirm
    fe.title(title)
    fe.summary(summary)
    fe.link(href=url)
    fe.updated(modified)
    fe.content(content, type="html")
    fe.author([all_authors[x["nick"]] for x in authors])
fg.atom_file("build/deploy/atom.xml")

107
pages.gen.py Normal file
View File

@@ -0,0 +1,107 @@
import sys, json, subprocess
def typst_encode_pre(e, ind=" "):
    """Recursively encode a Python value as Typst source code.

    Returns the encoding as a list of lines (no trailing newlines);
    join with "\\n" to obtain the final source text.

    Supported values: dict -> Typst dictionary, list -> Typst array,
    bool/int/float -> literals, str -> quoted string.

    Raises:
        ValueError: for any unsupported value (e.g. None).

    NOTE: dict keys are emitted verbatim, so they must be valid Typst
    identifiers — TODO confirm all producers guarantee that.
    """
    if isinstance(e, dict):
        items = list(e.items())
        if len(items) == 0:
            return ["(:)"]  # Typst's empty-dictionary literal
        if len(items) == 1:
            k, v = items[0]
            enc = typst_encode_pre(v, ind)
            out = ["(" + k + ": " + enc[0]]
            out.extend(enc[1:])
            out[-1] = out[-1] + ")"
            return out
        out = ["("]
        for i, (k, v) in enumerate(items):
            if i > 0:
                out[-1] = out[-1] + ","  # separator after the previous entry's last line
            enc = typst_encode_pre(v, ind)
            out.append(ind + k + ": " + enc[0])
            out.extend(ind + line for line in enc[1:])
        out.append(")")
        return out
    if isinstance(e, list):
        if len(e) == 0:
            return ["()"]  # Typst's empty-array literal
        if len(e) == 1:
            enc = typst_encode_pre(e[0], ind)
            out = ["(" + enc[0]]
            out.extend(enc[1:])
            # Trailing comma is required: "(x,)" is a 1-element array in
            # Typst, while "(x)" is just a parenthesized value.
            out[-1] = out[-1] + ",)"
            return out
        out = ["("]
        for i, v in enumerate(e):
            if i > 0:
                out[-1] = out[-1] + ","
            enc = typst_encode_pre(v, ind)
            out.append(ind + enc[0])
            out.extend(ind + line for line in enc[1:])
        out.append(")")
        return out
    # bool must be checked before int: bool is a subclass of int in Python
    if isinstance(e, bool):
        return ["true" if e else "false"]
    if isinstance(e, (int, float)):
        return [str(e)]
    if isinstance(e, str):
        # TODO: can do better (newlines)
        return [json.dumps(e)]
    raise ValueError(f"can't typst encode {e}")
def typst_encode(e):
    """Render a Python value as a complete Typst source string."""
    lines = typst_encode_pre(e)
    return "\n".join(lines)
def typst_query_one(path, tag):
    """Query a single metadata value tagged `tag` from the Typst file `path`.

    Runs `typst query` with `--input query=true` (so documents can detect
    query mode) and returns the JSON-decoded value, or None when typst
    produced no output. NOTE(review): a failing typst invocation also
    yields empty stdout here and is indistinguishable from "not found".
    """
    cmd = [
        "typst", "query", path, tag,
        "--one",
        "--field", "value",
        "--root", ".",
        "--input", "query=true",
        "--features", "html",
    ]
    proc = subprocess.run(cmd, capture_output=True)
    raw = proc.stdout.decode("utf-8").strip()
    if not raw:
        return None
    return json.loads(raw)
# Build the article index: ask pages.in.typ for the ordered article list,
# then query each page for its <feed-ent> metadata and combine that with
# the git last-modified timestamp produced by the build system.
out = []
for page in typst_query_one("pages.in.typ", "<articles>"):
    p_page = page["page"]
    p_feed = page["feed"]
    p_homepage = page["homepage"]
    path = f"pages/{p_page}"
    # per-article metadata (authors/title/summary) exported by the page itself
    # NOTE(review): typst_query_one returns None on empty output, which would
    # make meta["authors"] below raise — assumes the query always succeeds
    meta = typst_query_one(path, "<feed-ent>")
    # ISO-8601 last-commit date written by the ninja git_inp rule
    last_changed = None
    with open(f"build/{p_page}.git_rev.txt.iso", "r") as f:
        last_changed = f.read().strip()
    res = {
        "url": f"{p_page}.desktop.html",
        "page": p_page,
        "in-feed": p_feed,
        "in-homepage": p_homepage,
        "authors": meta["authors"],
        "title": meta["title"],
        "summary": meta["summary"],
        "modified": last_changed,
    }
    out.append(res)
# emit the index twice: as Typst (consumed by pages via build/pages.typ)
# and as JSON (consumed by gen_feed.py)
with open("build/pages.typ", "w") as f:
    f.write("#let articles = " + typst_encode(out) + "\n")
with open("build/pages.json", "w") as f:
    json.dump(out, f)

30
pages.in.typ Normal file
View File

@@ -0,0 +1,30 @@
// don't import this file directly! it will be post processed by the build script
// generates build/pages.typ
// first element in list will show up first in the homepage and the feed => needs to be newest article!
// fields per entry:
//   page:     file name under pages/
//   feed:     include this article in the Atom feed
//   homepage: list this article on the homepage
#let articles = (
  (
    page: "article-gpu-arch-1.typ",
    // unfinished
    feed: false,
    homepage: false,
  ),
  (
    page: "compiler-pattern-matching.typ",
    feed: true,
    homepage: true,
  ),
  (
    page: "article-favicon.typ",
    feed: true,
    homepage: true,
  ),
  (
    page: "article-make-regex-engine-1.typ",
    feed: true,
    homepage: true,
  ),
)
#metadata(articles) <articles>

View File

@@ -1,18 +1,27 @@
#import "../common.typ": *
#import "../simple-page-layout.typ": *
#import "../core-page-style.typ": *
#import "../components/header.typ": rev-and-authors
#import "../components/header.typ": *
#let article = (
authors: (people.alex,),
title: "The making of the favicon",
html-title: "The making of the favicon",
summary: "It turns out that websites need a favicon, and making one is hard..."
)
#metadata(article) <feed-ent>
#simple-page(
gen-table-of-contents: true,
[The making of the favicon]
article.html-title
)[
#section[
#title[The making of the favicon]
#title(article.title)
#sized-p(small-font-size)[
#rev-and-authors((people.alex,))
#rev-and-authors(article.authors)
]
]

View File

@@ -3,16 +3,25 @@
#import "../core-page-style.typ": *
#import "../components/header.typ": *
#let article = (
authors: (people.alex,),
title: "Designing a GPU architecture: Waves",
html-title: "Designing a GPU architecture: Waves",
summary: "Exploring GPU architecture and designing our own. Part 1: wavefronts / warps"
)
#metadata(article) <feed-ent>
#simple-page(
gen-table-of-contents: true,
[Designing a GPU architecture: Waves]
article.html-title
)[
#section[
#title[Designing a GPU Architecture: Waves]
#title(article.title)
#sized-p(small-font-size)[
#rev-and-authors((people.alex,))
#rev-and-authors(article.authors)
]
]

View File

@@ -3,18 +3,26 @@
#import "../core-page-style.typ": *
#import "../components/header.typ": *
#let article = (
authors: (people.alex,),
title: "Making a simple RegEx engine:
Part 1: Introduction to RegEx",
html-title: "Introduction to RegEx",
summary: "Do you also think that all RegEx engines kinda suck and you want to make your own? probably not"
)
#metadata(article) <feed-ent>
#simple-page(
gen-table-of-contents: true,
[Introduction to RegEx]
article.html-title
)[
#section[
#title[Making a simple RegEx engine]
#title[Part 1: Introduction to RegEx]
#title(article.title)
#sized-p(small-font-size)[
#rev-and-authors((people.alex,))
#rev-and-authors(article.authors)
]
]

View File

@@ -3,16 +3,25 @@
#import "../core-page-style.typ": *
#import "../components/header.typ": *
#let article = (
authors: (people.alex,),
title: "Approaches to pattern matching in compilers",
html-title: "Approaches to Compiler Pattern Matching",
summary: "If you are working on more advanced compilers, you probably had to work with pattern matching already. In this article, we will explore different approaches.",
)
#metadata(article) <feed-ent>
#simple-page(
gen-table-of-contents: true,
[Approaches to Pattern Matching - Alexander Nutz]
article.html-title
)[
#section[
#title[Approaches to pattern matching in compilers]
#title(article.title)
#sized-p(small-font-size)[
#rev-and-authors((people.alex,))
#rev-and-authors(article.authors)
]
]
@@ -314,7 +323,7 @@
Modern processor architecture features like superscalar execution make this even more complicated.
\
As a simple, *non realistic* example, let's imagine a CPU (core) that has one bit operations execution unit,
As a simple, *unrealistic* example, let's imagine a CPU (core) that has one bit operations execution unit,
and two ALU execution units / ports. \
This means that the CPU can execute two instructions in the ALU unit and one instruction in the bit ops unit at the same time.
]

View File

@@ -1,6 +1,7 @@
#import "../common.typ": *
#import "../simple-page-layout.typ": *
#import "../core-page-style.typ": *
#import "../build/pages.typ": articles
#let tree-list(..elements) = {
gen-tree-from-headings(elemfn: (content, x) => [
@@ -24,14 +25,12 @@
#title[alex_s168]
#br()
Articles
Articles (#html-href("atom.xml")[Atom feed])
#br()
#tree-list(
(level:1, body: [ Making a simple RegEx engine ]),
(level:2, body: html-href("article-make-regex-engine-1.typ.desktop.html")[ Part 1: Introduction to RegEx ]),
(level:1, body: html-href("compiler-pattern-matching.typ.desktop.html")[ Approaches to pattern matching in compilers ]),
(level:1, body: html-href("article-favicon.typ.desktop.html")[ Making of the favicon ]),
)
#tree-list(..articles.filter(x => x.in-homepage).map(x => (
level: 1,
body: html-href(x.url, x.title)
)))
#br()
Socials

View File

@@ -48,6 +48,8 @@
// TODO: fix and re-add - #variant-link("less bloated HTML", ".min.html")
- #variant-link("minimal HTML", ".nano.html")
]],
[#html-href("atom.xml")[Atom feed]
#context br()],
[#context if is-html() {
html.elem("style", "