from dataclasses import dataclass
from typing import Any, Optional
import datetime
import re
import os

import jinja_markdown
from flask import (
    Flask, send_from_directory, render_template, make_response, url_for,
    render_template_string
)
import sass
from livereload import Server

# Importing performs monkeypatching
import xml_lexer  # NOQA: F401
import ini_lexer  # NOQA: F401


app = Flask(__name__)
extensions = app.jinja_options.setdefault('extensions', [])
extensions.append('jinja2_highlight.HighlightExtension')
extensions.append('jinja_markdown.MarkdownExtension')

jinja_markdown.EXTENSIONS.append("mdx_spantables")
jinja_markdown.EXTENSIONS.append("toc")


HTAG = re.compile(r"<h(\d)[^>]*id=\"([^\"]+)\"[^>]*>([^<]*)</h")
TOC_HTAG_LEVELS = {"1", "2"}

HOST = os.environ.get("EA_HOST", "https://bsnk.me")
TEMPLATES = "templates"
PAGES_BASE = os.environ.get("EA_PROOT", "pages")
STATIC = ["images", "static", "mice"]

ROOT = os.environ.get("EA_ROOT", "")

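# Tables of contents for the site. Keys are page filenames or directory names;
# values are either a plain title string or a (title, children) tuple, where
# children can be a nested mapping, None (sub-entries are pulled from the
# page's own headings), or a sentinel for "no children". Integer 0 keys carry
# free-text descriptions that are rendered without a link.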
EAMUSE_CONTENTS = {
    "getting_started.html": ("Getting started and following along", {
        0: ("A quick one-stop shop for getting set up with the tools you'll want on hand if you want to investigate "
            + "things for yourself.")
    }),
    "transport.html": ("Transport layer", None),
    "packet.html": ("The inner packet structure", None),
    "protocol.html": ("Communication protocol details", {
        0: ("There are a crazy number of sub pages here, so just go check the contents there.")
    }),
    "server.html": "Let's write a server",
    0: ("Misc pages", {
        "cardid.html": ("Parsing and converting card IDs", ())
    }),
}

SEGA_CONTENTS = {
    "intro.html": ("Introduction to RingEdge 2", ()),
    "network": ("Networking", {
        "auth.html": "ALL.Net Authentication",
        "billing.html": "ALL.Net Billing",
    }),
    "hardware": ("Hardware", {
        "jvs.html": "JVS",
        "touch.html": "Touchscreen",
    }),
    "software": ("Software", {
        "pcp": ("PCP", {"libpcp.html": "libpcp"}),
        "drivers": ("Device drivers", None),
        "security": ("Security", {
            "alphadvd.html": "AlphaDVD",
        }),
        "groovemaster.html": "GrooveMaster.ini",
    }),
    "manual": ("Manual", {
        "errors.html": "Error Codes",
        "keychip.html": "Keychip Modding",
    }),
    # "network": ("Networking", {
    #     "allnet.html": "ALL.Net"
    # }),
    "misc": ("Misc", {
        "partition.html": "SEGA Partition Structure"
    }),
}

CONTENTS = {
    "eamuse": EAMUSE_CONTENTS,
    "sega": SEGA_CONTENTS
}

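# Catalogue of known SEGA assemblies, rendered inline by the part() helper below.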
@dataclass
class Part:
    id: str
    name: str
    description: Optional[str] = None
    page: Optional[str] = None


PARTS = {
    "838-14971": Part("838-14971", "Aime NFC daughter board", "NFC RW BD TN32MSEC003S"),
    "838-15221": Part("838-15221", "Serial I/F BD Touchpanel Gunze",
                      "The interface board for the touchscreen on MaiMai versions pre-DX.")
}

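# Template helper: render a part number as a decorated <span> carrying the ASSY
# ID and description when the part is known, or as a plain span otherwise.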
def part(id_):
    if (part := PARTS.get(id_)):
        return (
            f'<span class="part" tabindex="0">{part.name}<span>'
            f'<span>ASSY ID</span><span>SEGA {part.id}</span>'
            f'<span>Description</span><span>{part.description}</span>'
            '</span></span>'
        )
    return f'<span class="part">{id_}</span>'

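# Template helper: build a nested <ul> listing every XRPC protocol page under
# templates/<PAGES_BASE>/proto, with one sub-entry per <h2> anchor in each page.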
def generate_xrpc_list():
    output = "<ul>"
    proto = TEMPLATES + "/" + PAGES_BASE + "/proto"
    for base, _, files in os.walk(proto):
        prefix = base[len(proto):].replace("\\", "/").strip("/")
        if prefix:
            prefix = prefix.replace("/", ".") + "."
        for i in files:
            if i.startswith("~"):
                continue

            delim = "_" if prefix else "."
            href = f"{ROOT}/proto{base[len(proto):]}/{i}"
            output += f"<li><code><a href=\"{href}\">"
            output += prefix + i.replace(".html", delim + "%s")
            output += "</a></code></li>"
            with open(os.path.join(base, i)) as f:
                headers = re.findall('<h2 id="([^"]*?)">', f.read())
            output += "<ul>"
            for j in headers:
                output += f"<li><code><a href=\"{href}#{j}\">"
                output += prefix + i.replace(".html", delim + j)
                output += "</a></code></li>"
            output += "</ul>"
    return output + "</ul>"

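# Template helper: render the table of contents for the section containing
# `route` as nested <ol>/<ul> lists, descending into CONTENTS and, where a
# page's children are None, into the page's own <h1>/<h2> anchors.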
def generate_toc(base, name, route, start=1):
    parts = route.strip("/").split("/")

    toc = CONTENTS
    for i in PAGES_BASE.split("/")[1:]:
        toc = toc[i]

    if parts == [""]:
        parts = []
    for i in parts:
        if i in toc:
            toc = toc[i]
            if isinstance(toc, tuple):
                toc = toc[1]
            if not isinstance(toc, dict):
                return ""
        else:
            return ""

    def walk(toc, path, start=1):
        unordered = len(toc) == 1 and 0 in toc
        out = f'<{"u" if unordered else "o"}l start="{start}">'
        for url in toc:
            if isinstance(toc[url], tuple):
                name, children = toc[url]
            elif isinstance(toc[url], str):
                name, children = toc[url], -1
            else:
                raise ValueError

            out += "<li>"
            if isinstance(url, str):
                fqu = f"{ROOT}/{path}"
                if not url.startswith("#"):
                    fqu += "/"
                fqu += url
                while "//" in fqu:
                    fqu = fqu.replace("//", "/")
                if not fqu.endswith((".html", "/")) and "#" not in fqu:
                    fqu += "/"
                out += f'<a href="{fqu}">{name}</a>'
            else:
                out += name
            out += "</li>"

            if children == -1:
                continue
            if children is None:
                filename = "/".join((TEMPLATES, PAGES_BASE, path, url))
                while "//" in filename:
                    filename = filename.replace("//", "/")

                if url == "":
                    filename += "index.html"
                if "." not in filename:
                    filename += "/index.html"

                with open(filename) as page:
                    headers = HTAG.findall(page.read())

                children = {}
                for level, anchor, text in headers:
                    if level in TOC_HTAG_LEVELS:
                        children[f"#{anchor}"] = text
                if not children:
                    children = None

            if children is not None:
                out += walk(children, f"{path}/{url}" if isinstance(url, str) else path)

        out += f'</{"u" if unordered else "o"}l>'

        return out

    return walk(toc, route, start)

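# Compute the previous/next/breadcrumb links shown in the page footer, based on
# the position of `route` among its siblings in CONTENTS.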
def generate_footer_links(base, name, route):
    parts = route.strip("/").split("/")
    if not parts:
        return {}

    toc = CONTENTS
    path = []
    for i in parts[:-1]:
        if i in toc:
            path.append(i)
            toc = toc[i]
            if isinstance(toc, tuple):
                toc = toc[1]
            if not isinstance(toc, dict):
                toc = None
                break
        elif toc == CONTENTS:
            toc = toc[PAGES_BASE.partition("/")[2]]
        else:
            toc = None
            break

    if toc == CONTENTS and len(parts) == 1:
        assert toc is not None
        toc = toc[PAGES_BASE.partition("/")[2]]

    if toc is None:
        siblings = None
        us_idx = -1
        parent = ""
    else:
        siblings = [i for i in toc.keys() if isinstance(i, str)]
        try:
            us_idx = siblings.index(parts[-1])
        except ValueError:
            us_idx = -1
        parent = ROOT + "/" + "/".join(parts[:-1])
        if not parent.endswith("/"):
            parent += "/"

    links: dict[str, Any] = dict(footer_previous="", footer_crumbs=[], footer_current="", footer_next="")

    if siblings and us_idx > 0:
        links["footer_previous"] = parent + siblings[us_idx - 1]

    if parts:
        built = ROOT + "/"
        for i in parts[:-1]:
            built += f"{i}"
            if not built.endswith((".html", "/")):
                built += "/"
            links["footer_crumbs"].append((built, i))
        links["footer_current"] = parts[-1]

    if siblings and us_idx < len(siblings) - 1 and us_idx != -1:
        links["footer_next"] = parent + siblings[us_idx + 1]

    return links

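# Template helper: decode a Windows IOCTL code (given as a literal string) back
# into its CTL_CODE(DeviceType, Function, Method, Access) components.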
def ioctl(original):
    original = eval(original)  # Unsafe as hell

    def CTL_CODE(DeviceType, Function, Method, Access):
        return ((DeviceType) << 16) | ((Access) << 14) | ((Function) << 2) | (Method)

    deviceType = original >> 16
    access = (original >> 14) & 0x3
    function = (original >> 2) & 0xfff
    method = original & 0x3

    assert hex(CTL_CODE(deviceType, function, method, access)) == hex(original)

    deviceType = hex(deviceType)
    if deviceType == "0x9c40":
        deviceType = "FILE_DEVICE_SEGA"
    function = hex(function)
    method = "METHOD_" + ["BUFFERED", "IN_DIRECT", "OUT_DIRECT", "NEITHER"][method]
    access = ["FILE_ANY_ACCESS", "FILE_READ_ACCESS", "FILE_WRITE_ACCESS"][access]

    return f"CTL_CODE({deviceType}, {function}, {method}, {access})"

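# Loose assets served straight from the repository root.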
@app.route("/tango.css")
|
|
|
|
def tango():
|
|
|
|
return send_from_directory(".", "tango.css")
|
2022-04-11 20:27:15 +02:00
|
|
|
|
|
|
|
|
2021-12-29 02:55:43 +01:00
|
|
|
@app.route("/headers.js")
|
|
|
|
def header_script():
|
|
|
|
return send_from_directory(".", "headers.js")
|
2021-12-28 21:54:12 +01:00
|
|
|
|
|
|
|
|
2023-06-19 03:37:46 +02:00
|
|
|
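# Register a Flask route for every file found under the STATIC directories.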
def install_static():
    for i in STATIC:
        for base, _, files in os.walk(i):
            for name in files:
                def handler_factory(base, name):
                    def handler():
                        return send_from_directory(base, name)
                    return handler

                local_base = base.replace("\\", "/").strip(".").strip("/")
                route = local_base + "/" + name
                if not route.startswith("/"):
                    route = "/" + route

                app.add_url_rule(route, route, handler_factory(base, name))

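# Build a view function that renders an HTML template page with the common
# template context (HOST/ROOT/canonical URL, footer links and the helpers above).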
def handler_factory_html(base, name, route):
    def handler():
        return render_template(
            os.path.join(base, name).strip("/").replace("\\", "/"),
            HOST=HOST,
            ROOT=ROOT,
            CANONICAL=ROOT + route,
            **generate_footer_links(base, name, route),

            generate_xrpc_list=generate_xrpc_list,
            generate_toc=lambda start=1: generate_toc(base, name, route, start),
            relative=lambda path: os.path.join(base, path).strip("/").replace("\\", "/"),
            part=part,
            ioctl=ioctl,
        )
    return handler

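# Build a view function for a Markdown page: the title is taken from the page's
# first top-level heading, and the file is wrapped in a small Jinja template that
# extends sega.html and renders the Markdown inside it.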
def handler_factory_markdown(base, name, route):
    md_name = name[:-5] + ".md"
    md_path = os.path.join(base, md_name).strip("/").replace("\\", "/")

    title = "Markdown Page"
    with open(os.path.join(TEMPLATES, md_path)) as md_f:
        for line in md_f:
            line = line.strip()
            if line.startswith("#") and not line.startswith("##"):
                title = line[1:].strip()
                break

    template = (
        f"{{% extends \"sega.html\" %}}{{% block title %}}{title}{{% endblock %}}"
        f"{{% block body %}}"
        f"{{% markdown %}}{{% include \"{md_path}\" %}}{{% endmarkdown %}}"
        f"{{% endblock %}}"
    )

    def handler():
        return render_template_string(
            template,
            HOST=HOST,
            ROOT=ROOT,
            CANONICAL=ROOT + route,
            **generate_footer_links(base, name, route),

            generate_xrpc_list=generate_xrpc_list,
            generate_toc=lambda start=1: generate_toc(base, name, route, start),
            relative=lambda path: os.path.join(base, path).strip("/").replace("\\", "/"),
            part=part,
            ioctl=ioctl,
        )
    return handler

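# Walk templates/<PAGES_BASE> and register a route for every .html template and
# every .md page (Markdown filenames starting with "~" are skipped).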
def install_pages():
    for base, _, files in os.walk(TEMPLATES + "/" + PAGES_BASE):
        if ".git" in base:
            continue
        if base.startswith(TEMPLATES):
            base = base[len(TEMPLATES):]

        for name in files:
            handler_factory = None

            if name.endswith(".html"):
                handler_factory = handler_factory_html
            elif name.endswith(".md") and not name.startswith("~"):
                handler_factory = handler_factory_markdown
                name = name[:-3] + ".html"

            if handler_factory is not None:
                local_base = base.replace("\\", "/").strip(".").strip("/")
                if local_base.startswith(PAGES_BASE):
                    local_base = local_base[len(PAGES_BASE):]

                if name.endswith(".md"):
                    route = local_base + "/" + name[:-3] + ".html"
                else:
                    route = local_base + "/" + name
                if route.endswith("/index.html"):
                    route = route[:-10]
                if not route.startswith("/"):
                    route = "/" + route

                app.add_url_rule(route, route, handler_factory(base, name, route))

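# Sitemap for crawlers: one entry per argument-less GET route, using the
# template's mtime as the last-modified timestamp where the file exists.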
@app.route("/sitemap.xml")
|
|
|
|
def sitemap():
|
2022-04-11 20:27:15 +02:00
|
|
|
host_base = HOST + ROOT
|
2021-12-29 02:18:06 +01:00
|
|
|
|
|
|
|
links = []
|
|
|
|
for rule in app.url_map.iter_rules():
|
2023-06-19 03:37:46 +02:00
|
|
|
if (not rule.methods or "GET" in rule.methods) and len(rule.arguments) == 0:
|
2021-12-29 02:18:06 +01:00
|
|
|
url = url_for(rule.endpoint, **(rule.defaults or {}))
|
|
|
|
if not url.endswith(("/", ".html", ".png")):
|
|
|
|
continue
|
|
|
|
path = rule.endpoint
|
|
|
|
if path.endswith("/"):
|
|
|
|
path += "index.html"
|
|
|
|
path = os.path.join(TEMPLATES, PAGES_BASE, path.lstrip("/"))
|
|
|
|
if os.path.exists(path):
|
|
|
|
mod_time = os.path.getmtime(path)
|
|
|
|
mod_time = datetime.datetime.fromtimestamp(mod_time).strftime("%Y-%m-%dT%H:%M:%SZ")
|
|
|
|
else:
|
|
|
|
mod_time = None
|
|
|
|
links.append((host_base + url, mod_time))
|
2021-12-29 02:23:43 +01:00
|
|
|
response = make_response(render_template("sitemap.xml", urls=links[::-1]))
|
2021-12-29 02:18:06 +01:00
|
|
|
response.headers["Content-Type"] = "application/xml"
|
|
|
|
return response
|
2021-12-28 21:54:12 +01:00
|
|
|
|
2022-04-11 20:27:15 +02:00
|
|
|
|
2023-06-19 03:37:46 +02:00
|
|
|
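# Compile the site stylesheet from main.scss into static/main.css.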
def compile_sass():
    with open("static/main.css", "w") as main_css:
        main_css.write(sass.compile(filename="main.scss", output_style="compressed"))  # type: ignore


install_static()
install_pages()
compile_sass()

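# Development entry point: serve the app through livereload, recompiling the
# stylesheet when the SCSS sources change.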
def run_dev():
    app.config['TEMPLATES_AUTO_RELOAD'] = True
    app.config['DEBUG'] = True

    # app.run(debug=True, port=3000, host="0.0.0.0")

    server = Server(app.wsgi_app)
    server.watch(".")
    server.watch("main.scss", func=compile_sass)
    server.watch("styles", func=compile_sass)
    server.serve(port=3000)


if __name__ == '__main__':
    run_dev()