Mirror of https://gitlab.archlinux.org/archlinux/aurweb.git, synced 2025-02-03 10:43:03 +01:00
Merge branch 'mr440'
Commit b6321bbdc5
6 changed files with 250 additions and 155 deletions
@@ -213,6 +213,19 @@ async def index(request: Request):
     return render_template(request, "index.html", context)
 
 
+@router.get("/{archive}.sha256")
+async def archive_sha256(request: Request, archive: str):
+    archivedir = aurweb.config.get("mkpkglists", "archivedir")
+    hashfile = os.path.join(archivedir, f"{archive}.sha256")
+    if not os.path.exists(hashfile):
+        raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
+
+    with open(hashfile) as f:
+        hash_value = f.read()
+    headers = {"Content-Type": "text/plain"}
+    return Response(hash_value, headers=headers)
+
+
 @router.get("/metrics")
 async def metrics(request: Request):
     registry = CollectorRegistry()
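The new route serves, as plain text, the checksum file that mkpkglists now writes next to each archive. A minimal client sketch of fetching it; the host and archive name here are assumptions for illustration, not part of the change:

    import urllib.request

    # Hypothetical deployment URL; any archive produced by mkpkglists works.
    url = "https://aur.archlinux.org/packages-meta-v1.json.gz.sha256"
    with urllib.request.urlopen(url) as resp:
        # Body is a single BSD-style line: "SHA256 (<archive>) = <64 hex digits>"
        print(resp.read().decode())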
@@ -19,8 +19,12 @@ on the following, right-hand side fields are added to each item.
 """
 
 import gzip
+import hashlib
+import io
 import os
+import shutil
 import sys
+import tempfile
 
 from collections import defaultdict
 from typing import Any, Dict
@@ -37,15 +41,6 @@ from aurweb.models import Package, PackageBase, User
 
 logger = logging.get_logger("aurweb.scripts.mkpkglists")
 
-archivedir = aurweb.config.get("mkpkglists", "archivedir")
-os.makedirs(archivedir, exist_ok=True)
-
-PACKAGES = aurweb.config.get('mkpkglists', 'packagesfile')
-META = aurweb.config.get('mkpkglists', 'packagesmetafile')
-META_EXT = aurweb.config.get('mkpkglists', 'packagesmetaextfile')
-PKGBASE = aurweb.config.get('mkpkglists', 'pkgbasefile')
-USERS = aurweb.config.get('mkpkglists', 'userfile')
-
 
 TYPE_MAP = {
     "depends": "Depends",
@@ -174,7 +169,24 @@ def as_dict(package: Package) -> Dict[str, Any]:
     }
 
 
+def sha256sum(file_path: str) -> str:
+    hash = hashlib.sha256()
+    with open(file_path, "rb") as f:
+        while chunk := f.read(io.DEFAULT_BUFFER_SIZE):
+            hash.update(chunk)
+    return hash.hexdigest()
+
+
 def _main():
+    archivedir = aurweb.config.get("mkpkglists", "archivedir")
+    os.makedirs(archivedir, exist_ok=True)
+
+    PACKAGES = aurweb.config.get('mkpkglists', 'packagesfile')
+    META = aurweb.config.get('mkpkglists', 'packagesmetafile')
+    META_EXT = aurweb.config.get('mkpkglists', 'packagesmetaextfile')
+    PKGBASE = aurweb.config.get('mkpkglists', 'pkgbasefile')
+    USERS = aurweb.config.get('mkpkglists', 'userfile')
+
     bench = Benchmark()
     logger.info("Started re-creating archives, wait a while...")
 
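The sha256sum() helper added above reads the file in io.DEFAULT_BUFFER_SIZE chunks via the walrus operator, so even large archives are hashed without being loaded into memory at once. On Python 3.11+ the standard library can do the same in one call; a sketch for comparison, not part of this change:

    import hashlib

    def sha256sum_stdlib(file_path: str) -> str:
        # hashlib.file_digest() streams the file object for us (Python 3.11+).
        with open(file_path, "rb") as f:
            return hashlib.file_digest(f, "sha256").hexdigest()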
@@ -204,9 +216,14 @@ def _main():
     # Produce packages-meta-v1.json.gz
     output = list()
     snapshot_uri = aurweb.config.get("options", "snapshot_uri")
+
+    tmpdir = tempfile.mkdtemp()
+    tmp_packages = os.path.join(tmpdir, os.path.basename(PACKAGES))
+    tmp_meta = os.path.join(tmpdir, os.path.basename(META))
+    tmp_metaext = os.path.join(tmpdir, os.path.basename(META_EXT))
     gzips = {
-        "packages": gzip.open(PACKAGES, "wt"),
-        "meta": gzip.open(META, "wb"),
+        "packages": gzip.open(tmp_packages, "wt"),
+        "meta": gzip.open(tmp_meta, "wb"),
     }
 
     # Append list opening to the metafile.
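This hunk and the ones below redirect the writers from the final archive paths to paths under a fresh tempfile.mkdtemp() directory; the finished files are only moved into archivedir at the end of the run, so a crashed or in-progress run never leaves half-written archives at the public paths. A minimal sketch of the same write-then-move pattern in isolation (file names are illustrative):

    import os
    import shutil
    import tempfile

    tmpdir = tempfile.mkdtemp()
    tmp_path = os.path.join(tmpdir, "example.gz")

    # Write the full file out of public view...
    with open(tmp_path, "wb") as f:
        f.write(b"archive contents")

    # ...then publish it in a single rename (a copy if tmpdir is on another
    # filesystem, since shutil.move falls back to copying).
    shutil.move(tmp_path, "example.gz")
    os.removedirs(tmpdir)  # tmpdir is empty once everything has moved out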
@@ -215,7 +232,7 @@ def _main():
     # Produce packages.gz + packages-meta-ext-v1.json.gz
     extended = False
     if len(sys.argv) > 1 and sys.argv[1] in EXTENDED_FIELD_HANDLERS:
-        gzips["meta_ext"] = gzip.open(META_EXT, "wb")
+        gzips["meta_ext"] = gzip.open(tmp_metaext, "wb")
         # Append list opening to the meta_ext file.
         gzips.get("meta_ext").write(b"[\n")
         f = EXTENDED_FIELD_HANDLERS.get(sys.argv[1])
@@ -258,14 +275,38 @@ def _main():
     # Produce pkgbase.gz
     query = db.query(PackageBase.Name).filter(
         PackageBase.PackagerUID.isnot(None)).all()
-    with gzip.open(PKGBASE, "wt") as f:
+    tmp_pkgbase = os.path.join(tmpdir, os.path.basename(PKGBASE))
+    with gzip.open(tmp_pkgbase, "wt") as f:
         f.writelines([f"{base.Name}\n" for i, base in enumerate(query)])
 
     # Produce users.gz
     query = db.query(User.Username).all()
-    with gzip.open(USERS, "wt") as f:
+    tmp_users = os.path.join(tmpdir, os.path.basename(USERS))
+    with gzip.open(tmp_users, "wt") as f:
         f.writelines([f"{user.Username}\n" for i, user in enumerate(query)])
 
+    files = [
+        (tmp_packages, PACKAGES),
+        (tmp_meta, META),
+        (tmp_pkgbase, PKGBASE),
+        (tmp_users, USERS),
+    ]
+    if len(sys.argv) > 1 and sys.argv[1] in EXTENDED_FIELD_HANDLERS:
+        files.append((tmp_metaext, META_EXT))
+
+    for src, dst in files:
+        checksum = sha256sum(src)
+        base = os.path.basename(src)
+        checksum_formatted = f"SHA256 ({base}) = {checksum}"
+
+        checksum_file = f"{dst}.sha256"
+        with open(checksum_file, "w") as f:
+            f.write(checksum_formatted)
+
+        # Move the new archive into its rightful place.
+        shutil.move(src, dst)
+
+    os.removedirs(tmpdir)
     seconds = filters.number_format(bench.end(), 4)
     logger.info(f"Completed in {seconds} seconds.")
 
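The checksum line written here uses the tagged BSD style ("SHA256 (<name>) = <hash>"), the same shape GNU sha256sum emits with --tag, so it can be checked with common tooling or a few lines of Python. A local verification sketch; the archive path is illustrative and assumes its .sha256 sibling sits beside it, as the loop above arranges:

    import hashlib

    archive = "packages.gz"  # illustrative path
    with open(f"{archive}.sha256") as f:
        expected = f.read().rsplit(" = ", 1)[1].strip()

    # Fine for a sketch; use the chunked helper above for very large files.
    with open(archive, "rb") as f:
        actual = hashlib.sha256(f.read()).hexdigest()

    print("OK" if actual == expected else "MISMATCH")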