Compare commits

master..v6.2.13

No commits in common. "master" and "v6.2.13" have entirely different histories.

24 changed files with 411 additions and 972 deletions

View file

@@ -19,9 +19,9 @@ variables:
 lint:
   stage: .pre
   before_script:
-    - pacman -Sy --noconfirm --noprogressbar
+    - pacman -Sy --noconfirm --noprogressbar --cachedir .pkg-cache
       archlinux-keyring
-    - pacman -Syu --noconfirm --noprogressbar
+    - pacman -Syu --noconfirm --noprogressbar --cachedir .pkg-cache
       git python python-pre-commit
   script:
     - export XDG_CACHE_HOME=.pre-commit
@@ -60,7 +60,7 @@ test:
         path: coverage.xml
 
 .init_tf: &init_tf
-  - pacman -Syu --needed --noconfirm terraform
+  - pacman -Syu --needed --noconfirm --cachedir .pkg-cache terraform
  - export TF_VAR_name="aurweb-${CI_COMMIT_REF_SLUG}"
  - TF_ADDRESS="${CI_API_V4_URL}/projects/${TF_STATE_PROJECT}/terraform/state/${CI_COMMIT_REF_SLUG}"
  - cd ci/tf
@@ -97,7 +97,7 @@ provision_review:
     - deploy_review
   script:
     - *init_tf
-    - pacman -Syu --noconfirm --needed ansible git openssh jq
+    - pacman -Syu --noconfirm --needed --cachedir .pkg-cache ansible git openssh jq
     # Get ssh key from terraform state file
     - mkdir -p ~/.ssh
     - chmod 700 ~/.ssh

View file

@@ -14,12 +14,6 @@ from fastapi import FastAPI, HTTPException, Request, Response
 from fastapi.responses import RedirectResponse
 from fastapi.staticfiles import StaticFiles
 from jinja2 import TemplateNotFound
-from opentelemetry import trace
-from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
-from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
-from opentelemetry.sdk.resources import Resource
-from opentelemetry.sdk.trace import TracerProvider
-from opentelemetry.sdk.trace.export import BatchSpanProcessor
 from sqlalchemy import and_
 from starlette.exceptions import HTTPException as StarletteHTTPException
 from starlette.middleware.authentication import AuthenticationMiddleware
@@ -28,6 +22,7 @@ from starlette.middleware.sessions import SessionMiddleware
 import aurweb.captcha  # noqa: F401
 import aurweb.config
 import aurweb.filters  # noqa: F401
+import aurweb.pkgbase.util as pkgbaseutil
 from aurweb import aur_logging, prometheus, util
 from aurweb.aur_redis import redis_connection
 from aurweb.auth import BasicAuthBackend
@@ -59,17 +54,6 @@ instrumentator().add(prometheus.http_requests_total())
 instrumentator().instrument(app)
 
-# Instrument FastAPI for tracing
-FastAPIInstrumentor.instrument_app(app)
-
-resource = Resource(attributes={"service.name": "aurweb"})
-otlp_endpoint = aurweb.config.get("tracing", "otlp_endpoint")
-otlp_exporter = OTLPSpanExporter(endpoint=otlp_endpoint)
-span_processor = BatchSpanProcessor(otlp_exporter)
-trace.set_tracer_provider(TracerProvider(resource=resource))
-trace.get_tracer_provider().add_span_processor(span_processor)
-
 
 async def app_startup():
     # https://stackoverflow.com/questions/67054759/about-the-maximum-recursion-error-in-fastapi
     # Test failures have been observed by internal starlette code when
@@ -231,13 +215,7 @@ async def http_exception_handler(request: Request, exc: HTTPException) -> Response
     if matches and len(tokens) == 2:
         try:
             pkgbase = get_pkg_or_base(matches.group(1))
-            context["pkgbase"] = pkgbase
-            context["git_clone_uri_anon"] = aurweb.config.get(
-                "options", "git_clone_uri_anon"
-            )
-            context["git_clone_uri_priv"] = aurweb.config.get(
-                "options", "git_clone_uri_priv"
-            )
+            context = pkgbaseutil.make_context(request, pkgbase)
         except HTTPException:
             pass

View file

@@ -1,5 +1,4 @@
 import fakeredis
-from opentelemetry.instrumentation.redis import RedisInstrumentor
 from redis import ConnectionPool, Redis
 
 import aurweb.config
@@ -8,8 +7,6 @@ from aurweb import aur_logging
 logger = aur_logging.get_logger(__name__)
 
 pool = None
 
-RedisInstrumentor().instrument()
-
 
 class FakeConnectionPool:
     """A fake ConnectionPool class which holds an internal reference

View file

@@ -3,7 +3,6 @@
 import hashlib
 
 from jinja2 import pass_context
-from sqlalchemy import func
 
 from aurweb.db import query
 from aurweb.models import User
@@ -12,8 +11,7 @@ from aurweb.templates import register_filter
 
 def get_captcha_salts():
     """Produce salts based on the current user count."""
-    count = query(func.count(User.ID)).scalar()
+    count = query(User).count()
     salts = []
     for i in range(0, 6):
         salts.append(f"aurweb-{count - i}")
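
For context, the two counting styles in this hunk differ only in the SQL they emit. A standalone sketch of the difference (in-memory SQLite and a stand-in User model, not aurweb's actual schema):

```python
from sqlalchemy import Column, Integer, create_engine, func
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class User(Base):
    __tablename__ = "Users"
    ID = Column(Integer, primary_key=True)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([User(), User(), User()])
    session.commit()
    # master side: aggregate in SQL, fetch a single scalar.
    # Emits roughly: SELECT count("Users"."ID") FROM "Users"
    print(session.query(func.count(User.ID)).scalar())  # 3
    # v6.2.13 side: Query.count() wraps the statement in a subquery.
    # Emits roughly: SELECT count(*) FROM (SELECT ... FROM "Users")
    print(session.query(User).count())  # 3
```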

View file

@@ -298,12 +298,9 @@ def get_engine(dbname: str = None, echo: bool = False):
             connect_args["check_same_thread"] = False
 
     kwargs = {"echo": echo, "connect_args": connect_args}
-    from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
     from sqlalchemy import create_engine
 
-    engine = create_engine(get_sqlalchemy_url(), **kwargs)
-    SQLAlchemyInstrumentor().instrument(engine=engine)
-    _engines[dbname] = engine
+    _engines[dbname] = create_engine(get_sqlalchemy_url(), **kwargs)
 
     if is_sqlite:  # pragma: no cover
         setup_sqlite(_engines.get(dbname))
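
For context, get_engine() memoizes one engine per database name in _engines. A minimal sketch of that caching pattern (simplified signature, not aurweb's full implementation):

```python
from sqlalchemy import create_engine
from sqlalchemy.engine import Engine

_engines: dict[str, Engine] = {}


def get_engine(url: str, dbname: str = "aurweb", echo: bool = False) -> Engine:
    # Build each engine once; later calls reuse the cached instance.
    if dbname not in _engines:
        _engines[dbname] = create_engine(url, echo=echo)
    return _engines[dbname]


engine = get_engine("sqlite://")          # created on first call
assert engine is get_engine("sqlite://")  # served from the cache
```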

View file

@@ -2,7 +2,6 @@ from typing import Any
 
 from fastapi import Request
 from sqlalchemy import and_
-from sqlalchemy.orm import joinedload
 
 from aurweb import config, db, defaults, l10n, time, util
 from aurweb.models import PackageBase, User
@@ -41,9 +40,9 @@ def make_context(
     context["pkgbase"] = pkgbase
     context["comaintainers"] = [
         c.User
-        for c in pkgbase.comaintainers.options(joinedload(PackageComaintainer.User))
-        .order_by(PackageComaintainer.Priority.asc())
-        .all()
+        for c in pkgbase.comaintainers.order_by(
+            PackageComaintainer.Priority.asc()
+        ).all()
     ]
 
     if is_authenticated:
         context["unflaggers"] = context["comaintainers"].copy()

View file

@@ -190,17 +190,6 @@ async def package(
     if not all_deps:
         deps = deps.limit(max_listing)
     context["dependencies"] = deps.all()
-    # Existing dependencies to avoid multiple lookups
-    context["dependencies_names_from_aur"] = [
-        item.Name
-        for item in db.query(models.Package)
-        .filter(
-            models.Package.Name.in_(
-                pkg.package_dependencies.with_entities(models.PackageDependency.DepName)
-            )
-        )
-        .all()
-    ]
 
     # Package requirements (other packages depend on this one).
     reqs = pkgutil.pkg_required(pkg.Name, [p.RelName for p in rels_data.get("p", [])])
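
The removed block batches a single IN query whose result the package template (last hunk of this compare) tests with a membership check. A standalone sketch of that batch-then-test pattern, with stand-in models and data:

```python
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Package(Base):
    __tablename__ = "Packages"
    ID = Column(Integer, primary_key=True)
    Name = Column(String)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Package(Name="python-foo"), Package(Name="python-bar")])
    session.commit()

    dep_names = ["python-foo", "glibc", "python-bar"]
    # One batched lookup instead of one query per rendered dependency.
    aur_names = {
        name
        for (name,) in session.query(Package.Name)
        .filter(Package.Name.in_(dep_names))
        .all()
    }
    for name in dep_names:
        print(name, name in aur_names)  # True, False, True
```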

View file

@@ -177,6 +177,3 @@ expiry_time_search = 600
 expiry_time_statistics = 300
 ; number of seconds after a cache entry for rss queries expires, default is 5 minutes
 expiry_time_rss = 300
-
-[tracing]
-otlp_endpoint = http://localhost:4318/v1/traces

View file

@@ -73,6 +73,3 @@ pkgnames-repo = pkgnames.git
 
 [aurblup]
 db-path = YOUR_AUR_ROOT/aurblup/
-
-[tracing]
-otlp_endpoint = http://tempo:4318/v1/traces

View file

@@ -1,4 +1,5 @@
----
+version: "3.8"
+
 services:
   ca:
     volumes:

View file

@@ -1,10 +1,16 @@
----
+version: "3.8"
+
 services:
   ca:
     volumes:
       - ./data:/data
       - step:/root/.step
+  mariadb_init:
+    depends_on:
+      mariadb:
+        condition: service_healthy
   git:
     volumes:
       - git_data:/aurweb/aur.git
@@ -15,6 +21,9 @@ services:
       - git_data:/aurweb/aur.git
       - ./data:/data
       - smartgit_run:/var/run/smartgit
+    depends_on:
+      mariadb:
+        condition: service_healthy
   fastapi:
     volumes:

View file

@@ -1,4 +1,3 @@
----
 #
 # Docker service definitions for the aurweb project.
 #
@@ -17,6 +16,8 @@
 #
 # Copyright (C) 2021 aurweb Development
 # All Rights Reserved.
+version: "3.8"
+
 services:
   aurweb-image:
     build: .
@@ -48,7 +49,7 @@ services:
     image: aurweb:latest
     init: true
     entrypoint: /docker/mariadb-entrypoint.sh
-    command: /usr/bin/mariadbd-safe --datadir=/var/lib/mysql
+    command: /usr/bin/mysqld_safe --datadir=/var/lib/mysql
     ports:
       # This will expose mariadbd on 127.0.0.1:13306 in the host.
       # Ex: `mysql -uaur -paur -h 127.0.0.1 -P 13306 aurweb`
@@ -80,7 +81,7 @@ services:
     environment:
       - MARIADB_PRIVILEGED=1
     entrypoint: /docker/mariadb-entrypoint.sh
-    command: /usr/bin/mariadbd-safe --datadir=/var/lib/mysql
+    command: /usr/bin/mysqld_safe --datadir=/var/lib/mysql
     ports:
       # This will expose mariadbd on 127.0.0.1:13307 in the host.
      # Ex: `mysql -uaur -paur -h 127.0.0.1 -P 13306 aurweb`
@@ -106,10 +107,8 @@ services:
       test: "bash /docker/health/sshd.sh"
       interval: 3s
     depends_on:
-      mariadb:
-        condition: service_healthy
       mariadb_init:
-        condition: service_completed_successfully
+        condition: service_started
     volumes:
       - mariadb_run:/var/run/mysqld
@@ -123,9 +122,6 @@ services:
     healthcheck:
       test: "bash /docker/health/smartgit.sh"
       interval: 3s
-    depends_on:
-      mariadb:
-        condition: service_healthy
 
   cgit-fastapi:
     image: aurweb:latest
@@ -156,10 +152,8 @@ services:
     entrypoint: /docker/cron-entrypoint.sh
     command: /docker/scripts/run-cron.sh
     depends_on:
-      mariadb:
-        condition: service_healthy
       mariadb_init:
-        condition: service_completed_successfully
+        condition: service_started
     volumes:
       - ./aurweb:/aurweb/aurweb
       - mariadb_run:/var/run/mysqld
@@ -188,12 +182,6 @@ services:
         condition: service_healthy
       cron:
         condition: service_started
-      mariadb:
-        condition: service_healthy
-      mariadb_init:
-        condition: service_completed_successfully
-      tempo:
-        condition: service_healthy
     volumes:
       - archives:/var/lib/aurweb/archives
       - mariadb_run:/var/run/mysqld
@@ -293,56 +281,6 @@ services:
       - ./test:/aurweb/test
       - ./templates:/aurweb/templates
 
-  grafana:
-    # TODO: check if we need init: true
-    image: grafana/grafana:11.1.3
-    environment:
-      - GF_AUTH_ANONYMOUS_ENABLED=true
-      - GF_AUTH_ANONYMOUS_ORG_ROLE=Admin
-      - GF_AUTH_DISABLE_LOGIN_FORM=true
-      - GF_LOG_LEVEL=warn
-    # check if depends are correct, does stopping or restarting a child exit grafana?
-    depends_on:
-      prometheus:
-        condition: service_healthy
-      tempo:
-        condition: service_healthy
-    ports:
-      - "127.0.0.1:3000:3000"
-    volumes:
-      - ./docker/config/grafana/datasources:/etc/grafana/provisioning/datasources
-
-  prometheus:
-    image: prom/prometheus:latest
-    command:
-      - --config.file=/etc/prometheus/prometheus.yml
-      - --web.enable-remote-write-receiver
-      - --web.listen-address=prometheus:9090
-    healthcheck:
-      # TODO: check if there is a status route
-      test: "sh /docker/health/prometheus.sh"
-      interval: 3s
-    ports:
-      - "127.0.0.1:9090:9090"
-    volumes:
-      - ./docker/config/prometheus.yml:/etc/prometheus/prometheus.yml
-      - ./docker/health/prometheus.sh:/docker/health/prometheus.sh
-
-  tempo:
-    image: grafana/tempo:2.5.0
-    command:
-      - -config.file=/etc/tempo/config.yml
-    healthcheck:
-      # TODO: check if there is a status route
-      test: "sh /docker/health/tempo.sh"
-      interval: 3s
-    ports:
-      - "127.0.0.1:3200:3200"
-      - "127.0.0.1:4318:4318"
-    volumes:
-      - ./docker/config/tempo.yml:/etc/tempo/config.yml
-      - ./docker/health/tempo.sh:/docker/health/tempo.sh
-
 volumes:
   mariadb_test_run: {}
   mariadb_run: {}  # Share /var/run/mysqld/mysqld.sock

View file

@@ -47,7 +47,7 @@ Luckily such data can be generated.
 docker compose exec fastapi /bin/bash
 pacman -S words fortune-mod
 ./schema/gendummydata.py dummy.sql
-mariadb aurweb < dummy.sql
+mysql aurweb < dummy.sql
 ```
 
 The generation script may prompt you to install other Arch packages before it

View file

@@ -1,42 +0,0 @@
----
-apiVersion: 1
-
-deleteDatasources:
-  - name: Prometheus
-  - name: Tempo
-
-datasources:
-  - name: Prometheus
-    type: prometheus
-    uid: prometheus
-    access: proxy
-    url: http://prometheus:9090
-    orgId: 1
-    editable: false
-    jsonData:
-      timeInterval: 1m
-  - name: Tempo
-    type: tempo
-    uid: tempo
-    access: proxy
-    url: http://tempo:3200
-    orgId: 1
-    editable: false
-    jsonData:
-      tracesToMetrics:
-        datasourceUid: 'prometheus'
-        spanStartTimeShift: '1h'
-        spanEndTimeShift: '-1h'
-      serviceMap:
-        datasourceUid: 'prometheus'
-      nodeGraph:
-        enabled: true
-      search:
-        hide: false
-      traceQuery:
-        timeShiftEnabled: true
-        spanStartTimeShift: '1h'
-        spanEndTimeShift: '-1h'
-      spanBar:
-        type: 'Tag'
-        tag: 'http.path'

View file

@@ -1,15 +0,0 @@
----
-global:
-  scrape_interval: 60s
-
-scrape_configs:
-  - job_name: tempo
-    static_configs:
-      - targets: ['tempo:3200']
-        labels:
-          instance: tempo
-  - job_name: aurweb
-    static_configs:
-      - targets: ['fastapi:8000']
-        labels:
-          instance: aurweb

View file

@@ -1,54 +0,0 @@
----
-stream_over_http_enabled: true
-
-server:
-  http_listen_address: tempo
-  http_listen_port: 3200
-  log_level: info
-
-query_frontend:
-  search:
-    duration_slo: 5s
-    throughput_bytes_slo: 1.073741824e+09
-  trace_by_id:
-    duration_slo: 5s
-
-distributor:
-  receivers:
-    otlp:
-      protocols:
-        http:
-          endpoint: tempo:4318
-  log_received_spans:
-    enabled: false
-  metric_received_spans:
-    enabled: false
-
-ingester:
-  max_block_duration: 5m
-
-compactor:
-  compaction:
-    block_retention: 1h
-
-metrics_generator:
-  registry:
-    external_labels:
-      source: tempo
-  storage:
-    path: /tmp/tempo/generator/wal
-    remote_write:
-      - url: http://prometheus:9090/api/v1/write
-        send_exemplars: true
-  traces_storage:
-    path: /tmp/tempo/generator/traces
-
-storage:
-  trace:
-    backend: local
-    wal:
-      path: /tmp/tempo/wal
-    local:
-      path: /tmp/tempo/blocks
-
-overrides:
-  metrics_generator_processors: [service-graphs, span-metrics, local-blocks]

View file

@@ -1,2 +1,2 @@
 #!/bin/bash
-exec mariadb-admin ping --silent
+exec mysqladmin ping --silent

View file

@@ -1,2 +0,0 @@
-#!/bin/sh
-exec wget -q http://prometheus:9090/status -O /dev/null

View file

@@ -1,2 +0,0 @@
-#!/bin/sh
-exec wget -q http://tempo:3200/status -O /dev/null

View file

@@ -6,8 +6,8 @@ MYSQL_DATA=/var/lib/mysql
 mariadb-install-db --user=mysql --basedir=/usr --datadir=$MYSQL_DATA
 
 # Start it up.
-mariadbd-safe --datadir=$MYSQL_DATA --skip-networking &
-while ! mariadb-admin ping 2>/dev/null; do
+mysqld_safe --datadir=$MYSQL_DATA --skip-networking &
+while ! mysqladmin ping 2>/dev/null; do
     sleep 1s
 done
@@ -15,17 +15,17 @@ done
 DATABASE="aurweb" # Persistent database for fastapi.
 
 echo "Taking care of primary database '${DATABASE}'..."
-mariadb -u root -e "CREATE USER IF NOT EXISTS 'aur'@'localhost' IDENTIFIED BY 'aur';"
-mariadb -u root -e "CREATE USER IF NOT EXISTS 'aur'@'%' IDENTIFIED BY 'aur';"
-mariadb -u root -e "CREATE DATABASE IF NOT EXISTS $DATABASE;"
-mariadb -u root -e "CREATE USER IF NOT EXISTS 'aur'@'%' IDENTIFIED BY 'aur';"
-mariadb -u root -e "GRANT ALL ON aurweb.* TO 'aur'@'localhost';"
-mariadb -u root -e "GRANT ALL ON aurweb.* TO 'aur'@'%';"
-mariadb -u root -e "CREATE USER IF NOT EXISTS 'root'@'%' IDENTIFIED BY 'aur';"
-mariadb -u root -e "GRANT ALL ON *.* TO 'root'@'%' WITH GRANT OPTION;"
-mariadb-admin -uroot shutdown
+mysql -u root -e "CREATE USER IF NOT EXISTS 'aur'@'localhost' IDENTIFIED BY 'aur';"
+mysql -u root -e "CREATE USER IF NOT EXISTS 'aur'@'%' IDENTIFIED BY 'aur';"
+mysql -u root -e "CREATE DATABASE IF NOT EXISTS $DATABASE;"
+mysql -u root -e "CREATE USER IF NOT EXISTS 'aur'@'%' IDENTIFIED BY 'aur';"
+mysql -u root -e "GRANT ALL ON aurweb.* TO 'aur'@'localhost';"
+mysql -u root -e "GRANT ALL ON aurweb.* TO 'aur'@'%';"
+mysql -u root -e "CREATE USER IF NOT EXISTS 'root'@'%' IDENTIFIED BY 'aur';"
+mysql -u root -e "GRANT ALL ON *.* TO 'root'@'%' WITH GRANT OPTION;"
+mysqladmin -uroot shutdown
 
 exec "$@"

View file

@@ -13,7 +13,7 @@ pacman -Sy --noconfirm --noprogressbar archlinux-keyring
 
 # Install other OS dependencies.
 pacman -Syu --noconfirm --noprogressbar \
-    git gpgme nginx redis openssh \
+    --cachedir .pkg-cache git gpgme nginx redis openssh \
     mariadb mariadb-libs cgit-aurweb uwsgi uwsgi-plugin-cgi \
     python-pip pyalpm python-srcinfo curl libeatmydata cronie \
     python-poetry python-poetry-core step-cli step-ca asciidoc \

poetry.lock generated

File diff suppressed because it is too large.

View file

@@ -16,7 +16,7 @@ combine_as_imports = true
 #
 [tool.poetry]
 name = "aurweb"
-version = "v6.2.16"
+version = "v6.2.13"
 license = "GPL-2.0-only"
 description = "Source code for the Arch User Repository's website"
 homepage = "https://aur.archlinux.org"
@@ -52,7 +52,7 @@ build-backend = "poetry.masonry.api"
 "Request Mailing List" = "https://lists.archlinux.org/listinfo/aur-requests"
 
 [tool.poetry.dependencies]
-python = ">=3.10,<3.14"
+python = ">=3.9,<3.13"
 
 # poetry-dynamic-versioning is used to produce tool.poetry.version
 # based on git tags.
@@ -69,8 +69,9 @@ httpx = "^0.27.0"
 itsdangerous = "^2.1.2"
 lxml = "^5.2.1"
 orjson = "^3.10.0"
-pygit2 = "^1.17.0"
-python-multipart = "0.0.19"
+protobuf = "^5.26.1"
+pygit2 = "^1.14.1"
+python-multipart = "^0.0.9"
 redis = "^5.0.3"
 requests = "^2.31.0"
 paginate = "^0.5.6"
@@ -83,12 +84,12 @@ Jinja2 = "^3.1.3"
 Markdown = "^3.6"
 Werkzeug = "^3.0.2"
 SQLAlchemy = "^1.4.52"
-greenlet = "3.1.1"  # Explicitly add greenlet (dependency of SQLAlchemy) for python 3.13 support
 
 # ASGI
 uvicorn = "^0.30.0"
 gunicorn = "^22.0.0"
 Hypercorn = "^0.17.0"
+prometheus-fastapi-instrumentator = "^7.0.0"
 pytest-xdist = "^3.5.0"
 filelock = "^3.13.3"
 posix-ipc = "^1.1.1"
@@ -97,22 +98,13 @@ fastapi = "^0.112.0"
 srcinfo = "^0.1.2"
 tomlkit = "^0.13.0"
 
-# Tracing
-prometheus-fastapi-instrumentator = "^7.0.0"
-opentelemetry-api = "^1.26.0"
-opentelemetry-sdk = "^1.26.0"
-opentelemetry-exporter-otlp-proto-http = "^1.26.0"
-opentelemetry-instrumentation-fastapi = "^0.47b0"
-opentelemetry-instrumentation-redis = "^0.47b0"
-opentelemetry-instrumentation-sqlalchemy = "^0.47b0"
-
 [tool.poetry.dev-dependencies]
 coverage = "^7.4.4"
 pytest = "^8.1.1"
 pytest-asyncio = "^0.23.0"
 pytest-cov = "^5.0.0"
 pytest-tap = "^3.4"
-watchfiles = "^1.0.4"
+watchfiles = "^0.22.0"
 
 [tool.poetry.scripts]
 aurweb-git-auth = "aurweb.git.auth:main"

View file

@@ -20,7 +20,7 @@
         {% endif %}
     {% else -%}
         </a>
-        {%- if dep.DepName in dependencies_names_from_aur -%}
+        {%- if dep.is_aur_package() -%}
            <sup><small>AUR</small></sup>
        {% endif %}
    {% endif %}