From c83c5cdc4227b025ba74b3c5db4d06ea06b33668 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Sat, 19 Feb 2022 11:28:16 -0800 Subject: [PATCH 001/415] change: log out details about PROMETHEUS_MULTIPROC_DIR Additionally, respond with a 503 if the var is not set when /metrics is requested. Signed-off-by: Kevin Morris --- aurweb/asgi.py | 4 ++++ aurweb/routers/html.py | 10 +++++++--- test/test_asgi.py | 11 +++++++++++ test/test_html.py | 15 +++++++++++++-- 4 files changed, 35 insertions(+), 5 deletions(-) diff --git a/aurweb/asgi.py b/aurweb/asgi.py index ad0b7ca0..fa2526ed 100644 --- a/aurweb/asgi.py +++ b/aurweb/asgi.py @@ -74,6 +74,10 @@ async def app_startup(): if not session_secret: raise Exception("[fastapi] session_secret must not be empty") + if not os.environ.get("PROMETHEUS_MULTIPROC_DIR", None): + logger.warning("$PROMETHEUS_MULTIPROC_DIR is not set, the /metrics " + "endpoint is disabled.") + app.mount("/static/css", StaticFiles(directory="web/html/css"), name="static_css") diff --git a/aurweb/routers/html.py b/aurweb/routers/html.py index b9d291d2..d31a32c7 100644 --- a/aurweb/routers/html.py +++ b/aurweb/routers/html.py @@ -13,7 +13,7 @@ from sqlalchemy import and_, case, or_ import aurweb.config import aurweb.models.package_request -from aurweb import cookies, db, models, time, util +from aurweb import cookies, db, logging, models, time, util from aurweb.cache import db_count_cache from aurweb.exceptions import handle_form_exceptions from aurweb.models.account_type import TRUSTED_USER_AND_DEV_ID, TRUSTED_USER_ID @@ -21,6 +21,7 @@ from aurweb.models.package_request import PENDING_ID from aurweb.packages.util import query_notified, query_voted, updated_packages from aurweb.templates import make_context, render_template +logger = logging.get_logger(__name__) router = APIRouter() @@ -230,9 +231,12 @@ async def archive_sha256(request: Request, archive: str): @router.get("/metrics") async def metrics(request: Request): + if not 
os.environ.get("PROMETHEUS_MULTIPROC_DIR", None): + return Response("Prometheus metrics are not enabled.", + status_code=HTTPStatus.SERVICE_UNAVAILABLE) + registry = CollectorRegistry() - if os.environ.get("PROMETHEUS_MULTIPROC_DIR", None): # pragma: no cover - multiprocess.MultiProcessCollector(registry) + multiprocess.MultiProcessCollector(registry) data = generate_latest(registry) headers = { "Content-Type": CONTENT_TYPE_LATEST, diff --git a/test/test_asgi.py b/test/test_asgi.py index 667ae871..c693a3a9 100644 --- a/test/test_asgi.py +++ b/test/test_asgi.py @@ -104,6 +104,17 @@ async def test_asgi_app_unsupported_backends(): await aurweb.asgi.app_startup() +@pytest.mark.asyncio +async def test_asgi_app_disabled_metrics(caplog: pytest.LogCaptureFixture): + env = {"PROMETHEUS_MULTIPROC_DIR": str()} + with mock.patch.dict(os.environ, env): + await aurweb.asgi.app_startup() + + expected = ("$PROMETHEUS_MULTIPROC_DIR is not set, the /metrics " + "endpoint is disabled.") + assert expected in caplog.text + + @pytest.fixture def use_traceback(): config_getboolean = aurweb.config.getboolean diff --git a/test/test_html.py b/test/test_html.py index b97d3571..ffe2a9f2 100644 --- a/test/test_html.py +++ b/test/test_html.py @@ -160,12 +160,23 @@ def test_archive_sig_404(client: TestClient): def test_metrics(client: TestClient): - with client as request: - resp = request.get("/metrics") + with tempfile.TemporaryDirectory() as tmpdir: + env = {"PROMETHEUS_MULTIPROC_DIR": tmpdir} + with mock.patch.dict(os.environ, env): + with client as request: + resp = request.get("/metrics") assert resp.status_code == int(HTTPStatus.OK) assert resp.headers.get("Content-Type").startswith("text/plain") +def test_disabled_metrics(client: TestClient): + env = {"PROMETHEUS_MULTIPROC_DIR": str()} + with mock.patch.dict(os.environ, env): + with client as request: + resp = request.get("/metrics") + assert resp.status_code == int(HTTPStatus.SERVICE_UNAVAILABLE) + + def test_rtl(client: TestClient): 
responses = {} expected = [ From 4a4fd015635c9a392f224eba51ed588c758fa441 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Sat, 19 Feb 2022 16:01:06 -0800 Subject: [PATCH 002/415] fix: blanking out particular fields when editing accounts Signed-off-by: Kevin Morris --- aurweb/users/update.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/aurweb/users/update.py b/aurweb/users/update.py index 8e42765e..5a32fd01 100644 --- a/aurweb/users/update.py +++ b/aurweb/users/update.py @@ -19,11 +19,11 @@ def simple(U: str = str(), E: str = str(), H: bool = False, user.Username = U or user.Username user.Email = E or user.Email user.HideEmail = strtobool(H) - user.BackupEmail = BE or user.BackupEmail - user.RealName = R or user.RealName - user.Homepage = HP or user.Homepage - user.IRCNick = I or user.IRCNick - user.PGPKey = K or user.PGPKey + user.BackupEmail = user.BackupEmail if BE is None else BE + user.RealName = user.RealName if R is None else R + user.Homepage = user.Homepage if HP is None else HP + user.IRCNick = user.IRCNick if I is None else I + user.PGPKey = user.PGPKey if K is None else K user.Suspended = strtobool(S) user.InactivityTS = now * int(strtobool(J)) user.CommentNotify = strtobool(CN) From 80622cc96611d36ee8b63eb3c95d952e290668b6 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Sat, 19 Feb 2022 16:12:15 -0800 Subject: [PATCH 003/415] fix: suspend check should check Suspended... 
This was causing some false negative errors in the update process, and it clearly not correct -- oops :( Signed-off-by: Kevin Morris --- aurweb/users/validate.py | 5 +++-- test/test_accounts_routes.py | 6 +++--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/aurweb/users/validate.py b/aurweb/users/validate.py index 26f6eec6..de51e3ff 100644 --- a/aurweb/users/validate.py +++ b/aurweb/users/validate.py @@ -15,6 +15,7 @@ from aurweb.captcha import get_captcha_answer, get_captcha_salts, get_captcha_to from aurweb.exceptions import ValidationError from aurweb.models.account_type import ACCOUNT_TYPE_NAME from aurweb.models.ssh_pub_key import get_fingerprint +from aurweb.util import strtobool logger = logging.get_logger(__name__) @@ -26,9 +27,9 @@ def invalid_fields(E: str = str(), U: str = str(), **kwargs) -> None: def invalid_suspend_permission(request: Request = None, user: models.User = None, - J: bool = False, + S: str = "False", **kwargs) -> None: - if not request.user.is_elevated() and J != bool(user.InactivityTS): + if not request.user.is_elevated() and strtobool(S) != bool(user.Suspended): raise ValidationError([ "You do not have permission to suspend accounts."]) diff --git a/test/test_accounts_routes.py b/test/test_accounts_routes.py index e532e341..37b3d130 100644 --- a/test/test_accounts_routes.py +++ b/test/test_accounts_routes.py @@ -916,13 +916,13 @@ def test_post_account_edit_error_invalid_password(client: TestClient, assert "Invalid password." 
in content -def test_post_account_edit_inactivity_unauthorized(client: TestClient, - user: User): +def test_post_account_edit_suspend_unauthorized(client: TestClient, + user: User): cookies = {"AURSID": user.login(Request(), "testPassword")} post_data = { "U": "test", "E": "test@example.org", - "J": True, + "S": True, "passwd": "testPassword" } with client as request: From 1e31db47ab20d86a0b0c943299113c3daaf6089c Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Sat, 19 Feb 2022 16:32:49 -0800 Subject: [PATCH 004/415] upgrade: bump to v6.0.18 Signed-off-by: Kevin Morris --- aurweb/config.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aurweb/config.py b/aurweb/config.py index 61b60402..b7aa3027 100644 --- a/aurweb/config.py +++ b/aurweb/config.py @@ -6,7 +6,7 @@ from typing import Any # Publicly visible version of aurweb. This is used to display # aurweb versioning in the footer and must be maintained. # Todo: Make this dynamic/automated. -AURWEB_VERSION = "v6.0.17" +AURWEB_VERSION = "v6.0.18" _parser = None diff --git a/pyproject.toml b/pyproject.toml index 89b149a8..0b21a643 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ # [tool.poetry] name = "aurweb" -version = "v6.0.17" +version = "v6.0.18" license = "GPL-2.0-only" description = "Source code for the Arch User Repository's website" homepage = "https://aur.archlinux.org" From 6e837e0c023404e0a1c43dbdfde0826acb1f3381 Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Mon, 21 Feb 2022 10:25:01 +0000 Subject: [PATCH 005/415] fix: always provide a path https://github.com/stephenhillier/starlette_exporter/commit/891efcd142da5a13f72ec9647ad0b8aca21075a8 --- aurweb/prometheus.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/aurweb/prometheus.py b/aurweb/prometheus.py index dae56320..73be3ef6 100644 --- a/aurweb/prometheus.py +++ b/aurweb/prometheus.py @@ -70,9 +70,17 @@ def http_requests_total() -> Callable[[Info], 
None]: if not (scope.get("endpoint", None) and scope.get("router", None)): return None + root_path = scope.get("root_path", "") + app = scope.get("app", {}) + + if hasattr(app, "root_path"): + app_root_path = getattr(app, "root_path") + if root_path.startswith(app_root_path): + root_path = root_path[len(app_root_path):] + base_scope = { "type": scope.get("type"), - "path": scope.get("root_path", "") + scope.get("path"), + "path": root_path + scope.get("path"), "path_params": scope.get("path_params", {}), "method": scope.get("method") } From 9f452a62e58ee6212e64e476c69f19dbdbfffb48 Mon Sep 17 00:00:00 2001 From: Colin Woodbury Date: Mon, 21 Feb 2022 11:56:57 -0800 Subject: [PATCH 006/415] docs: fix link formatting in CONTRIBUTING --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index d1b0da60..2deaf237 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -7,7 +7,7 @@ Before sending patches, you are recommended to run `flake8` and `isort`. You can add a git hook to do this by installing `python-pre-commit` and running `pre-commit install`. -[1] https://lists.archlinux.org/listinfo/aur-dev +[1]: https://lists.archlinux.org/listinfo/aur-dev ### Coding Guidelines From 7c3637971571c1b5757d634aa627a91ff96999da Mon Sep 17 00:00:00 2001 From: Colin Woodbury Date: Mon, 21 Feb 2022 14:18:26 -0800 Subject: [PATCH 007/415] docs(docker): basic usage instructions --- docker/README.md | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 docker/README.md diff --git a/docker/README.md b/docker/README.md new file mode 100644 index 00000000..dceee74f --- /dev/null +++ b/docker/README.md @@ -0,0 +1,39 @@ +# Aurweb and Docker + +The `INSTALL` document details a manual Aurweb setup, but Docker images are also +provided here to avoid the complications of database configuration (and so +forth). 
+ +### Setup + +Naturally, both `docker` and `docker-compose` must be installed, and your Docker +service must be started: + +```sh +systemctl start docker.service +``` + +The main image - `aurweb` - must be built manually: + +```sh +docker compose build aurweb-image +``` + +### Starting and Stopping the Services + +With the above steps complete, you can bring up an initial cluster: + +```sh +docker compose up +``` + +Subsequent runs will be done with `start` instead of `up`. The cluster can be +stopped with `docker compose stop`. + +### Testing + +With a running cluster, execute the following in a new terminal: + +```sh +docker compose run test +``` From 27f30212e83609fafe2081f3366d376f5ecf69df Mon Sep 17 00:00:00 2001 From: Colin Woodbury Date: Mon, 21 Feb 2022 14:40:18 -0800 Subject: [PATCH 008/415] docs(docker): note ports and `curl` usage --- docker/README.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docker/README.md b/docker/README.md index dceee74f..6fa2f142 100644 --- a/docker/README.md +++ b/docker/README.md @@ -37,3 +37,14 @@ With a running cluster, execute the following in a new terminal: ```sh docker compose run test ``` + +### Querying the RPC + +The Fast (Python) API runs on Port 8444, while the legacy PHP version runs +on 8443. You can query one like so: + +```sh +curl -k "https://localhost:8444/rpc/?v=5&type=search&arg=python" +``` + +`-k` bypasses local certificate issues that `curl` will otherwise complain about. 
From 3aa8d523f5ee6beb13a9e981e2f8754f79c54578 Mon Sep 17 00:00:00 2001 From: Colin Woodbury Date: Mon, 21 Feb 2022 16:49:38 -0800 Subject: [PATCH 009/415] change(rpc): `search` module reformatting --- aurweb/packages/search.py | 93 +++++++++++++++++++++------------------ 1 file changed, 50 insertions(+), 43 deletions(-) diff --git a/aurweb/packages/search.py b/aurweb/packages/search.py index 4a6eb75f..5ba72652 100644 --- a/aurweb/packages/search.py +++ b/aurweb/packages/search.py @@ -12,7 +12,7 @@ from aurweb.models.package_vote import PackageVote class PackageSearch: - """ A Package search query builder. """ + """A Package search query builder.""" # A constant mapping of short to full name sort orderings. FULL_SORT_ORDER = {"d": "desc", "a": "asc"} @@ -24,14 +24,18 @@ class PackageSearch: if self.user: self.query = self.query.join( PackageVote, - and_(PackageVote.PackageBaseID == PackageBase.ID, - PackageVote.UsersID == self.user.ID), - isouter=True + and_( + PackageVote.PackageBaseID == PackageBase.ID, + PackageVote.UsersID == self.user.ID, + ), + isouter=True, ).join( PackageNotification, - and_(PackageNotification.PackageBaseID == PackageBase.ID, - PackageNotification.UserID == self.user.ID), - isouter=True + and_( + PackageNotification.PackageBaseID == PackageBase.ID, + PackageNotification.UserID == self.user.ID, + ), + isouter=True, ) self.ordering = "d" @@ -47,7 +51,7 @@ class PackageSearch: "m": self._search_by_maintainer, "c": self._search_by_comaintainer, "M": self._search_by_co_or_maintainer, - "s": self._search_by_submitter + "s": self._search_by_submitter, } # Setup SB (Sort By) callbacks. 
@@ -58,7 +62,7 @@ class PackageSearch: "w": self._sort_by_voted, "o": self._sort_by_notify, "m": self._sort_by_maintainer, - "l": self._sort_by_last_modified + "l": self._sort_by_last_modified, } self._joined_user = False @@ -66,12 +70,10 @@ class PackageSearch: self._joined_comaint = False def _join_user(self, outer: bool = True) -> orm.Query: - """ Centralized joining of a package base's maintainer. """ + """Centralized joining of a package base's maintainer.""" if not self._joined_user: self.query = self.query.join( - User, - User.ID == PackageBase.MaintainerUID, - isouter=outer + User, User.ID == PackageBase.MaintainerUID, isouter=outer ) self._joined_user = True return self.query @@ -87,7 +89,7 @@ class PackageSearch: self.query = self.query.join( PackageComaintainer, PackageComaintainer.PackageBaseID == PackageBase.ID, - isouter=isouter + isouter=isouter, ) self._joined_comaint = True return self.query @@ -95,8 +97,10 @@ class PackageSearch: def _search_by_namedesc(self, keywords: str) -> orm.Query: self._join_user() self.query = self.query.filter( - or_(Package.Name.like(f"%{keywords}%"), - Package.Description.like(f"%{keywords}%")) + or_( + Package.Name.like(f"%{keywords}%"), + Package.Description.like(f"%{keywords}%"), + ) ) return self @@ -132,8 +136,7 @@ class PackageSearch: self._join_user() if keywords: self.query = self.query.filter( - and_(User.Username == keywords, - User.ID == PackageBase.MaintainerUID) + and_(User.Username == keywords, User.ID == PackageBase.MaintainerUID) ) else: self.query = self.query.filter(PackageBase.MaintainerUID.is_(None)) @@ -197,8 +200,7 @@ class PackageSearch: # in terms of performance. We should improve this; there's no # reason it should take _longer_. 
column = getattr( - case([(models.PackageVote.UsersID == self.user.ID, 1)], else_=0), - order + case([(models.PackageVote.UsersID == self.user.ID, 1)], else_=0), order ) name = getattr(models.Package.Name, order) self.query = self.query.order_by(column(), name()) @@ -209,9 +211,8 @@ class PackageSearch: # in terms of performance. We should improve this; there's no # reason it should take _longer_. column = getattr( - case([(models.PackageNotification.UserID == self.user.ID, 1)], - else_=0), - order + case([(models.PackageNotification.UserID == self.user.ID, 1)], else_=0), + order, ) name = getattr(models.Package.Name, order) self.query = self.query.order_by(column(), name()) @@ -239,16 +240,16 @@ class PackageSearch: return callback(ordering) def count(self) -> int: - """ Return internal query's count. """ + """Return internal query's count.""" return self.query.count() def results(self) -> orm.Query: - """ Return internal query. """ + """Return internal query.""" return self.query class RPCSearch(PackageSearch): - """ A PackageSearch-derived RPC package search query builder. + """A PackageSearch-derived RPC package search query builder. With RPC search, we need a subset of PackageSearch's handlers, with a few additional handlers added. So, within the RPCSearch @@ -270,52 +271,60 @@ class RPCSearch(PackageSearch): # We keep: "nd", "n" and "m". We also overlay four new by params # on top: "depends", "makedepends", "optdepends" and "checkdepends". 
self.search_by_cb = { - k: v for k, v in self.search_by_cb.items() + k: v + for k, v in self.search_by_cb.items() if k not in RPCSearch.keys_removed } - self.search_by_cb.update({ - "depends": self._search_by_depends, - "makedepends": self._search_by_makedepends, - "optdepends": self._search_by_optdepends, - "checkdepends": self._search_by_checkdepends - }) + self.search_by_cb.update( + { + "depends": self._search_by_depends, + "makedepends": self._search_by_makedepends, + "optdepends": self._search_by_optdepends, + "checkdepends": self._search_by_checkdepends, + } + ) # We always want an optional Maintainer in the RPC. self._join_user() def _join_depends(self, dep_type_id: int) -> orm.Query: - """ Join Package with PackageDependency and filter results + """Join Package with PackageDependency and filter results based on `dep_type_id`. :param dep_type_id: DependencyType ID :returns: PackageDependency-joined orm.Query """ self.query = self.query.join(models.PackageDependency).filter( - models.PackageDependency.DepTypeID == dep_type_id) + models.PackageDependency.DepTypeID == dep_type_id + ) return self.query def _search_by_depends(self, keywords: str) -> "RPCSearch": self.query = self._join_depends(DEPENDS_ID).filter( - models.PackageDependency.DepName == keywords) + models.PackageDependency.DepName == keywords + ) return self def _search_by_makedepends(self, keywords: str) -> "RPCSearch": self.query = self._join_depends(MAKEDEPENDS_ID).filter( - models.PackageDependency.DepName == keywords) + models.PackageDependency.DepName == keywords + ) return self def _search_by_optdepends(self, keywords: str) -> "RPCSearch": self.query = self._join_depends(OPTDEPENDS_ID).filter( - models.PackageDependency.DepName == keywords) + models.PackageDependency.DepName == keywords + ) return self def _search_by_checkdepends(self, keywords: str) -> "RPCSearch": self.query = self._join_depends(CHECKDEPENDS_ID).filter( - models.PackageDependency.DepName == keywords) + 
models.PackageDependency.DepName == keywords + ) return self def search_by(self, by: str, keywords: str) -> "RPCSearch": - """ Override inherited search_by. In this override, we reduce the + """Override inherited search_by. In this override, we reduce the scope of what we handle within this function. We do not set `by` to a default of "nd" in the RPC, as the RPC returns an error when incorrect `by` fields are specified. @@ -329,6 +338,4 @@ class RPCSearch(PackageSearch): return result def results(self) -> orm.Query: - return self.query.filter( - models.PackageBase.PackagerUID.isnot(None) - ) + return self.query.filter(models.PackageBase.PackagerUID.isnot(None)) From 51d4b7f9935aa3bef34c151a417898985caf69f7 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Wed, 23 Feb 2022 14:06:07 -0800 Subject: [PATCH 010/415] fix(rpc): limit Package results, not relationships ...This was an obvious bug in hindsight. Apologies :( Closes #314 Signed-off-by: Kevin Morris --- aurweb/rpc.py | 14 +++++++------- test/test_rpc.py | 12 ++++++++++++ 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/aurweb/rpc.py b/aurweb/rpc.py index 6e2a27fe..70d8c2fd 100644 --- a/aurweb/rpc.py +++ b/aurweb/rpc.py @@ -202,7 +202,12 @@ class RPC: models.User.ID == models.PackageBase.MaintainerUID, isouter=True ).filter(models.Package.Name.in_(args)) - packages = self._entities(packages) + + max_results = config.getint("options", "max_rpc_results") + packages = self._entities(packages).limit(max_results + 1) + + if packages.count() > max_results: + raise RPCError("Too many package results.") ids = {pkg.ID for pkg in packages} @@ -274,12 +279,7 @@ class RPC: ] # Union all subqueries together. 
- max_results = config.getint("options", "max_rpc_results") - query = subqueries[0].union_all(*subqueries[1:]).limit( - max_results + 1).all() - - if len(query) > max_results: - raise RPCError("Too many package results.") + query = subqueries[0].union_all(*subqueries[1:]).all() # Store our extra information in a class-wise dictionary, # which contains package id -> extra info dict mappings. diff --git a/test/test_rpc.py b/test/test_rpc.py index a67a026e..0d6b2931 100644 --- a/test/test_rpc.py +++ b/test/test_rpc.py @@ -15,6 +15,7 @@ import aurweb.models.relation_type as rt from aurweb import asgi, config, db, rpc, scripts, time from aurweb.models.account_type import USER_ID +from aurweb.models.dependency_type import DEPENDS_ID from aurweb.models.license import License from aurweb.models.package import Package from aurweb.models.package_base import PackageBase @@ -23,6 +24,7 @@ from aurweb.models.package_keyword import PackageKeyword from aurweb.models.package_license import PackageLicense from aurweb.models.package_relation import PackageRelation from aurweb.models.package_vote import PackageVote +from aurweb.models.relation_type import PROVIDES_ID from aurweb.models.user import User from aurweb.redis import redis_connection @@ -814,6 +816,16 @@ def test_rpc_too_many_search_results(client: TestClient, def test_rpc_too_many_info_results(client: TestClient, packages: List[Package]): + # Make many of these packages depend and rely on each other. + # This way, we can test to see that the exceeded limit stays true + # regardless of the number of related records. 
+ with db.begin(): + for i in range(len(packages) - 1): + db.create(PackageDependency, DepTypeID=DEPENDS_ID, + Package=packages[i], DepName=packages[i + 1].Name) + db.create(PackageRelation, RelTypeID=PROVIDES_ID, + Package=packages[i], RelName=packages[i + 1].Name) + config_getint = config.getint def mock_config(section: str, key: str): From 07e479ab503b6e2cb2e363ccf731c3ea60281451 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Wed, 23 Feb 2022 14:37:41 -0800 Subject: [PATCH 011/415] upgrade: bump to v6.0.19 Signed-off-by: Kevin Morris --- aurweb/config.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aurweb/config.py b/aurweb/config.py index b7aa3027..c2b8be79 100644 --- a/aurweb/config.py +++ b/aurweb/config.py @@ -6,7 +6,7 @@ from typing import Any # Publicly visible version of aurweb. This is used to display # aurweb versioning in the footer and must be maintained. # Todo: Make this dynamic/automated. -AURWEB_VERSION = "v6.0.18" +AURWEB_VERSION = "v6.0.19" _parser = None diff --git a/pyproject.toml b/pyproject.toml index 0b21a643..b5da9913 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ # [tool.poetry] name = "aurweb" -version = "v6.0.18" +version = "v6.0.19" license = "GPL-2.0-only" description = "Source code for the Arch User Repository's website" homepage = "https://aur.archlinux.org" From d92f1838404b426c6e3baad103f4dccac1a40e03 Mon Sep 17 00:00:00 2001 From: Colin Woodbury Date: Wed, 23 Feb 2022 18:12:00 -0800 Subject: [PATCH 012/415] docs(docker): explain how to generate dummy data --- docker/README.md | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/docker/README.md b/docker/README.md index 6fa2f142..89dbb739 100644 --- a/docker/README.md +++ b/docker/README.md @@ -16,7 +16,7 @@ systemctl start docker.service The main image - `aurweb` - must be built manually: ```sh -docker compose build aurweb-image +docker compose build ``` ### Starting and Stopping the 
Services @@ -38,6 +38,21 @@ With a running cluster, execute the following in a new terminal: docker compose run test ``` +### Generating Dummy Data + +Before you can make meaningful queries to the cluster, it needs some data. +Luckily such data can be generated. First, `docker ps` to discover the ID of the +container running the FastAPI. Then: + +```sh +docker exec -it /bin/bash +./scheme/gendummydata.py dummy.sql +mysql aurweb < dummy.sql +``` + +The generation script may prompt you to install other Arch packages before it +can proceed. + ### Querying the RPC The Fast (Python) API runs on Port 8444, while the legacy PHP version runs From 1bb4daa36ac1e92e68a528727581bb781f3d9c00 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Wed, 23 Feb 2022 18:54:35 -0800 Subject: [PATCH 013/415] doc: merge CodingGuidelines into CONTRIBUTING.md Signed-off-by: Kevin Morris --- CONTRIBUTING.md | 17 ++++++++++---- doc/CodingGuidelines | 54 -------------------------------------------- 2 files changed, 13 insertions(+), 58 deletions(-) delete mode 100644 doc/CodingGuidelines diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2deaf237..2bb840f5 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,6 +1,7 @@ # Contributing -Patches should be sent to the [aur-dev@lists.archlinux.org][1] mailing list. +Patches should be sent to the [aur-dev@lists.archlinux.org][1] mailing list +or included in a merge request on the [aurweb repository][2]. Before sending patches, you are recommended to run `flake8` and `isort`. @@ -8,12 +9,20 @@ You can add a git hook to do this by installing `python-pre-commit` and running `pre-commit install`. [1]: https://lists.archlinux.org/listinfo/aur-dev +[2]: https://gitlab.archlinunx.org/archlinux/aurweb ### Coding Guidelines -1. All source modified or added within a patchset **must** maintain equivalent - or increased coverage by providing tests that use the functionality. +DISCLAIMER: We realise the code doesn't necessarily follow all the rules. 
+This is an attempt to establish a standard coding style for future +development. -2. Please keep your source within an 80 column width. +1. All source modified or added within a patchset **must** maintain equivalent + or increased coverage by providing tests that use the functionality +2. Please keep your source within an 80 column width +3. Use four space indentation +4. Use [conventional commits](https://www.conventionalcommits.org/en/v1.0.0/) +5. DRY: Don't Repeat Yourself +6. All code should be tested for good _and_ bad cases Test patches that increase coverage in the codebase are always welcome. diff --git a/doc/CodingGuidelines b/doc/CodingGuidelines deleted file mode 100644 index 46537bb2..00000000 --- a/doc/CodingGuidelines +++ /dev/null @@ -1,54 +0,0 @@ -Coding Guidelines -================= - -DISCLAIMER: We realise the code doesn't necessarily follow all the rules. -This is an attempt to establish a standard coding style for future -development. - -Coding style ------------- - -Column width: 79 columns or less within reason. - -Indentation: tabs (standard eight column width) -Please don't add any mode lines. Adjust your editor to display tabs to your -preferred width. Generally code should work with the standard eight column -tabs. - -No short open tags. '' -Try embedding as little XHTML in the PHP as possible. -Consider creating templates for XHTML. - -All markup should conform to XHTML 1.0 Strict requirements. -You can use http://validator.w3.org to check the markup. - -Prevent PHP Notices by using isset() or empty() in conditionals that -reference $_GET, $_POST, or $_REQUEST variables. - -MySQL queries should generally go into functions. - -Submitting patches ------------------- - -!!! PLEASE TEST YOUR PATCHES BEFORE SUBMITTING !!! -Submit uncompressed git-formatted patches to aur-dev@archlinux.org. 
- -You will need to register on the mailing list before submitting: -https://mailman.archlinux.org/mailman/listinfo/aur-dev - -Base your patches on the master branch as forward development is done there. -When writing patches please keep unnecessary changes to a minimum. - -Try to keep your commits small and focused. -Smaller patches are much easier to review and have a better chance of being -pushed more quickly into the main repo. Smaller commits also makes reviewing -the commit history and tracking down specific changes much easier. - -Try to make your commit messages brief but descriptive. - -Glossary --------- -git-formatted patch: - A patch that is produced via `git format-patch` and is sent via - `git send-email` or as an inline attachment of an email. From 9204b76110daaeb582f373836696f05f9674ce94 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Fri, 25 Feb 2022 19:24:29 -0800 Subject: [PATCH 014/415] fix: ...do not add to ActiveTUs when voting on a proposal Straight up bug. Closes #324 Signed-off-by: Kevin Morris --- aurweb/routers/trusted_user.py | 1 - test/test_trusted_user_routes.py | 2 -- 2 files changed, 3 deletions(-) diff --git a/aurweb/routers/trusted_user.py b/aurweb/routers/trusted_user.py index 2d6ea92c..53bcecb7 100644 --- a/aurweb/routers/trusted_user.py +++ b/aurweb/routers/trusted_user.py @@ -220,7 +220,6 @@ async def trusted_user_proposal_post(request: Request, proposal: int, with db.begin(): vote = db.create(models.TUVote, User=request.user, VoteInfo=voteinfo) - voteinfo.ActiveTUs += 1 context["error"] = "You've already voted for this proposal." return render_proposal(request, context, proposal, voteinfo, voters, vote) diff --git a/test/test_trusted_user_routes.py b/test/test_trusted_user_routes.py index e2bf6497..a5c4c5e8 100644 --- a/test/test_trusted_user_routes.py +++ b/test/test_trusted_user_routes.py @@ -650,7 +650,6 @@ def test_tu_proposal_vote(client, proposal): # Store the current related values. 
yes = voteinfo.Yes - active_tus = voteinfo.ActiveTUs cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: @@ -661,7 +660,6 @@ def test_tu_proposal_vote(client, proposal): # Check that the proposal record got updated. assert voteinfo.Yes == yes + 1 - assert voteinfo.ActiveTUs == active_tus + 1 # Check that the new TUVote exists. vote = db.query(TUVote, TUVote.VoteInfo == voteinfo, From c7c79a152b50b5536d54332c0d45e57e0462aed6 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Fri, 25 Feb 2022 19:44:10 -0800 Subject: [PATCH 015/415] upgrade: bump to v6.0.20 Signed-off-by: Kevin Morris --- aurweb/config.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aurweb/config.py b/aurweb/config.py index c2b8be79..ad8ea100 100644 --- a/aurweb/config.py +++ b/aurweb/config.py @@ -6,7 +6,7 @@ from typing import Any # Publicly visible version of aurweb. This is used to display # aurweb versioning in the footer and must be maintained. # Todo: Make this dynamic/automated. -AURWEB_VERSION = "v6.0.19" +AURWEB_VERSION = "v6.0.20" _parser = None diff --git a/pyproject.toml b/pyproject.toml index b5da9913..88f182be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ # [tool.poetry] name = "aurweb" -version = "v6.0.19" +version = "v6.0.20" license = "GPL-2.0-only" description = "Source code for the Arch User Repository's website" homepage = "https://aur.archlinux.org" From b80d914cba0158a559162f1d019f836c004dee6b Mon Sep 17 00:00:00 2001 From: Matt Harrison Date: Mon, 7 Mar 2022 12:37:54 -0500 Subject: [PATCH 016/415] fix click to copy when there is more than one copy link on the page. 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes issue reported on the mailing list here: https://lists.archlinux.org/pipermail/aur-general/2022-March/036833.html Thanks to Henry-Joseph Audéoud for diagnosing the issue https://lists.archlinux.org/pipermail/aur-general/2022-March/036836.html Also update the event variable to use the local copy instead of the deprecated global version https://stackoverflow.com/questions/58341832/event-is-deprecated-what-should-be-used-instead --- web/html/js/copy.js | 4 ++-- web/template/pkg_details.php | 4 ++-- web/template/pkgbase_details.php | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/web/html/js/copy.js b/web/html/js/copy.js index f46299b3..21324ccb 100644 --- a/web/html/js/copy.js +++ b/web/html/js/copy.js @@ -1,6 +1,6 @@ document.addEventListener('DOMContentLoaded', function() { - document.querySelector('.copy').addEventListener('click', function(e) { + document.querySelectorAll('.copy').addEventListener('click', function(e) { e.preventDefault(); - navigator.clipboard.writeText(event.target.text); + navigator.clipboard.writeText(e.target.text); }); }); diff --git a/web/template/pkg_details.php b/web/template/pkg_details.php index 047de9a7..25d85b78 100644 --- a/web/template/pkg_details.php +++ b/web/template/pkg_details.php @@ -309,9 +309,9 @@ endif; diff --git a/web/template/pkgbase_details.php b/web/template/pkgbase_details.php index 35ad217a..bde29c1c 100644 --- a/web/template/pkgbase_details.php +++ b/web/template/pkgbase_details.php @@ -138,9 +138,9 @@ endif; From 6a243e90dbf08c3a9db8f757c11471661d18bcc1 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 7 Mar 2022 23:23:49 -0800 Subject: [PATCH 017/415] fix: only reject addvote for users with running proposals This was incorrectly indiscriminately targetting _any_ proposal for a particular user. 
Signed-off-by: Kevin Morris --- aurweb/routers/trusted_user.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/aurweb/routers/trusted_user.py b/aurweb/routers/trusted_user.py index 53bcecb7..cbe3e47d 100644 --- a/aurweb/routers/trusted_user.py +++ b/aurweb/routers/trusted_user.py @@ -274,8 +274,10 @@ async def trusted_user_addvote_post(request: Request, context["error"] = "Username does not exist." return render_addvote(context, HTTPStatus.NOT_FOUND) + utcnow = time.utcnow() voteinfo = db.query(models.TUVoteInfo).filter( - models.TUVoteInfo.User == user).count() + and_(models.TUVoteInfo.User == user, + models.TUVoteInfo.End > utcnow)).count() if voteinfo: _ = l10n.get_translator_for_request(request) context["error"] = _( From f11e8de251af54e78043d8016b12feda38a9ec55 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 7 Mar 2022 23:32:14 -0800 Subject: [PATCH 018/415] upgrade: bump to v6.0.21 Signed-off-by: Kevin Morris --- aurweb/config.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aurweb/config.py b/aurweb/config.py index ad8ea100..9931e7d2 100644 --- a/aurweb/config.py +++ b/aurweb/config.py @@ -6,7 +6,7 @@ from typing import Any # Publicly visible version of aurweb. This is used to display # aurweb versioning in the footer and must be maintained. # Todo: Make this dynamic/automated. 
-AURWEB_VERSION = "v6.0.20" +AURWEB_VERSION = "v6.0.21" _parser = None diff --git a/pyproject.toml b/pyproject.toml index 88f182be..ce081ce6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ # [tool.poetry] name = "aurweb" -version = "v6.0.20" +version = "v6.0.21" license = "GPL-2.0-only" description = "Source code for the Arch User Repository's website" homepage = "https://aur.archlinux.org" From 5045f0f3e464fc0fbb3229968cb07617ec48314f Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 7 Mar 2022 23:53:57 -0800 Subject: [PATCH 019/415] fix: copy.js javascript initialization Not sure where this works, but it doesn't seem to work on my browser. Achieved the same by forEaching through the array returned by querySelectorAll instead. Signed-off-by: Kevin Morris --- templates/partials/packages/details.html | 1 - web/html/js/copy.js | 11 +++++++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/templates/partials/packages/details.html b/templates/partials/packages/details.html index 22d519b9..e0eda54c 100644 --- a/templates/partials/packages/details.html +++ b/templates/partials/packages/details.html @@ -160,4 +160,3 @@ - diff --git a/web/html/js/copy.js b/web/html/js/copy.js index 21324ccb..3b659270 100644 --- a/web/html/js/copy.js +++ b/web/html/js/copy.js @@ -1,6 +1,9 @@ document.addEventListener('DOMContentLoaded', function() { - document.querySelectorAll('.copy').addEventListener('click', function(e) { - e.preventDefault(); - navigator.clipboard.writeText(e.target.text); - }); + let elements = document.querySelectorAll('.copy'); + elements.forEach(function(el) { + el.addEventListener('click', function(e) { + e.preventDefault(); + navigator.clipboard.writeText(e.target.text); + }); + }); }); From e2a17fef95385f0a7cae4216d28b5789b84facce Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 7 Mar 2022 23:57:54 -0800 Subject: [PATCH 020/415] upgrade: bump to v6.0.22 Signed-off-by: Kevin Morris --- aurweb/config.py | 2 +- 
pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aurweb/config.py b/aurweb/config.py index 9931e7d2..d0b095f0 100644 --- a/aurweb/config.py +++ b/aurweb/config.py @@ -6,7 +6,7 @@ from typing import Any # Publicly visible version of aurweb. This is used to display # aurweb versioning in the footer and must be maintained. # Todo: Make this dynamic/automated. -AURWEB_VERSION = "v6.0.21" +AURWEB_VERSION = "v6.0.22" _parser = None diff --git a/pyproject.toml b/pyproject.toml index ce081ce6..f2401b88 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ # [tool.poetry] name = "aurweb" -version = "v6.0.21" +version = "v6.0.22" license = "GPL-2.0-only" description = "Source code for the Arch User Repository's website" homepage = "https://aur.archlinux.org" From 13217be939278a483e77e46fd1e1dd5081d7a829 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 8 Mar 2022 17:49:21 -0800 Subject: [PATCH 021/415] fix: don't check suspension for ownership changes People can change comaintainer ownership to suspended users if they want to. Suspended users cannot login, so there is no breach of security here. It does make sense to allow ownership to be changed, imo. 
Closes #339 Signed-off-by: Kevin Morris --- aurweb/scripts/notify.py | 5 +---- aurweb/testing/email.py | 9 +++++++++ test/test_notify.py | 15 +++++++++++++++ 3 files changed, 25 insertions(+), 4 deletions(-) diff --git a/aurweb/scripts/notify.py b/aurweb/scripts/notify.py index c823b09e..dbef3aa5 100755 --- a/aurweb/scripts/notify.py +++ b/aurweb/scripts/notify.py @@ -399,10 +399,7 @@ class ComaintainershipEventNotification(Notification): self._pkgbase = db.query(PackageBase.Name).filter( PackageBase.ID == pkgbase_id).first().Name - user = db.query(User).filter( - and_(User.ID == uid, - User.Suspended == 0) - ).with_entities( + user = db.query(User).filter(User.ID == uid).with_entities( User.Email, User.LangPreference ).first() diff --git a/aurweb/testing/email.py b/aurweb/testing/email.py index c0be2797..b3e3990b 100644 --- a/aurweb/testing/email.py +++ b/aurweb/testing/email.py @@ -37,6 +37,15 @@ class Email: if autoparse: self._parse() + @staticmethod + def reset() -> None: + # Cleanup all email files for this test suite. + prefix = Email.email_prefix(suite=True) + files = os.listdir(Email.TEST_DIR) + for file in files: + if file.startswith(prefix): + os.remove(os.path.join(Email.TEST_DIR, file)) + @staticmethod def email_prefix(suite: bool = False) -> str: """ diff --git a/test/test_notify.py b/test/test_notify.py index a8e994c5..2009e3a8 100644 --- a/test/test_notify.py +++ b/test/test_notify.py @@ -299,6 +299,21 @@ You were removed from the co-maintainer list of {pkgbase.Name} [1]. 
assert email.body == expected +def test_suspended_ownership_change(user: User, pkgbases: List[PackageBase]): + with db.begin(): + user.Suspended = 1 + + pkgbase = pkgbases[0] + notif = notify.ComaintainerAddNotification(user.ID, pkgbase.ID) + notif.send() + assert Email.count() == 1 + + Email.reset() # Clear the Email pool + notif = notify.ComaintainerRemoveNotification(user.ID, pkgbase.ID) + notif.send() + assert Email.count() == 1 + + def test_delete(user: User, user2: User, pkgbases: List[PackageBase]): pkgbase = pkgbases[0] notif = notify.DeleteNotification(user2.ID, pkgbase.ID) From e00cf5f1249b522e58fb0651ae8b00e7b74c6ab2 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 8 Mar 2022 17:51:44 -0800 Subject: [PATCH 022/415] test: use smtplib.SMTP[_SSL] timeout = notifications.smtp-timeout A new option has been added for configuration of SMTP timeout: - notifications.smtp-timeout During tests, we can change this timeout to be small, so we aren't depending on hardware-based RNG to pass the timeout. Without a timeout, users can run into a long-running test for no particular reason. 
Signed-off-by: Kevin Morris --- aurweb/scripts/notify.py | 5 +++- aurweb/testing/smtp.py | 3 +++ conf/config.defaults | 1 + test/test_notify.py | 49 ++++++++++++++++++++++++---------------- 4 files changed, 37 insertions(+), 21 deletions(-) diff --git a/aurweb/scripts/notify.py b/aurweb/scripts/notify.py index dbef3aa5..6afa65ae 100755 --- a/aurweb/scripts/notify.py +++ b/aurweb/scripts/notify.py @@ -104,7 +104,10 @@ class Notification: False: smtplib.SMTP, True: smtplib.SMTP_SSL, } - server = classes[use_ssl](server_addr, server_port) + smtp_timeout = aurweb.config.getint("notifications", + "smtp-timeout") + server = classes[use_ssl](server_addr, server_port, + timeout=smtp_timeout) if use_starttls: server.ehlo() diff --git a/aurweb/testing/smtp.py b/aurweb/testing/smtp.py index da64c93f..e5d67991 100644 --- a/aurweb/testing/smtp.py +++ b/aurweb/testing/smtp.py @@ -36,6 +36,9 @@ class FakeSMTP: def quit(self) -> None: self.quit_count += 1 + def __call__(self, *args, **kwargs) -> "FakeSMTP": + return self + class FakeSMTP_SSL(FakeSMTP): """ A fake version of smtplib.SMTP_SSL used for testing. 
""" diff --git a/conf/config.defaults b/conf/config.defaults index 371c99b2..722802cc 100644 --- a/conf/config.defaults +++ b/conf/config.defaults @@ -65,6 +65,7 @@ smtp-use-ssl = 0 smtp-use-starttls = 0 smtp-user = smtp-password = +smtp-timeout = 60 sender = notify@aur.archlinux.org reply-to = noreply@aur.archlinux.org diff --git a/test/test_notify.py b/test/test_notify.py index 2009e3a8..fdec5ed7 100644 --- a/test/test_notify.py +++ b/test/test_notify.py @@ -547,18 +547,18 @@ def test_smtp(user: User): with db.begin(): user.ResetKey = "12345678901234567890123456789012" - SMTP = FakeSMTP() + smtp = FakeSMTP() get = "aurweb.config.get" getboolean = "aurweb.config.getboolean" with mock.patch(get, side_effect=mock_smtp_config(str)): with mock.patch(getboolean, side_effect=mock_smtp_config(bool)): - with mock.patch("smtplib.SMTP", side_effect=lambda a, b: SMTP): + with mock.patch("smtplib.SMTP", side_effect=smtp): config.rehash() notif = notify.WelcomeNotification(user.ID) notif.send() config.rehash() - assert len(SMTP.emails) == 1 + assert len(smtp.emails) == 1 def mock_smtp_starttls_config(cls): @@ -586,25 +586,25 @@ def test_smtp_starttls(user: User): user.ResetKey = "12345678901234567890123456789012" user.BackupEmail = "backup@example.org" - SMTP = FakeSMTP() + smtp = FakeSMTP() get = "aurweb.config.get" getboolean = "aurweb.config.getboolean" with mock.patch(get, side_effect=mock_smtp_starttls_config(str)): with mock.patch( getboolean, side_effect=mock_smtp_starttls_config(bool)): - with mock.patch("smtplib.SMTP", side_effect=lambda a, b: SMTP): + with mock.patch("smtplib.SMTP", side_effect=smtp): notif = notify.WelcomeNotification(user.ID) notif.send() - assert SMTP.starttls_enabled - assert SMTP.user - assert SMTP.passwd + assert smtp.starttls_enabled + assert smtp.user + assert smtp.passwd - assert len(SMTP.emails) == 2 - to = SMTP.emails[0][1] + assert len(smtp.emails) == 2 + to = smtp.emails[0][1] assert to == [user.Email] - to = SMTP.emails[1][1] + to = 
smtp.emails[1][1] assert to == [user.BackupEmail] @@ -629,19 +629,19 @@ def test_smtp_ssl(user: User): with db.begin(): user.ResetKey = "12345678901234567890123456789012" - SMTP = FakeSMTP_SSL() + smtp = FakeSMTP_SSL() get = "aurweb.config.get" getboolean = "aurweb.config.getboolean" with mock.patch(get, side_effect=mock_smtp_ssl_config(str)): with mock.patch(getboolean, side_effect=mock_smtp_ssl_config(bool)): - with mock.patch("smtplib.SMTP_SSL", side_effect=lambda a, b: SMTP): + with mock.patch("smtplib.SMTP_SSL", side_effect=smtp): notif = notify.WelcomeNotification(user.ID) notif.send() - assert len(SMTP.emails) == 1 - assert SMTP.use_ssl - assert SMTP.user - assert SMTP.passwd + assert len(smtp.emails) == 1 + assert smtp.use_ssl + assert smtp.user + assert smtp.passwd def test_notification_defaults(): @@ -655,6 +655,7 @@ def test_notification_oserror(user: User, caplog: pytest.LogCaptureFixture): """ Try sending a notification with a bad SMTP configuration. """ caplog.set_level(ERROR) config_get = config.get + config_getint = config.getint mocked_options = { "sendmail": str(), @@ -662,8 +663,9 @@ def test_notification_oserror(user: User, caplog: pytest.LogCaptureFixture): "smtp-port": "587", "smtp-user": "notify@server.xyz", "smtp-password": "notify_server_xyz", + "smtp-timeout": 1, "sender": "notify@server.xyz", - "reply-to": "no-reply@server.xyz" + "reply-to": "no-reply@server.xyz", } def mock_config_get(section: str, key: str) -> str: @@ -672,9 +674,16 @@ def test_notification_oserror(user: User, caplog: pytest.LogCaptureFixture): return mocked_options.get(key) return config_get(section, key) + def mock_config_getint(section: str, key: str) -> str: + if section == "notifications": + if key in mocked_options: + return mocked_options.get(key) + return config_getint(section, key) + notif = notify.WelcomeNotification(user.ID) - with mock.patch("aurweb.config.get", side_effect=mock_config_get): - notif.send() + with mock.patch("aurweb.config.getint", 
side_effect=mock_config_getint): + with mock.patch("aurweb.config.get", side_effect=mock_config_get): + notif.send() expected = "Unable to emit notification due to an OSError" assert expected in caplog.text From 2a393f95faa8a4952faf22b1cbcb4e0c8e8318ae Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 8 Mar 2022 17:59:00 -0800 Subject: [PATCH 023/415] upgrade: bump to v6.0.23 Signed-off-by: Kevin Morris --- aurweb/config.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aurweb/config.py b/aurweb/config.py index d0b095f0..637024de 100644 --- a/aurweb/config.py +++ b/aurweb/config.py @@ -6,7 +6,7 @@ from typing import Any # Publicly visible version of aurweb. This is used to display # aurweb versioning in the footer and must be maintained. # Todo: Make this dynamic/automated. -AURWEB_VERSION = "v6.0.22" +AURWEB_VERSION = "v6.0.23" _parser = None diff --git a/pyproject.toml b/pyproject.toml index f2401b88..e930a331 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ # [tool.poetry] name = "aurweb" -version = "v6.0.22" +version = "v6.0.23" license = "GPL-2.0-only" description = "Source code for the Arch User Repository's website" homepage = "https://aur.archlinux.org" From a1a88ea8729f4eafee396197d40fa8a290716bfa Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 8 Mar 2022 19:00:19 -0800 Subject: [PATCH 024/415] fix(rpc): suggestions should only suggest based on % Previously, Python code was looking for suggestions based on `%%`. This was inconsistent with PHP's suggestion implementation and cause more records to be bundled with a suggestion, along with supplying misleading suggestions. 
Closes #343 Signed-off-by: Kevin Morris --- aurweb/rpc.py | 5 +++-- test/test_rpc.py | 16 ++++++++++++++++ 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/aurweb/rpc.py b/aurweb/rpc.py index 70d8c2fd..5bc6b80d 100644 --- a/aurweb/rpc.py +++ b/aurweb/rpc.py @@ -332,7 +332,7 @@ class RPC: models.PackageBase ).filter( and_(models.PackageBase.PackagerUID.isnot(None), - models.Package.Name.like(f"%{arg}%")) + models.Package.Name.like(f"{arg}%")) ).order_by(models.Package.Name.asc()).limit(20) return [pkg.Name for pkg in packages] @@ -341,9 +341,10 @@ class RPC: if not args: return [] + arg = args[0] packages = db.query(models.PackageBase.Name).filter( and_(models.PackageBase.PackagerUID.isnot(None), - models.PackageBase.Name.like(f"%{args[0]}%")) + models.PackageBase.Name.like(f"{arg}%")) ).order_by(models.PackageBase.Name.asc()).limit(20) return [pkg.Name for pkg in packages] diff --git a/test/test_rpc.py b/test/test_rpc.py index 0d6b2931..2f7f7860 100644 --- a/test/test_rpc.py +++ b/test/test_rpc.py @@ -551,6 +551,14 @@ def test_rpc_suggest_pkgbase(client: TestClient, packages: List[Package]): data = response.json() assert data == [] + # Test that suggestions are only given based on the beginning + # of the keyword string. + params["arg"] = "ther-pkg" + with client as request: + response = request.get("/rpc", params=params) + data = response.json() + assert data == [] + def test_rpc_suggest(client: TestClient, packages: List[Package]): params = {"v": 5, "type": "suggest", "arg": "other"} @@ -573,6 +581,14 @@ def test_rpc_suggest(client: TestClient, packages: List[Package]): data = response.json() assert data == [] + # Test that suggestions are only given based on the beginning + # of the keyword string. 
+ params["arg"] = "ther-pkg" + with client as request: + response = request.get("/rpc", params=params) + data = response.json() + assert data == [] + def mock_config_getint(section: str, key: str): if key == "request_limit": From 0afa07ed3b895efd84adfba8e54a342065c54f78 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 8 Mar 2022 19:16:02 -0800 Subject: [PATCH 025/415] upgrade: bump to v6.0.24 Signed-off-by: Kevin Morris --- aurweb/config.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aurweb/config.py b/aurweb/config.py index 637024de..287152d4 100644 --- a/aurweb/config.py +++ b/aurweb/config.py @@ -6,7 +6,7 @@ from typing import Any # Publicly visible version of aurweb. This is used to display # aurweb versioning in the footer and must be maintained. # Todo: Make this dynamic/automated. -AURWEB_VERSION = "v6.0.23" +AURWEB_VERSION = "v6.0.24" _parser = None diff --git a/pyproject.toml b/pyproject.toml index e930a331..7a2f6ca3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ # [tool.poetry] name = "aurweb" -version = "v6.0.23" +version = "v6.0.24" license = "GPL-2.0-only" description = "Source code for the Arch User Repository's website" homepage = "https://aur.archlinux.org" From 49c5a3facf096e9b0a1905e5ee38fe8750a5bb63 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 8 Mar 2022 20:28:09 -0800 Subject: [PATCH 026/415] feat: display stats about total & active TUs on proposals This patch brings in two new features: - when viewing proposal listings, there is a new Statistics section, containing the total and active number of Trusted Users found in the database. - when viewing a proposal directly, the number of active trusted users assigned when the proposal was added is now displayed in the details section. 
Closes #323 Signed-off-by: Kevin Morris --- aurweb/routers/trusted_user.py | 20 +++++++++ po/aurweb.pot | 4 ++ templates/partials/tu/proposal/details.html | 5 +++ templates/tu/index.html | 16 +++++++ test/test_trusted_user_routes.py | 49 +++++++++++++++++++++ web/html/css/aurweb.css | 13 ++++++ 6 files changed, 107 insertions(+) diff --git a/aurweb/routers/trusted_user.py b/aurweb/routers/trusted_user.py index cbe3e47d..3f0eb836 100644 --- a/aurweb/routers/trusted_user.py +++ b/aurweb/routers/trusted_user.py @@ -2,6 +2,7 @@ import html import typing from http import HTTPStatus +from typing import Any, Dict from fastapi import APIRouter, Form, HTTPException, Request from fastapi.responses import RedirectResponse, Response @@ -33,6 +34,21 @@ ADDVOTE_SPECIFICS = { } +def populate_trusted_user_counts(context: Dict[str, Any]) -> None: + tu_query = db.query(User).filter( + or_(User.AccountTypeID == TRUSTED_USER_ID, + User.AccountTypeID == TRUSTED_USER_AND_DEV_ID) + ) + context["trusted_user_count"] = tu_query.count() + + # In case any records have a None InactivityTS. + active_tu_query = tu_query.filter( + or_(User.InactivityTS.is_(None), + User.InactivityTS == 0) + ) + context["active_trusted_user_count"] = active_tu_query.count() + + @router.get("/tu") @requires_auth async def trusted_user(request: Request, @@ -40,6 +56,8 @@ async def trusted_user(request: Request, cby: str = "desc", # current by poff: int = 0, # past offset pby: str = "desc"): # past by + """ Proposal listings. 
""" + if not request.user.has_credential(creds.TU_LIST_VOTES): return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER) @@ -102,6 +120,8 @@ async def trusted_user(request: Request, context["current_by_next"] = "asc" if current_by == "desc" else "desc" context["past_by_next"] = "asc" if past_by == "desc" else "desc" + populate_trusted_user_counts(context) + context["q"] = { "coff": current_off, "cby": current_by, diff --git a/po/aurweb.pot b/po/aurweb.pot index bec1b672..e7c632e3 100644 --- a/po/aurweb.pot +++ b/po/aurweb.pot @@ -2334,3 +2334,7 @@ msgid "This action will close any pending package requests " "related to it. If %sComments%s are omitted, a closure " "comment will be autogenerated." msgstr "" + +#: templates/partials/tu/proposal/details.html +msgid "assigned" +msgstr "" diff --git a/templates/partials/tu/proposal/details.html b/templates/partials/tu/proposal/details.html index f7a55148..4cbee9ad 100644 --- a/templates/partials/tu/proposal/details.html +++ b/templates/partials/tu/proposal/details.html @@ -21,6 +21,11 @@ +
+ {{ "Active" | tr }} {{ "Trusted Users" | tr }} {{ "assigned" | tr }}: + {{ voteinfo.ActiveTUs }} +
+ {% set submitter = voteinfo.Submitter.Username %} {% set submitter_uri = "/account/%s" | format(submitter) %} {% set submitter = '%s' | format(submitter_uri, submitter) %} diff --git a/templates/tu/index.html b/templates/tu/index.html index 5060e1f7..4c7a3c35 100644 --- a/templates/tu/index.html +++ b/templates/tu/index.html @@ -1,6 +1,22 @@ {% extends "partials/layout.html" %} {% block pageContent %} +
+

{{ "Statistics" | tr }}

+ + + + + + + + + + + +
{{ "Total" | tr }} {{ "Trusted Users" | tr }}:{{ trusted_user_count }}
{{ "Active" | tr }} {{ "Trusted Users" | tr }}:{{ active_trusted_user_count }}
+
+ {% with table_class = "current-votes", total_votes = current_votes_count, diff --git a/test/test_trusted_user_routes.py b/test/test_trusted_user_routes.py index a5c4c5e8..2e7dc193 100644 --- a/test/test_trusted_user_routes.py +++ b/test/test_trusted_user_routes.py @@ -267,6 +267,48 @@ def test_tu_index(client, tu_user): assert int(vote_id.text.strip()) == vote_records[1].ID +def test_tu_stats(client: TestClient, tu_user: User): + cookies = {"AURSID": tu_user.login(Request(), "testPassword")} + with client as request: + response = request.get("/tu", cookies=cookies, allow_redirects=False) + assert response.status_code == HTTPStatus.OK + + root = parse_root(response.text) + stats = root.xpath('//table[@class="no-width"]')[0] + rows = stats.xpath("./tbody/tr") + + # We have one trusted user. + total = rows[0] + label, count = total.xpath("./td") + assert int(count.text.strip()) == 1 + + # And we have one active TU. + active = rows[1] + label, count = active.xpath("./td") + assert int(count.text.strip()) == 1 + + with db.begin(): + tu_user.InactivityTS = time.utcnow() + + with client as request: + response = request.get("/tu", cookies=cookies, allow_redirects=False) + assert response.status_code == HTTPStatus.OK + + root = parse_root(response.text) + stats = root.xpath('//table[@class="no-width"]')[0] + rows = stats.xpath("./tbody/tr") + + # We have one trusted user. + total = rows[0] + label, count = total.xpath("./td") + assert int(count.text.strip()) == 1 + + # But we have no more active TUs. 
+ active = rows[1] + label, count = active.xpath("./td") + assert int(count.text.strip()) == 0 + + def test_tu_index_table_paging(client, tu_user): ts = time.utcnow() @@ -515,6 +557,8 @@ def test_tu_proposal_unauthorized(client: TestClient, user: User, def test_tu_running_proposal(client: TestClient, proposal: Tuple[User, User, TUVoteInfo]): tu_user, user, voteinfo = proposal + with db.begin(): + voteinfo.ActiveTUs = 1 # Initiate an authenticated GET request to /tu/{proposal_id}. proposal_id = voteinfo.ID @@ -536,6 +580,11 @@ def test_tu_running_proposal(client: TestClient, './div[contains(@class, "user")]/strong/a/text()')[0] assert username.strip() == user.Username + active = details.xpath('./div[contains(@class, "field")]')[1] + content = active.text.strip() + assert "Active Trusted Users assigned:" in content + assert "1" in content + submitted = details.xpath( './div[contains(@class, "submitted")]/text()')[0] assert re.match(r'^Submitted: \d{4}-\d{2}-\d{2} \d{2}:\d{2} \(.+\) by$', diff --git a/web/html/css/aurweb.css b/web/html/css/aurweb.css index 22b5ac65..59ae7216 100644 --- a/web/html/css/aurweb.css +++ b/web/html/css/aurweb.css @@ -282,3 +282,16 @@ pre.traceback { white-space: -o-pre-wrap; word-wrap: break-all; } + +/* A text aligning alias. */ +.text-right { + text-align: right; +} + +/* By default, tables use 100% width, which we do not always want. */ +table.no-width { + width: auto; +} +table.no-width > tbody > tr > td { + padding-right: 2px; +} From d7cb04b93dcdad64b6ea8ad081f6dad6387545d0 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 8 Mar 2022 20:35:21 -0800 Subject: [PATCH 027/415] upgrade: bump to v6.0.25 Signed-off-by: Kevin Morris --- aurweb/config.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aurweb/config.py b/aurweb/config.py index 287152d4..9565b70c 100644 --- a/aurweb/config.py +++ b/aurweb/config.py @@ -6,7 +6,7 @@ from typing import Any # Publicly visible version of aurweb. 
This is used to display # aurweb versioning in the footer and must be maintained. # Todo: Make this dynamic/automated. -AURWEB_VERSION = "v6.0.24" +AURWEB_VERSION = "v6.0.25" _parser = None diff --git a/pyproject.toml b/pyproject.toml index 7a2f6ca3..8b7a2e93 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ # [tool.poetry] name = "aurweb" -version = "v6.0.24" +version = "v6.0.25" license = "GPL-2.0-only" description = "Source code for the Arch User Repository's website" homepage = "https://aur.archlinux.org" From 7ddce6bb2d8a18fd9b63a23e7a022197226ef672 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 14 Mar 2022 05:55:19 -0700 Subject: [PATCH 028/415] doc: update CONTRIBUTING.md Signed-off-by: Kevin Morris --- CONTRIBUTING.md | 74 +++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 72 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2bb840f5..3d99d887 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -9,7 +9,7 @@ You can add a git hook to do this by installing `python-pre-commit` and running `pre-commit install`. [1]: https://lists.archlinux.org/listinfo/aur-dev -[2]: https://gitlab.archlinunx.org/archlinux/aurweb +[2]: https://gitlab.archlinux.org/archlinux/aurweb ### Coding Guidelines @@ -23,6 +23,76 @@ development. 3. Use four space indentation 4. Use [conventional commits](https://www.conventionalcommits.org/en/v1.0.0/) 5. DRY: Don't Repeat Yourself -6. All code should be tested for good _and_ bad cases +6. All code should be tested for good _and_ bad cases (see [test/README.md][3]) + +[3]: https://gitlab.archlinux.org/archlinux/aurweb/-/blob/master/test/README.md Test patches that increase coverage in the codebase are always welcome. + +### Coding Style + +We use the `flake8` and `isort` tools to manage PEP-8 coherenace and +import ordering in this project. + +There are plugins for editors or IDEs which automate this process. 
Some +example plugins: + +- [tell-k/vim-autopep8](https://github.com/tell-k/vim-autopep8) +- [fisadev/vim-isort](https://github.com/fisadev/vim-isort) +- [prabirshrestha/vim-lsp](https://github.com/prabirshrestha/vim-lsp) + +See `setup.cfg` for flake8 and isort specific rules. + +Note: We are planning on switching to [psf/black](https://github.com/psf/black). +For now, developers should ensure that flake8 and isort passes when submitting +merge requests or patch sets. + +### Development Environment + +To get started with local development, an instance of aurweb must be +brought up. This can be done using the following sections: + +- [Using Docker](#using-docker) +- [Using INSTALL](#using-install) + +There are a number of services aurweb employs to run the application +in its entirety: + +- ssh +- cron jobs +- starlette/fastapi asgi server + +Project structure: + +- `./aurweb`: `aurweb` Python package +- `./templates`: Jinja2 templates +- `./docker`: Docker scripts and configuration files + +#### Using Docker + +Using Docker, we can run the entire infrastructure in two steps: + + # Build the aurweb:latest image + $ docker-compose build + + # Start all services in the background + $ docker-compose up -d nginx + +`docker-compose` services will generate a locally signed root certificate +at `./data/root_ca.crt`. Users can import this into ca-certificates or their +browser if desired. + +Accessible services (on the host): + +- https://localhost:8444 (python via nginx) +- https://localhost:8443 (php via nginx) +- localhost:13306 (mariadb) +- localhost:16379 (redis) + +Docker services, by default, are setup to be hot reloaded when source code +is changed. + +#### Using INSTALL + +The [INSTALL](INSTALL) file describes steps to install the application on +bare-metal systems. 
From 790ca4194a6360e9f47f56fe9d39aae4cbe14c25 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 14 Mar 2022 05:57:06 -0700 Subject: [PATCH 029/415] fix: coherenace -> coherence Signed-off-by: Kevin Morris --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3d99d887..52e182c7 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -31,7 +31,7 @@ Test patches that increase coverage in the codebase are always welcome. ### Coding Style -We use the `flake8` and `isort` tools to manage PEP-8 coherenace and +We use the `flake8` and `isort` tools to manage PEP-8 coherence and import ordering in this project. There are plugins for editors or IDEs which automate this process. Some From afd25c248fcee508da6724398f6c37c47bf4be5e Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 14 Mar 2022 06:24:15 -0700 Subject: [PATCH 030/415] fix: remove HEAD and OPTIONS handling from metrics Signed-off-by: Kevin Morris --- aurweb/prometheus.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/aurweb/prometheus.py b/aurweb/prometheus.py index 73be3ef6..272ee023 100644 --- a/aurweb/prometheus.py +++ b/aurweb/prometheus.py @@ -60,6 +60,9 @@ def http_requests_total() -> Callable[[Info], None]: labelnames=("method", "path", "status")) def instrumentation(info: Info) -> None: + if info.request.method.lower() in ("head", "options"): # pragma: no cover + return + scope = info.request.scope # Taken from https://github.com/stephenhillier/starlette_exporter @@ -70,8 +73,8 @@ def http_requests_total() -> Callable[[Info], None]: if not (scope.get("endpoint", None) and scope.get("router", None)): return None - root_path = scope.get("root_path", "") - app = scope.get("app", {}) + root_path = scope.get("root_path", str()) + app = scope.get("app", dict()) if hasattr(app, "root_path"): app_root_path = getattr(app, "root_path") @@ -102,6 +105,9 @@ def http_api_requests_total() -> Callable[[Info], 
None]: labelnames=("type", "status")) def instrumentation(info: Info) -> None: + if info.request.method.lower() in ("head", "options"): # pragma: no cover + return + if info.request.url.path.rstrip("/") == "/rpc": type = info.request.query_params.get("type", "None") if info.response: From d8564e446b744bbc7b6bd8fea22ab6b614acc5ab Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Wed, 30 Mar 2022 12:30:21 -0700 Subject: [PATCH 031/415] upgrade: bump to v6.0.26 Signed-off-by: Kevin Morris --- aurweb/config.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aurweb/config.py b/aurweb/config.py index 9565b70c..53942b75 100644 --- a/aurweb/config.py +++ b/aurweb/config.py @@ -6,7 +6,7 @@ from typing import Any # Publicly visible version of aurweb. This is used to display # aurweb versioning in the footer and must be maintained. # Todo: Make this dynamic/automated. -AURWEB_VERSION = "v6.0.25" +AURWEB_VERSION = "v6.0.26" _parser = None diff --git a/pyproject.toml b/pyproject.toml index 8b7a2e93..b15af272 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ # [tool.poetry] name = "aurweb" -version = "v6.0.25" +version = "v6.0.26" license = "GPL-2.0-only" description = "Source code for the Arch User Repository's website" homepage = "https://aur.archlinux.org" From ed41a4fe1933e13d19a4e648d63011b3d2a67cc5 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Wed, 30 Mar 2022 16:16:47 -0700 Subject: [PATCH 032/415] feat: add paging to package depends & required by This patch does not include a javascript implementating, but provides a pure HTML/HTTP method of paging through these lists. Also fixes erroneous limiting. We now use a hardcoded limit of 20 by default. 
Signed-off-by: Kevin Morris --- aurweb/packages/util.py | 6 +-- aurweb/pkgbase/util.py | 13 ++++- aurweb/routers/packages.py | 51 +++++++++++++++---- aurweb/templates.py | 2 + po/aurweb.pot | 8 +++ .../partials/packages/package_metadata.html | 16 +++++- test/test_packages_routes.py | 45 ++++++++++++++++ 7 files changed, 125 insertions(+), 16 deletions(-) diff --git a/aurweb/packages/util.py b/aurweb/packages/util.py index e8569f29..5085ddf4 100644 --- a/aurweb/packages/util.py +++ b/aurweb/packages/util.py @@ -214,7 +214,7 @@ def query_notified(query: List[models.Package], return output -def pkg_required(pkgname: str, provides: List[str], limit: int) \ +def pkg_required(pkgname: str, provides: List[str]) \ -> List[PackageDependency]: """ Get dependencies that match a string in `[pkgname] + provides`. @@ -227,8 +227,8 @@ def pkg_required(pkgname: str, provides: List[str], limit: int) \ targets = set([pkgname] + provides) query = db.query(PackageDependency).join(Package).filter( PackageDependency.DepName.in_(targets) - ).order_by(Package.Name.asc()).limit(limit) - return query.all() + ).order_by(Package.Name.asc()) + return query @register_filter("source_uri") diff --git a/aurweb/pkgbase/util.py b/aurweb/pkgbase/util.py index 18af3df0..ea952dce 100644 --- a/aurweb/pkgbase/util.py +++ b/aurweb/pkgbase/util.py @@ -11,16 +11,25 @@ from aurweb.models.package_request import PENDING_ID, PackageRequest from aurweb.models.package_vote import PackageVote from aurweb.scripts import notify from aurweb.templates import make_context as _make_context +from aurweb.templates import make_variable_context as _make_variable_context -def make_context(request: Request, pkgbase: PackageBase) -> Dict[str, Any]: +async def make_variable_context(request: Request, pkgbase: PackageBase) \ + -> Dict[str, Any]: + ctx = await _make_variable_context(request, pkgbase.Name) + return make_context(request, pkgbase, ctx) + + +def make_context(request: Request, pkgbase: PackageBase, + context: Dict[str, 
Any] = None) -> Dict[str, Any]: """ Make a basic context for package or pkgbase. :param request: FastAPI request :param pkgbase: PackageBase instance :return: A pkgbase context without specific differences """ - context = _make_context(request, pkgbase.Name) + if not context: + context = _make_context(request, pkgbase.Name) context["git_clone_uri_anon"] = config.get("options", "git_clone_uri_anon") context["git_clone_uri_priv"] = config.get("options", "git_clone_uri_priv") diff --git a/aurweb/routers/packages.py b/aurweb/routers/packages.py index bc12455d..f14b0ad8 100644 --- a/aurweb/routers/packages.py +++ b/aurweb/routers/packages.py @@ -2,7 +2,7 @@ from collections import defaultdict from http import HTTPStatus from typing import Any, Dict, List -from fastapi import APIRouter, Form, Request, Response +from fastapi import APIRouter, Form, Query, Request, Response import aurweb.filters # noqa: F401 @@ -33,7 +33,7 @@ async def packages_get(request: Request, context: Dict[str, Any], context["O"] = offset # Limit PP to options.max_search_results - max_search_results = aurweb.config.getint("options", "max_search_results") + max_search_results = config.getint("options", "max_search_results") context["PP"] = per_page = min(per_page, max_search_results) # Query search by. @@ -123,7 +123,22 @@ async def packages(request: Request) -> Response: @router.get("/packages/{name}") -async def package(request: Request, name: str) -> Response: +async def package(request: Request, name: str, + all_deps: bool = Query(default=False), + all_reqs: bool = Query(default=False)) -> Response: + """ + Get a package by name. + + By default, we limit the number of depends and requires results + to 20. To bypass this and load all of them, follow + the "Show more" link near the limited listing.
+ + :param name: Package.Name + :param all_deps: Boolean indicating whether we should load all depends + :param all_reqs: Boolean indicating whether we should load all requires + :return: FastAPI Response + """ + # Get the Package. pkg = get_pkg_or_base(name, models.Package) pkgbase = pkg.PackageBase @@ -139,23 +154,41 @@ async def package(request: Request, name: str) -> Response: rels_data["r"].append(rel) # Add our base information. - context = pkgbaseutil.make_context(request, pkgbase) + context = await pkgbaseutil.make_variable_context(request, pkgbase) + + context.update( + { + "all_deps": all_deps, + "all_reqs": all_reqs + } + ) + context["package"] = pkg # Package sources. context["sources"] = pkg.package_sources.order_by( models.PackageSource.Source.asc()).all() + # Listing metadata. + context["max_listing"] = max_listing = 20 + # Package dependencies. - max_depends = config.getint("options", "max_depends") - context["dependencies"] = pkg.package_dependencies.order_by( + deps = pkg.package_dependencies.order_by( models.PackageDependency.DepTypeID.asc(), models.PackageDependency.DepName.asc() - ).limit(max_depends).all() + ) + context["depends_count"] = deps.count() + if not all_deps: + deps = deps.limit(max_listing) + context["dependencies"] = deps.all() # Package requirements (other packages depend on this one). 
- context["required_by"] = pkgutil.pkg_required( - pkg.Name, [p.RelName for p in rels_data.get("p", [])], max_depends) + reqs = pkgutil.pkg_required( + pkg.Name, [p.RelName for p in rels_data.get("p", [])]) + context["reqs_count"] = reqs.count() + if not all_reqs: + reqs = reqs.limit(max_listing) + context["required_by"] = reqs.all() context["licenses"] = pkg.package_licenses diff --git a/aurweb/templates.py b/aurweb/templates.py index ccadb16d..6520bedf 100644 --- a/aurweb/templates.py +++ b/aurweb/templates.py @@ -100,6 +100,8 @@ async def make_variable_context(request: Request, title: str, next: str = None): for k, v in to_copy.items(): context[k] = v + context["q"] = dict(request.query_params) + return context diff --git a/po/aurweb.pot b/po/aurweb.pot index e7c632e3..bc4bab84 100644 --- a/po/aurweb.pot +++ b/po/aurweb.pot @@ -2338,3 +2338,11 @@ msgstr "" #: templates/partials/tu/proposal/details.html msgid "assigned" msgstr "" + +#: templates/partials/packages/package_metadata.html msgid "Show %d more" msgstr "" + +#: templates/partials/packages/package_metadata.html msgid "dependencies" msgstr "" diff --git a/templates/partials/packages/package_metadata.html b/templates/partials/packages/package_metadata.html index 6f58c2be..123b994d 100644 --- a/templates/partials/packages/package_metadata.html +++ b/templates/partials/packages/package_metadata.html @@ -1,5 +1,5 @@
-

{{ "Dependencies" | tr }} ({{ dependencies | length }})

+

{{ "Dependencies" | tr }} ({{ depends_count }})

-

{{ "Required by" | tr }} ({{ required_by | length }})

+

{{ "Required by" | tr }} ({{ reqs_count }})

diff --git a/test/test_packages_routes.py b/test/test_packages_routes.py index ee837912..e4c992af 100644 --- a/test/test_packages_routes.py +++ b/test/test_packages_routes.py @@ -276,6 +276,51 @@ def test_package(client: TestClient, package: Package): assert conflicts[0].text.strip() == ", ".join(expected) + +def paged_depends_required(client: TestClient, package: Package): + maint = package.PackageBase.Maintainer + new_pkgs = [] + + with db.begin(): + # Create 26 new packages that'll be used to depend on our package. + for i in range(26): + base = db.create(PackageBase, Name=f"new_pkg{i}", Maintainer=maint) + new_pkgs.append(db.create(Package, Name=base.Name)) + + # Create 25 deps. + for i in range(25): + create_package_dep(package, f"dep_{i}") + + with db.begin(): + # Create depends on this package so we get some required by listings. + for new_pkg in new_pkgs: + create_package_dep(new_pkg, package.Name) + + with client as request: + resp = request.get(package_endpoint(package)) + assert resp.status_code == int(HTTPStatus.OK) + + # Test depends show link. + assert "Show 5 more" in resp.text + + # Test required by show more link, we added 26 packages. + assert "Show 6 more" in resp.text + + # Follow both links at the same time. + with client as request: + resp = request.get( + package_endpoint(package), + params={ + "all_deps": True, + "all_reqs": True, + } + ) + assert resp.status_code == int(HTTPStatus.OK) + + # We should see everything and have no link.
+ assert "Show 5 more" not in resp.text + assert "Show 6 more" not in resp.text + + def test_package_comments(client: TestClient, user: User, package: Package): now = (time.utcnow()) with db.begin(): From cf4295a13e43dcc0daea9a9bb7cb54452b4c8b34 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Thu, 31 Mar 2022 17:45:39 -0700 Subject: [PATCH 033/415] upgrade: bump to v6.0.27 Signed-off-by: Kevin Morris --- aurweb/config.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aurweb/config.py b/aurweb/config.py index 53942b75..69d9b31f 100644 --- a/aurweb/config.py +++ b/aurweb/config.py @@ -6,7 +6,7 @@ from typing import Any # Publicly visible version of aurweb. This is used to display # aurweb versioning in the footer and must be maintained. # Todo: Make this dynamic/automated. -AURWEB_VERSION = "v6.0.26" +AURWEB_VERSION = "v6.0.27" _parser = None diff --git a/pyproject.toml b/pyproject.toml index b15af272..c50af62b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ # [tool.poetry] name = "aurweb" -version = "v6.0.26" +version = "v6.0.27" license = "GPL-2.0-only" description = "Source code for the Arch User Repository's website" homepage = "https://aur.archlinux.org" From a553d5d95adb9339ca1ba62fcb375ab34e02d013 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Thu, 31 Mar 2022 20:45:59 -0700 Subject: [PATCH 034/415] fix: replace distutils.util.strtobool with our own Reference from github.com/PostHog/posthog/pull/4631/commits/341c28da0f6d33d6fb12fe443766a2d822ff0097 This fixes a deprecation warning regarding distutil's strtobool. 
Signed-off-by: Kevin Morris --- aurweb/util.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/aurweb/util.py b/aurweb/util.py index 6759794f..5138f7da 100644 --- a/aurweb/util.py +++ b/aurweb/util.py @@ -4,7 +4,6 @@ import secrets import string from datetime import datetime -from distutils.util import strtobool as _strtobool from http import HTTPStatus from subprocess import PIPE, Popen from typing import Callable, Iterable, List, Tuple, Union @@ -114,9 +113,9 @@ def sanitize_params(offset: str, per_page: str) -> Tuple[int, int]: def strtobool(value: Union[str, bool]) -> bool: - if isinstance(value, str): - return _strtobool(value or "False") - return value + if not value: + return False + return str(value).lower() in ("y", "yes", "t", "true", "on", "1") def file_hash(filepath: str, hash_function: Callable) -> str: From 7a525d769363a78c080e91b6cfee0b2e0b6df10b Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Thu, 31 Mar 2022 20:47:34 -0700 Subject: [PATCH 035/415] change: remove poetry-dynamic-versioning We've not been using this as it is and its now warning us about strtobool deprecation changes. Removing it for now. 
Signed-off-by: Kevin Morris --- poetry.lock | 69 ++++++++++++++++++-------------------------------- pyproject.toml | 1 - 2 files changed, 24 insertions(+), 46 deletions(-) diff --git a/poetry.lock b/poetry.lock index c9d0b38a..7744606e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -212,17 +212,6 @@ idna = ["idna (>=2.1,<4.0)"] trio = ["trio (>=0.14,<0.20)"] wmi = ["wmi (>=1.5.1,<2.0.0)"] -[[package]] -name = "dunamai" -version = "1.8.0" -description = "Dynamic version generation" -category = "main" -optional = false -python-versions = ">=3.5,<4.0" - -[package.dependencies] -packaging = ">=20.9" - [[package]] name = "email-validator" version = "1.1.3" @@ -627,19 +616,6 @@ python-versions = ">=3.6" dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] -name = "poetry-dynamic-versioning" -version = "0.13.1" -description = "Plugin for Poetry to enable dynamic versioning based on VCS tags" -category = "main" -optional = false -python-versions = ">=3.5,<4.0" - -[package.dependencies] -dunamai = ">=1.5,<2.0" -jinja2 = {version = ">=2.11.1,<4", markers = "python_version >= \"3.6\" and python_version < \"4.0\""} -tomlkit = ">=0.4" - [[package]] name = "posix-ipc" version = "1.0.5" @@ -1029,14 +1005,6 @@ category = "dev" optional = false python-versions = ">=3.7" -[[package]] -name = "tomlkit" -version = "0.9.0" -description = "Style preserving TOML library" -category = "main" -optional = false -python-versions = ">=3.6,<4.0" - [[package]] name = "typing-extensions" version = "4.0.1" @@ -1119,7 +1087,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.11" -content-hash = "1f6a0dd3780d8857ba0d5123814f299a8178a80e79c2235805623f43b8e0381f" +content-hash = "ffe7ab6733020584382d2d01950153072a46d0738f6d2fe52ac84653d0b16086" [metadata.files] aiofiles = [ @@ -1151,10 +1119,13 @@ authlib = [ {file = "Authlib-0.15.5.tar.gz", hash = 
"sha256:b83cf6360c8e92b0e9df0d1f32d675790bcc4e3c03977499b1eed24dcdef4252"}, ] bcrypt = [ + {file = "bcrypt-3.2.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b589229207630484aefe5899122fb938a5b017b0f4349f769b8c13e78d99a8fd"}, {file = "bcrypt-3.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c95d4cbebffafcdd28bd28bb4e25b31c50f6da605c81ffd9ad8a3d1b2ab7b1b6"}, {file = "bcrypt-3.2.0-cp36-abi3-manylinux1_x86_64.whl", hash = "sha256:63d4e3ff96188e5898779b6057878fecf3f11cfe6ec3b313ea09955d587ec7a7"}, {file = "bcrypt-3.2.0-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:cd1ea2ff3038509ea95f687256c46b79f5fc382ad0aa3664d200047546d511d1"}, {file = "bcrypt-3.2.0-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:cdcdcb3972027f83fe24a48b1e90ea4b584d35f1cc279d76de6fc4b13376239d"}, + {file = "bcrypt-3.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a0584a92329210fcd75eb8a3250c5a941633f8bfaf2a18f81009b097732839b7"}, + {file = "bcrypt-3.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:56e5da069a76470679f312a7d3d23deb3ac4519991a0361abc11da837087b61d"}, {file = "bcrypt-3.2.0-cp36-abi3-win32.whl", hash = "sha256:a67fb841b35c28a59cebed05fbd3e80eea26e6d75851f0574a9273c80f3e9b55"}, {file = "bcrypt-3.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:81fec756feff5b6818ea7ab031205e1d323d8943d237303baca2c5f9c7846f34"}, {file = "bcrypt-3.2.0.tar.gz", hash = "sha256:5b93c1726e50a93a033c36e5ca7fdcd29a5c7395af50a6892f5d9e7c6cfbfb29"}, @@ -1300,10 +1271,6 @@ dnspython = [ {file = "dnspython-2.2.0-py3-none-any.whl", hash = "sha256:081649da27ced5e75709a1ee542136eaba9842a0fe4c03da4fb0a3d3ed1f3c44"}, {file = "dnspython-2.2.0.tar.gz", hash = "sha256:e79351e032d0b606b98d38a4b0e6e2275b31a5b85c873e587cc11b73aca026d6"}, ] -dunamai = [ - {file = "dunamai-1.8.0-py3-none-any.whl", hash = "sha256:846855e45d5969f6d11835d486bbf4d6ca175d4169a0ab11f619a5135cc86bdf"}, - {file = "dunamai-1.8.0.tar.gz", hash = 
"sha256:ff1f958af3575ec612e72c84bf96367469f418d31b9685f8311a5de2eb754a85"}, -] email-validator = [ {file = "email_validator-1.1.3-py2.py3-none-any.whl", hash = "sha256:5675c8ceb7106a37e40e2698a57c056756bf3f272cfa8682a4f87ebd95d8440b"}, {file = "email_validator-1.1.3.tar.gz", hash = "sha256:aa237a65f6f4da067119b7df3f13e89c25c051327b2b5b66dc075f33d62480d7"}, @@ -1343,6 +1310,7 @@ greenlet = [ {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97e5306482182170ade15c4b0d8386ded995a07d7cc2ca8f27958d34d6736497"}, {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6a36bb9474218c7a5b27ae476035497a6990e21d04c279884eb10d9b290f1b1"}, {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abb7a75ed8b968f3061327c433a0fbd17b729947b400747c334a9c29a9af6c58"}, + {file = "greenlet-1.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b336501a05e13b616ef81ce329c0e09ac5ed8c732d9ba7e3e983fcc1a9e86965"}, {file = "greenlet-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:14d4f3cd4e8b524ae9b8aa567858beed70c392fdec26dbdb0a8a418392e71708"}, {file = "greenlet-1.1.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:17ff94e7a83aa8671a25bf5b59326ec26da379ace2ebc4411d690d80a7fbcf23"}, {file = "greenlet-1.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9f3cba480d3deb69f6ee2c1825060177a22c7826431458c697df88e6aeb3caee"}, @@ -1355,6 +1323,7 @@ greenlet = [ {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9d29ca8a77117315101425ec7ec2a47a22ccf59f5593378fc4077ac5b754fce"}, {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21915eb821a6b3d9d8eefdaf57d6c345b970ad722f856cd71739493ce003ad08"}, {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:eff9d20417ff9dcb0d25e2defc2574d10b491bf2e693b4e491914738b7908168"}, + {file = "greenlet-1.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b8c008de9d0daba7b6666aa5bbfdc23dcd78cafc33997c9b7741ff6353bafb7f"}, {file = "greenlet-1.1.2-cp36-cp36m-win32.whl", hash = "sha256:32ca72bbc673adbcfecb935bb3fb1b74e663d10a4b241aaa2f5a75fe1d1f90aa"}, {file = "greenlet-1.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f0214eb2a23b85528310dad848ad2ac58e735612929c8072f6093f3585fd342d"}, {file = "greenlet-1.1.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:b92e29e58bef6d9cfd340c72b04d74c4b4e9f70c9fa7c78b674d1fec18896dc4"}, @@ -1363,6 +1332,7 @@ greenlet = [ {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e12bdc622676ce47ae9abbf455c189e442afdde8818d9da983085df6312e7a1"}, {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c790abda465726cfb8bb08bd4ca9a5d0a7bd77c7ac1ca1b839ad823b948ea28"}, {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f276df9830dba7a333544bd41070e8175762a7ac20350786b322b714b0e654f5"}, + {file = "greenlet-1.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c5d5b35f789a030ebb95bff352f1d27a93d81069f2adb3182d99882e095cefe"}, {file = "greenlet-1.1.2-cp37-cp37m-win32.whl", hash = "sha256:64e6175c2e53195278d7388c454e0b30997573f3f4bd63697f88d855f7a6a1fc"}, {file = "greenlet-1.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b11548073a2213d950c3f671aa88e6f83cda6e2fb97a8b6317b1b5b33d850e06"}, {file = "greenlet-1.1.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:9633b3034d3d901f0a46b7939f8c4d64427dfba6bbc5a36b1a67364cf148a1b0"}, @@ -1371,6 +1341,7 @@ greenlet = [ {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e859fcb4cbe93504ea18008d1df98dee4f7766db66c435e4882ab35cf70cac43"}, {file = 
"greenlet-1.1.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00e44c8afdbe5467e4f7b5851be223be68adb4272f44696ee71fe46b7036a711"}, {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec8c433b3ab0419100bd45b47c9c8551248a5aee30ca5e9d399a0b57ac04651b"}, + {file = "greenlet-1.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2bde6792f313f4e918caabc46532aa64aa27a0db05d75b20edfc5c6f46479de2"}, {file = "greenlet-1.1.2-cp38-cp38-win32.whl", hash = "sha256:288c6a76705dc54fba69fbcb59904ae4ad768b4c768839b8ca5fdadec6dd8cfd"}, {file = "greenlet-1.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:8d2f1fb53a421b410751887eb4ff21386d119ef9cde3797bf5e7ed49fb51a3b3"}, {file = "greenlet-1.1.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:166eac03e48784a6a6e0e5f041cfebb1ab400b394db188c48b3a84737f505b67"}, @@ -1379,6 +1350,7 @@ greenlet = [ {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1692f7d6bc45e3200844be0dba153612103db241691088626a33ff1f24a0d88"}, {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7227b47e73dedaa513cdebb98469705ef0d66eb5a1250144468e9c3097d6b59b"}, {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ff61ff178250f9bb3cd89752df0f1dd0e27316a8bd1465351652b1b4a4cdfd3"}, + {file = "greenlet-1.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0051c6f1f27cb756ffc0ffbac7d2cd48cb0362ac1736871399a739b2885134d3"}, {file = "greenlet-1.1.2-cp39-cp39-win32.whl", hash = "sha256:f70a9e237bb792c7cc7e44c531fd48f5897961701cdaa06cf22fc14965c496cf"}, {file = "greenlet-1.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:013d61294b6cd8fe3242932c1c5e36e5d1db2c8afb58606c5a67efce62c1f5fd"}, {file = "greenlet-1.1.2.tar.gz", hash = "sha256:e30f5ea4ae2346e62cedde8794a56858a67b878dd79f7df76a0767e356b1744a"}, @@ -1515,6 +1487,9 @@ markupsafe = [ {file = 
"MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, @@ -1526,6 +1501,9 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, @@ -1537,6 +1515,9 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, 
{file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, @@ -1549,6 +1530,9 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, @@ -1561,6 +1545,9 @@ markupsafe = [ {file = 
"MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, @@ -1619,10 +1606,6 @@ pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] -poetry-dynamic-versioning = [ - {file = "poetry-dynamic-versioning-0.13.1.tar.gz", hash = "sha256:5c0e7b22560db76812057ef95dadad662ecc63eb270145787eabe73da7c222f9"}, - {file = "poetry_dynamic_versioning-0.13.1-py3-none-any.whl", hash = "sha256:6d79f76436c624653fc06eb9bb54fb4f39b1d54362bc366ad2496855711d3a78"}, -] posix-ipc 
= [ {file = "posix_ipc-1.0.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ccb36ba90efec56a1796f1566eee9561f355a4f45babbc4d18ac46fb2d0b246b"}, {file = "posix_ipc-1.0.5-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:613bf1afe90e84c06255ec1a6f52c9b24062492de66e5f0dbe068adf67fc3454"}, @@ -1876,10 +1859,6 @@ tomli = [ {file = "tomli-2.0.0-py3-none-any.whl", hash = "sha256:b5bde28da1fed24b9bd1d4d2b8cba62300bfb4ec9a6187a957e8ddb9434c5224"}, {file = "tomli-2.0.0.tar.gz", hash = "sha256:c292c34f58502a1eb2bbb9f5bbc9a5ebc37bee10ffb8c2d6bbdfa8eb13cc14e1"}, ] -tomlkit = [ - {file = "tomlkit-0.9.0-py3-none-any.whl", hash = "sha256:c1b0fc73abd4f1e77c29ea4061ca0f2e11cbfb77342e17df3d3fdd496fc3f899"}, - {file = "tomlkit-0.9.0.tar.gz", hash = "sha256:5a83672c565f78f5fc8f1e44e5f2726446cc6b765113efd21d03e9331747d9ab"}, -] typing-extensions = [ {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, diff --git a/pyproject.toml b/pyproject.toml index c50af62b..001e0287 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,7 +47,6 @@ python = ">=3.9,<3.11" # poetry-dynamic-versioning is used to produce tool.poetry.version # based on git tags. 
-poetry-dynamic-versioning = "^0.13.1" # General aiofiles = "^0.7.0" From 02d114d575a72c0ec5038c762cddd8f1424e2c12 Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Fri, 6 May 2022 18:30:29 +0100 Subject: [PATCH 036/415] fix: hide email when account's email hidden is set Fixes: 362 Signed-off-by: Leonidas Spyropoulos --- templates/account/show.html | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/templates/account/show.html b/templates/account/show.html index a9bb3c30..a57efb77 100644 --- a/templates/account/show.html +++ b/templates/account/show.html @@ -25,7 +25,11 @@ {% trans %}Email Address{% endtrans %}: + {% if not user.HideEmail %} {{ user.Email }} + {% else %} + <{% trans %}hidden{% endtrans %}> + {% endif %} From 0b544885636fead56551b9f229400e7abacd0d73 Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Thu, 12 May 2022 23:26:57 +0100 Subject: [PATCH 037/415] fix(poetry): remove mysql-connector dependency Reverting a8287921 Signed-off-by: Leonidas Spyropoulos --- poetry.lock | 12 ------------ pyproject.toml | 1 - 2 files changed, 13 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7744606e..fe1575a6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -553,14 +553,6 @@ category = "dev" optional = false python-versions = "*" -[[package]] -name = "mysql-connector" -version = "2.2.9" -description = "MySQL driver written in Python" -category = "main" -optional = false -python-versions = "*" - [[package]] name = "mysqlclient" version = "2.1.0" @@ -943,7 +935,6 @@ mssql_pymssql = ["pymssql"] mssql_pyodbc = ["pyodbc"] mypy = ["sqlalchemy2-stubs", "mypy (>=0.910)"] mysql = ["mysqlclient (>=1.4.0,<2)", "mysqlclient (>=1.4.0)"] -mysql_connector = ["mysql-connector-python"] oracle = ["cx_oracle (>=7,<8)", "cx_oracle (>=7)"] postgresql = ["psycopg2 (>=2.7)"] postgresql_asyncpg = ["greenlet (!=0.4.17)", "asyncpg"] @@ -1556,9 +1547,6 @@ mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = 
"sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] -mysql-connector = [ - {file = "mysql-connector-2.2.9.tar.gz", hash = "sha256:1733e6ce52a049243de3264f1fbc22a852cb35458c4ad739ba88189285efdf32"}, -] mysqlclient = [ {file = "mysqlclient-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:02c8826e6add9b20f4cb12dcf016485f7b1d6e30356a1204d05431867a1b3947"}, {file = "mysqlclient-2.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:b62d23c11c516cedb887377c8807628c1c65d57593b57853186a6ee18b0c6a5b"}, diff --git a/pyproject.toml b/pyproject.toml index 001e0287..9ba73c2d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -80,7 +80,6 @@ SQLAlchemy = "^1.4.26" uvicorn = "^0.15.0" gunicorn = "^20.1.0" Hypercorn = "^0.11.2" -mysql-connector = "^2.2.9" prometheus-fastapi-instrumentator = "^5.7.1" pytest-xdist = "^2.4.0" filelock = "^3.3.2" From 4ddd1dec9c1d19481593f4095ba30de7b6d22cde Mon Sep 17 00:00:00 2001 From: Kristian Klausen Date: Fri, 13 May 2022 00:37:34 +0200 Subject: [PATCH 038/415] upgrade: bump to v6.0.28 --- aurweb/config.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aurweb/config.py b/aurweb/config.py index 69d9b31f..6069910f 100644 --- a/aurweb/config.py +++ b/aurweb/config.py @@ -6,7 +6,7 @@ from typing import Any # Publicly visible version of aurweb. This is used to display # aurweb versioning in the footer and must be maintained. # Todo: Make this dynamic/automated. 
-AURWEB_VERSION = "v6.0.27" +AURWEB_VERSION = "v6.0.28" _parser = None diff --git a/pyproject.toml b/pyproject.toml index 9ba73c2d..41d8301f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ # [tool.poetry] name = "aurweb" -version = "v6.0.27" +version = "v6.0.28" license = "GPL-2.0-only" description = "Source code for the Arch User Repository's website" homepage = "https://aur.archlinux.org" From 8598ea6f748405de5678e4f6c17b95afaa9df886 Mon Sep 17 00:00:00 2001 From: Jelle van der Waa Date: Mon, 27 Jun 2022 20:52:43 +0200 Subject: [PATCH 039/415] fix(gitlab-ci): update coverage reporting in CI Gitlab 14.10 introduced a coverage_report key which obsoletes the old way of reporting coverage data. --- .gitlab-ci.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index c5554e92..98f99ae3 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -53,7 +53,9 @@ test: coverage: '/TOTAL.*\s+(\d+\%)/' artifacts: reports: - cobertura: coverage.xml + coverage_report: + coverage_format: cobertura + path: coverage.xml deploy: stage: deploy From 98f55879d37be1ffbdcf9861ef70410317f90af2 Mon Sep 17 00:00:00 2001 From: Jelle van der Waa Date: Tue, 28 Jun 2022 22:07:00 +0200 Subject: [PATCH 040/415] fix(docker): don't run redis with protected mode For our development setup we run a redis container without a username/password. Redis recently set protected mode by default which disallows this, turn it off as it has no security implication. 
--- docker/redis-entrypoint.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/redis-entrypoint.sh b/docker/redis-entrypoint.sh index e92be6c5..669716d7 100755 --- a/docker/redis-entrypoint.sh +++ b/docker/redis-entrypoint.sh @@ -2,5 +2,6 @@ set -eou pipefail sed -ri 's/^bind .*$/bind 0.0.0.0 -::1/g' /etc/redis/redis.conf +sed -ri 's/protected-mode yes/protected-mode no/g' /etc/redis/redis.conf exec "$@" From ade624c215989532c9536cebc4a17000999974f3 Mon Sep 17 00:00:00 2001 From: Jelle van der Waa Date: Mon, 27 Jun 2022 20:48:18 +0200 Subject: [PATCH 041/415] doc(README): update contributing guidelines --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 3f156455..2741efa2 100644 --- a/README.md +++ b/README.md @@ -44,7 +44,7 @@ Links ----- * The repository is hosted at https://gitlab.archlinux.org/archlinux/aurweb - -- see doc/CodingGuidelines for information on the patch submission process. + -- see [CONTRIBUTING.md](./CONTRIBUTING.md) for information on the patch submission process. * Bugs can (and should) be submitted to the aurweb bug tracker: https://gitlab.archlinux.org/archlinux/aurweb/-/issues/new?issuable_template=Bug From edef6cc6ac01b68d18fe8d7e7c948fc3be13b36b Mon Sep 17 00:00:00 2001 From: Jelle van der Waa Date: Thu, 30 Jun 2022 21:57:52 +0200 Subject: [PATCH 042/415] chore(css): drop old vendor prefixes All of these vendor prefixes are already supported by all browsers for quite a while. 
--- web/html/css/aurweb.css | 5 ----- 1 file changed, 5 deletions(-) diff --git a/web/html/css/aurweb.css b/web/html/css/aurweb.css index 59ae7216..281b8f59 100644 --- a/web/html/css/aurweb.css +++ b/web/html/css/aurweb.css @@ -125,13 +125,11 @@ } .rss-icon, .delete-comment, .undelete-comment, .edit-comment, .pin-comment { - -webkit-filter: grayscale(100%); filter: grayscale(100%); opacity: 0.6; } .rss-icon:hover, .delete-comment:hover, .undelete-comment:hover, .edit-comment:hover, .pin-comment:hover { - -webkit-filter: none; filter: none; opacity: 1; } @@ -277,9 +275,6 @@ div.box form.link button { pre.traceback { /* https://css-tricks.com/snippets/css/make-pre-text-wrap/ */ white-space: pre-wrap; - white-space: -moz-pre-wrap; - white-space: -pre-wrap; - white-space: -o-pre-wrap; word-wrap: break-all; } From 4a58e1349cb34844c8f706cdedf902ef66adb8d8 Mon Sep 17 00:00:00 2001 From: Jelle van der Waa Date: Mon, 4 Jul 2022 21:35:06 +0200 Subject: [PATCH 043/415] fix(docker): fix typo scheme -> schema --- docker/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/README.md b/docker/README.md index 89dbb739..81628a39 100644 --- a/docker/README.md +++ b/docker/README.md @@ -46,7 +46,7 @@ container running the FastAPI. Then: ```sh docker exec -it /bin/bash -./scheme/gendummydata.py dummy.sql +./schema/gendummydata.py dummy.sql mysql aurweb < dummy.sql ``` From 0b03a6871e288c837e273cf5577e8d81dc7b44fd Mon Sep 17 00:00:00 2001 From: Jelle van der Waa Date: Mon, 4 Jul 2022 21:35:41 +0200 Subject: [PATCH 044/415] fix(docker): document runtime deps --- docker/README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/README.md b/docker/README.md index 81628a39..88fb763e 100644 --- a/docker/README.md +++ b/docker/README.md @@ -46,6 +46,7 @@ container running the FastAPI. 
Then: ```sh docker exec -it /bin/bash +pacman -S words fortune-mod ./schema/gendummydata.py dummy.sql mysql aurweb < dummy.sql ``` From 034e47bc282a43b661b6d5db2759b4c8ca723a3f Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Thu, 19 May 2022 13:13:36 +0100 Subject: [PATCH 045/415] fix: hide Unflag package from non-maintainers Closes: #364 Signed-off-by: Leonidas Spyropoulos --- aurweb/pkgbase/actions.py | 4 ++-- aurweb/pkgbase/util.py | 3 +++ templates/partials/packages/actions.html | 2 ++ 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/aurweb/pkgbase/actions.py b/aurweb/pkgbase/actions.py index 229d52b9..6fd55497 100644 --- a/aurweb/pkgbase/actions.py +++ b/aurweb/pkgbase/actions.py @@ -38,8 +38,8 @@ def pkgbase_unnotify_instance(request: Request, pkgbase: PackageBase) -> None: def pkgbase_unflag_instance(request: Request, pkgbase: PackageBase) -> None: - has_cred = request.user.has_credential( - creds.PKGBASE_UNFLAG, approved=[pkgbase.Flagger, pkgbase.Maintainer]) + has_cred = request.user.has_credential(creds.PKGBASE_UNFLAG, approved=[ + pkgbase.Flagger, pkgbase.Maintainer] + [c.User for c in pkgbase.comaintainers]) if has_cred: with db.begin(): pkgbase.OutOfDateTS = None diff --git a/aurweb/pkgbase/util.py b/aurweb/pkgbase/util.py index ea952dce..55dbb022 100644 --- a/aurweb/pkgbase/util.py +++ b/aurweb/pkgbase/util.py @@ -39,6 +39,9 @@ def make_context(request: Request, pkgbase: PackageBase, PackageComaintainer.Priority.asc() ).all() ] + context["unflaggers"] = context["comaintainers"].copy() + context["unflaggers"].append(pkgbase.Maintainer) + context["packages_count"] = pkgbase.packages.count() context["keywords"] = pkgbase.keywords context["comments"] = pkgbase.comments.order_by( diff --git a/templates/partials/packages/actions.html b/templates/partials/packages/actions.html index 88420222..2144b07a 100644 --- a/templates/partials/packages/actions.html +++ b/templates/partials/packages/actions.html @@ -41,6 +41,7 @@ + {% if 
request.user.has_credential(creds.PKGBASE_UNFLAG, approved=unflaggers) %}
  • + {% endif %} {% endif %}
  • {% if not voted %} From 28970ccc9179d210781fedabcf42cc04332cd1ec Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Sun, 17 Jul 2022 12:19:18 +0100 Subject: [PATCH 046/415] fix: align text on left Closes: #368 Signed-off-by: Leonidas Spyropoulos --- templates/tu/index.html | 4 ++-- web/html/css/aurweb.css | 5 ----- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/templates/tu/index.html b/templates/tu/index.html index 4c7a3c35..9f5bfd50 100644 --- a/templates/tu/index.html +++ b/templates/tu/index.html @@ -6,11 +6,11 @@ - + - + diff --git a/web/html/css/aurweb.css b/web/html/css/aurweb.css index 281b8f59..59f7ed1e 100644 --- a/web/html/css/aurweb.css +++ b/web/html/css/aurweb.css @@ -278,11 +278,6 @@ pre.traceback { word-wrap: break-all; } -/* A text aligning alias. */ -.text-right { - text-align: right; -} - /* By default, tables use 100% width, which we do not always want. */ table.no-width { width: auto; From d6fa4ec5a8d76b6f791bb6d855eb267661baa012 Mon Sep 17 00:00:00 2001 From: Hugo Osvaldo Barrera Date: Tue, 19 Jul 2022 18:29:26 +0200 Subject: [PATCH 047/415] Explain how to populate dummy data for TESTING Signed-off-by: Hugo Osvaldo Barrera --- TESTING | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/TESTING b/TESTING index 776be2f4..cb34c0e9 100644 --- a/TESTING +++ b/TESTING @@ -31,6 +31,16 @@ docker-compose Python: https://localhost:8444/ PHP: https://localhost:8443/ +5) [Optionally] populate the database with dummy data: + + $ docker-compose up mariadb + $ docker-compose exec mariadb /bin/sh + # pacman -S --noconfirm words fortune-mod + # poetry run schema/gendummydata.py dummy_data.sql + # mysql -uaur -paur aurweb < dummy_data.sql + +Inspect `dummy_data.sql` for test credentials. Passwords match usernames. 
+ Bare Metal ---------- From a509e4047483763316d3b06a4dfe3c2004455aff Mon Sep 17 00:00:00 2001 From: Jelle van der Waa Date: Sun, 31 Jul 2022 20:58:39 +0200 Subject: [PATCH 048/415] fix(python): use standard dict/list type annotation Since Python 3.9 list/dict can be used as type hint. --- aurweb/filters.py | 20 +++++----- aurweb/models/package_dependency.py | 4 +- aurweb/models/user.py | 4 +- aurweb/packages/requests.py | 10 ++--- aurweb/packages/util.py | 18 ++++----- aurweb/pkgbase/actions.py | 4 +- aurweb/pkgbase/util.py | 10 ++--- aurweb/pkgbase/validate.py | 4 +- aurweb/prometheus.py | 4 +- aurweb/routers/accounts.py | 4 +- aurweb/routers/packages.py | 24 ++++++------ aurweb/routers/rpc.py | 8 ++-- aurweb/routers/trusted_user.py | 4 +- aurweb/rpc.py | 44 ++++++++++----------- aurweb/scripts/mkpkglists.py | 4 +- aurweb/scripts/popupdate.py | 4 +- aurweb/spawn.py | 10 ++--- aurweb/testing/alpm.py | 4 +- aurweb/testing/html.py | 5 +-- aurweb/testing/requests.py | 6 +-- aurweb/users/update.py | 8 ++-- aurweb/util.py | 4 +- test/test_adduser.py | 3 +- test/test_mkpkglists.py | 7 ++-- test/test_notify.py | 35 +++++++++-------- test/test_packages_routes.py | 31 ++++++++------- test/test_pkgbase_routes.py | 5 +-- test/test_pkgmaint.py | 8 ++-- test/test_requests.py | 11 +++--- test/test_rpc.py | 59 ++++++++++++++--------------- test/test_templates.py | 4 +- 31 files changed, 175 insertions(+), 195 deletions(-) diff --git a/aurweb/filters.py b/aurweb/filters.py index 45cb6d83..22f65024 100644 --- a/aurweb/filters.py +++ b/aurweb/filters.py @@ -2,7 +2,7 @@ import copy import math from datetime import datetime -from typing import Any, Dict, Union +from typing import Any, Union from urllib.parse import quote_plus, urlencode from zoneinfo import ZoneInfo @@ -19,7 +19,7 @@ from aurweb.templates import register_filter, register_function @register_filter("pager_nav") @pass_context -def pager_nav(context: Dict[str, Any], +def pager_nav(context: dict[str, Any], page: int, 
total: int, prefix: str) -> str: page = int(page) # Make sure this is an int. @@ -71,7 +71,7 @@ def do_round(f: float) -> int: @register_filter("tr") @pass_context -def tr(context: Dict[str, Any], value: str): +def tr(context: dict[str, Any], value: str): """ A translation filter; example: {{ "Hello" | tr("de") }}. """ _ = l10n.get_translator_for_request(context.get("request")) return _(value) @@ -79,7 +79,7 @@ def tr(context: Dict[str, Any], value: str): @register_filter("tn") @pass_context -def tn(context: Dict[str, Any], count: int, +def tn(context: dict[str, Any], count: int, singular: str, plural: str) -> str: """ A singular and plural translation filter. @@ -107,7 +107,7 @@ def as_timezone(dt: datetime, timezone: str): @register_filter("extend_query") -def extend_query(query: Dict[str, Any], *additions) -> Dict[str, Any]: +def extend_query(query: dict[str, Any], *additions) -> dict[str, Any]: """ Add additional key value pairs to query. """ q = copy.copy(query) for k, v in list(additions): @@ -116,7 +116,7 @@ def extend_query(query: Dict[str, Any], *additions) -> Dict[str, Any]: @register_filter("urlencode") -def to_qs(query: Dict[str, Any]) -> str: +def to_qs(query: dict[str, Any]) -> str: return urlencode(query, doseq=True) @@ -134,7 +134,7 @@ def number_format(value: float, places: int): @register_filter("account_url") @pass_context -def account_url(context: Dict[str, Any], +def account_url(context: dict[str, Any], user: "aurweb.models.user.User") -> str: base = aurweb.config.get("options", "aur_location") return f"{base}/account/{user.Username}" @@ -152,7 +152,7 @@ def ceil(*args, **kwargs) -> int: @register_function("date_strftime") @pass_context -def date_strftime(context: Dict[str, Any], dt: Union[int, datetime], fmt: str) \ +def date_strftime(context: dict[str, Any], dt: Union[int, datetime], fmt: str) \ -> str: if isinstance(dt, int): dt = timestamp_to_datetime(dt) @@ -162,11 +162,11 @@ def date_strftime(context: Dict[str, Any], dt: Union[int, 
datetime], fmt: str) \ @register_function("date_display") @pass_context -def date_display(context: Dict[str, Any], dt: Union[int, datetime]) -> str: +def date_display(context: dict[str, Any], dt: Union[int, datetime]) -> str: return date_strftime(context, dt, "%Y-%m-%d (%Z)") @register_function("datetime_display") @pass_context -def datetime_display(context: Dict[str, Any], dt: Union[int, datetime]) -> str: +def datetime_display(context: dict[str, Any], dt: Union[int, datetime]) -> str: return date_strftime(context, dt, "%Y-%m-%d %H:%M (%Z)") diff --git a/aurweb/models/package_dependency.py b/aurweb/models/package_dependency.py index 2fd87f2a..67a7717f 100644 --- a/aurweb/models/package_dependency.py +++ b/aurweb/models/package_dependency.py @@ -1,5 +1,3 @@ -from typing import List - from sqlalchemy import and_, literal from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import backref, relationship @@ -60,7 +58,7 @@ class PackageDependency(Base): _OfficialProvider.Name == self.DepName).exists() return db.query(pkg).scalar() or db.query(official).scalar() - def provides(self) -> List[PackageRelation]: + def provides(self) -> list[PackageRelation]: from aurweb.models.relation_type import PROVIDES_ID rels = db.query(PackageRelation).join(_Package).filter( diff --git a/aurweb/models/user.py b/aurweb/models/user.py index c375fcbc..3fa72a85 100644 --- a/aurweb/models/user.py +++ b/aurweb/models/user.py @@ -1,6 +1,6 @@ import hashlib -from typing import List, Set +from typing import Set import bcrypt @@ -149,7 +149,7 @@ class User(Base): return self.session.SessionID def has_credential(self, credential: Set[int], - approved: List["User"] = list()): + approved: list["User"] = list()): from aurweb.auth.creds import has_credential return has_credential(self, credential, approved) diff --git a/aurweb/packages/requests.py b/aurweb/packages/requests.py index 6aaa59ab..42026a33 100644 --- a/aurweb/packages/requests.py +++ b/aurweb/packages/requests.py @@ -1,4 +1,4 @@ 
-from typing import List, Optional, Set +from typing import Optional, Set from fastapi import Request from sqlalchemy import and_, orm @@ -139,7 +139,7 @@ def close_pkgreq(pkgreq: PackageRequest, closer: User, def handle_request(request: Request, reqtype_id: int, pkgbase: PackageBase, - target: PackageBase = None) -> List[notify.Notification]: + target: PackageBase = None) -> list[notify.Notification]: """ Handle package requests before performing an action. @@ -158,7 +158,7 @@ def handle_request(request: Request, reqtype_id: int, :param pkgbase: PackageBase which the request is about :param target: Optional target to merge into """ - notifs: List[notify.Notification] = [] + notifs: list[notify.Notification] = [] # If it's an orphan request, perform further verification # regarding existing requests. @@ -187,13 +187,13 @@ def handle_request(request: Request, reqtype_id: int, PackageRequest.MergeBaseName == target.Name) # Build an accept list out of `accept_query`. - to_accept: List[PackageRequest] = accept_query.all() + to_accept: list[PackageRequest] = accept_query.all() accepted_ids: Set[int] = set(p.ID for p in to_accept) # Build a reject list out of `query` filtered by IDs not found # in `to_accept`. That is, unmatched records of the same base # query properties. 
- to_reject: List[PackageRequest] = query.filter( + to_reject: list[PackageRequest] = query.filter( ~PackageRequest.ID.in_(accepted_ids) ).all() diff --git a/aurweb/packages/util.py b/aurweb/packages/util.py index 5085ddf4..bd173065 100644 --- a/aurweb/packages/util.py +++ b/aurweb/packages/util.py @@ -1,6 +1,6 @@ from collections import defaultdict from http import HTTPStatus -from typing import Dict, List, Tuple, Union +from typing import Tuple, Union import orjson @@ -15,7 +15,7 @@ from aurweb.models.package_relation import PackageRelation from aurweb.redis import redis_connection from aurweb.templates import register_filter -Providers = List[Union[PackageRelation, OfficialProvider]] +Providers = list[Union[PackageRelation, OfficialProvider]] def dep_extra_with_arch(dep: models.PackageDependency, annotation: str) -> str: @@ -123,7 +123,7 @@ def out_of_date(packages: orm.Query) -> orm.Query: def updated_packages(limit: int = 0, - cache_ttl: int = 600) -> List[models.Package]: + cache_ttl: int = 600) -> list[models.Package]: """ Return a list of valid Package objects ordered by their ModifiedTS column in descending order from cache, after setting the cache when no key yet exists. @@ -168,8 +168,8 @@ def updated_packages(limit: int = 0, return packages -def query_voted(query: List[models.Package], - user: models.User) -> Dict[int, bool]: +def query_voted(query: list[models.Package], + user: models.User) -> dict[int, bool]: """ Produce a dictionary of package base ID keys to boolean values, which indicate whether or not the package base has a vote record related to user. 
@@ -191,8 +191,8 @@ def query_voted(query: List[models.Package], return output -def query_notified(query: List[models.Package], - user: models.User) -> Dict[int, bool]: +def query_notified(query: list[models.Package], + user: models.User) -> dict[int, bool]: """ Produce a dictionary of package base ID keys to boolean values, which indicate whether or not the package base has a notification record related to user. @@ -214,8 +214,8 @@ def query_notified(query: List[models.Package], return output -def pkg_required(pkgname: str, provides: List[str]) \ - -> List[PackageDependency]: +def pkg_required(pkgname: str, provides: list[str]) \ + -> list[PackageDependency]: """ Get dependencies that match a string in `[pkgname] + provides`. diff --git a/aurweb/pkgbase/actions.py b/aurweb/pkgbase/actions.py index 6fd55497..46609f89 100644 --- a/aurweb/pkgbase/actions.py +++ b/aurweb/pkgbase/actions.py @@ -1,5 +1,3 @@ -from typing import List - from fastapi import Request from aurweb import db, logging, util @@ -86,7 +84,7 @@ def pkgbase_adopt_instance(request: Request, pkgbase: PackageBase) -> None: def pkgbase_delete_instance(request: Request, pkgbase: PackageBase, comments: str = str()) \ - -> List[notify.Notification]: + -> list[notify.Notification]: notifs = handle_request(request, DELETION_ID, pkgbase) + [ notify.DeleteNotification(request.user.ID, pkgbase.ID) ] diff --git a/aurweb/pkgbase/util.py b/aurweb/pkgbase/util.py index 55dbb022..5a7d952a 100644 --- a/aurweb/pkgbase/util.py +++ b/aurweb/pkgbase/util.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List +from typing import Any from fastapi import Request from sqlalchemy import and_ @@ -15,13 +15,13 @@ from aurweb.templates import make_variable_context as _make_variable_context async def make_variable_context(request: Request, pkgbase: PackageBase) \ - -> Dict[str, Any]: + -> dict[str, Any]: ctx = await _make_variable_context(request, pkgbase.Name) return make_context(request, pkgbase, ctx) def make_context(request: 
Request, pkgbase: PackageBase, - context: Dict[str, Any] = None) -> Dict[str, Any]: + context: dict[str, Any] = None) -> dict[str, Any]: """ Make a basic context for package or pkgbase. :param request: FastAPI request @@ -89,7 +89,7 @@ def remove_comaintainer(comaint: PackageComaintainer) \ return notif -def remove_comaintainers(pkgbase: PackageBase, usernames: List[str]) -> None: +def remove_comaintainers(pkgbase: PackageBase, usernames: list[str]) -> None: """ Remove comaintainers from `pkgbase`. @@ -163,7 +163,7 @@ def add_comaintainer(pkgbase: PackageBase, comaintainer: User) \ def add_comaintainers(request: Request, pkgbase: PackageBase, - usernames: List[str]) -> None: + usernames: list[str]) -> None: """ Add comaintainers to `pkgbase`. diff --git a/aurweb/pkgbase/validate.py b/aurweb/pkgbase/validate.py index 8d05a3d7..baefc415 100644 --- a/aurweb/pkgbase/validate.py +++ b/aurweb/pkgbase/validate.py @@ -1,4 +1,4 @@ -from typing import Any, Dict +from typing import Any from aurweb import db from aurweb.exceptions import ValidationError @@ -7,7 +7,7 @@ from aurweb.models import PackageBase def request(pkgbase: PackageBase, type: str, comments: str, merge_into: str, - context: Dict[str, Any]) -> None: + context: dict[str, Any]) -> None: if not comments: raise ValidationError(["The comment field must not be empty."]) diff --git a/aurweb/prometheus.py b/aurweb/prometheus.py index 272ee023..227d46ed 100644 --- a/aurweb/prometheus.py +++ b/aurweb/prometheus.py @@ -1,4 +1,4 @@ -from typing import Any, Callable, Dict, List, Optional +from typing import Any, Callable, Optional from prometheus_client import Counter from prometheus_fastapi_instrumentator import Instrumentator @@ -19,7 +19,7 @@ def instrumentator(): # Their license is included in LICENSES/starlette_exporter. # The code has been modified to remove child route checks # (since we don't have any) and to stay within an 80-width limit. 
-def get_matching_route_path(scope: Dict[Any, Any], routes: List[Route], +def get_matching_route_path(scope: dict[Any, Any], routes: list[Route], route_name: Optional[str] = None) -> str: """ Find a matching route and return its original path string diff --git a/aurweb/routers/accounts.py b/aurweb/routers/accounts.py index b603d22a..dcac72b0 100644 --- a/aurweb/routers/accounts.py +++ b/aurweb/routers/accounts.py @@ -2,7 +2,7 @@ import copy import typing from http import HTTPStatus -from typing import Any, Dict +from typing import Any from fastapi import APIRouter, Form, Request from fastapi.responses import HTMLResponse, RedirectResponse @@ -108,7 +108,7 @@ async def passreset_post(request: Request, def process_account_form(request: Request, user: models.User, - args: Dict[str, Any]): + args: dict[str, Any]): """ Process an account form. All fields are optional and only checks requirements in the case they are present. diff --git a/aurweb/routers/packages.py b/aurweb/routers/packages.py index f14b0ad8..7bf4e3d4 100644 --- a/aurweb/routers/packages.py +++ b/aurweb/routers/packages.py @@ -1,6 +1,6 @@ from collections import defaultdict from http import HTTPStatus -from typing import Any, Dict, List +from typing import Any from fastapi import APIRouter, Form, Query, Request, Response @@ -21,7 +21,7 @@ logger = logging.get_logger(__name__) router = APIRouter() -async def packages_get(request: Request, context: Dict[str, Any], +async def packages_get(request: Request, context: dict[str, Any], status_code: HTTPStatus = HTTPStatus.OK): # Query parameters used in this request. 
context["q"] = dict(request.query_params) @@ -210,7 +210,7 @@ async def package(request: Request, name: str, return render_template(request, "packages/show.html", context) -async def packages_unflag(request: Request, package_ids: List[int] = [], +async def packages_unflag(request: Request, package_ids: list[int] = [], **kwargs): if not package_ids: return (False, ["You did not select any packages to unflag."]) @@ -236,7 +236,7 @@ async def packages_unflag(request: Request, package_ids: List[int] = [], return (True, ["The selected packages have been unflagged."]) -async def packages_notify(request: Request, package_ids: List[int] = [], +async def packages_notify(request: Request, package_ids: list[int] = [], **kwargs): # In cases where we encounter errors with the request, we'll # use this error tuple as a return value. @@ -275,7 +275,7 @@ async def packages_notify(request: Request, package_ids: List[int] = [], return (True, ["The selected packages' notifications have been enabled."]) -async def packages_unnotify(request: Request, package_ids: List[int] = [], +async def packages_unnotify(request: Request, package_ids: list[int] = [], **kwargs): if not package_ids: # TODO: This error does not yet have a translation. 
@@ -312,7 +312,7 @@ async def packages_unnotify(request: Request, package_ids: List[int] = [], return (True, ["The selected packages' notifications have been removed."]) -async def packages_adopt(request: Request, package_ids: List[int] = [], +async def packages_adopt(request: Request, package_ids: list[int] = [], confirm: bool = False, **kwargs): if not package_ids: return (False, ["You did not select any packages to adopt."]) @@ -345,8 +345,8 @@ async def packages_adopt(request: Request, package_ids: List[int] = [], return (True, ["The selected packages have been adopted."]) -def disown_all(request: Request, pkgbases: List[models.PackageBase]) \ - -> List[str]: +def disown_all(request: Request, pkgbases: list[models.PackageBase]) \ + -> list[str]: errors = [] for pkgbase in pkgbases: try: @@ -356,7 +356,7 @@ def disown_all(request: Request, pkgbases: List[models.PackageBase]) \ return errors -async def packages_disown(request: Request, package_ids: List[int] = [], +async def packages_disown(request: Request, package_ids: list[int] = [], confirm: bool = False, **kwargs): if not package_ids: return (False, ["You did not select any packages to disown."]) @@ -390,7 +390,7 @@ async def packages_disown(request: Request, package_ids: List[int] = [], return (True, ["The selected packages have been disowned."]) -async def packages_delete(request: Request, package_ids: List[int] = [], +async def packages_delete(request: Request, package_ids: list[int] = [], confirm: bool = False, merge_into: str = str(), **kwargs): if not package_ids: @@ -430,7 +430,7 @@ async def packages_delete(request: Request, package_ids: List[int] = [], # A mapping of action string -> callback functions used within the # `packages_post` route below. We expect any action callback to -# return a tuple in the format: (succeeded: bool, message: List[str]). +# return a tuple in the format: (succeeded: bool, message: list[str]). 
PACKAGE_ACTIONS = { "unflag": packages_unflag, "notify": packages_notify, @@ -445,7 +445,7 @@ PACKAGE_ACTIONS = { @handle_form_exceptions @requires_auth async def packages_post(request: Request, - IDs: List[int] = Form(default=[]), + IDs: list[int] = Form(default=[]), action: str = Form(default=str()), confirm: bool = Form(default=False)): diff --git a/aurweb/routers/rpc.py b/aurweb/routers/rpc.py index 49e98f8c..ff58063f 100644 --- a/aurweb/routers/rpc.py +++ b/aurweb/routers/rpc.py @@ -2,7 +2,7 @@ import hashlib import re from http import HTTPStatus -from typing import List, Optional +from typing import Optional from urllib.parse import unquote import orjson @@ -71,7 +71,7 @@ async def rpc_request(request: Request, type: Optional[str] = None, by: Optional[str] = defaults.RPC_SEARCH_BY, arg: Optional[str] = None, - args: Optional[List[str]] = [], + args: Optional[list[str]] = [], callback: Optional[str] = None): # Create a handle to our RPC class. @@ -140,7 +140,7 @@ async def rpc(request: Request, type: Optional[str] = Query(default=None), by: Optional[str] = Query(default=defaults.RPC_SEARCH_BY), arg: Optional[str] = Query(default=None), - args: Optional[List[str]] = Query(default=[], alias="arg[]"), + args: Optional[list[str]] = Query(default=[], alias="arg[]"), callback: Optional[str] = Query(default=None)): if not request.url.query: return documentation() @@ -157,6 +157,6 @@ async def rpc_post(request: Request, type: Optional[str] = Form(default=None), by: Optional[str] = Form(default=defaults.RPC_SEARCH_BY), arg: Optional[str] = Form(default=None), - args: Optional[List[str]] = Form(default=[], alias="arg[]"), + args: Optional[list[str]] = Form(default=[], alias="arg[]"), callback: Optional[str] = Form(default=None)): return await rpc_request(request, v, type, by, arg, args, callback) diff --git a/aurweb/routers/trusted_user.py b/aurweb/routers/trusted_user.py index 3f0eb836..e1267409 100644 --- a/aurweb/routers/trusted_user.py +++ 
b/aurweb/routers/trusted_user.py @@ -2,7 +2,7 @@ import html import typing from http import HTTPStatus -from typing import Any, Dict +from typing import Any from fastapi import APIRouter, Form, HTTPException, Request from fastapi.responses import RedirectResponse, Response @@ -34,7 +34,7 @@ ADDVOTE_SPECIFICS = { } -def populate_trusted_user_counts(context: Dict[str, Any]) -> None: +def populate_trusted_user_counts(context: dict[str, Any]) -> None: tu_query = db.query(User).filter( or_(User.AccountTypeID == TRUSTED_USER_ID, User.AccountTypeID == TRUSTED_USER_AND_DEV_ID) diff --git a/aurweb/rpc.py b/aurweb/rpc.py index 5bc6b80d..f04de7d6 100644 --- a/aurweb/rpc.py +++ b/aurweb/rpc.py @@ -1,7 +1,7 @@ import os from collections import defaultdict -from typing import Any, Callable, Dict, List, NewType, Union +from typing import Any, Callable, NewType, Union from fastapi.responses import HTMLResponse from sqlalchemy import and_, literal, orm @@ -24,7 +24,7 @@ TYPE_MAPPING = { } DataGenerator = NewType("DataGenerator", - Callable[[models.Package], Dict[str, Any]]) + Callable[[models.Package], dict[str, Any]]) def documentation(): @@ -86,7 +86,7 @@ class RPC: self.version = version self.type = RPC.TYPE_ALIASES.get(type, type) - def error(self, message: str) -> Dict[str, Any]: + def error(self, message: str) -> dict[str, Any]: return { "version": self.version, "results": [], @@ -95,7 +95,7 @@ class RPC: "error": message } - def _verify_inputs(self, by: str = [], args: List[str] = []) -> None: + def _verify_inputs(self, by: str = [], args: list[str] = []) -> None: if self.version is None: raise RPCError("Please specify an API version.") @@ -111,11 +111,11 @@ class RPC: if self.type not in RPC.EXPOSED_TYPES: raise RPCError("Incorrect request type specified.") - def _enforce_args(self, args: List[str]) -> None: + def _enforce_args(self, args: list[str]) -> None: if not args: raise RPCError("No request type/data specified.") - def _get_json_data(self, package: models.Package) 
-> Dict[str, Any]: + def _get_json_data(self, package: models.Package) -> dict[str, Any]: """ Produce dictionary data of one Package that can be JSON-serialized. :param package: Package instance @@ -146,7 +146,7 @@ class RPC: "LastModified": package.ModifiedTS } - def _get_info_json_data(self, package: models.Package) -> Dict[str, Any]: + def _get_info_json_data(self, package: models.Package) -> dict[str, Any]: data = self._get_json_data(package) # All info results have _at least_ an empty list of @@ -163,9 +163,9 @@ class RPC: return data - def _assemble_json_data(self, packages: List[models.Package], + def _assemble_json_data(self, packages: list[models.Package], data_generator: DataGenerator) \ - -> List[Dict[str, Any]]: + -> list[dict[str, Any]]: """ Assemble JSON data out of a list of packages. @@ -192,8 +192,8 @@ class RPC: models.User.Username.label("Maintainer"), ).group_by(models.Package.ID) - def _handle_multiinfo_type(self, args: List[str] = [], **kwargs) \ - -> List[Dict[str, Any]]: + def _handle_multiinfo_type(self, args: list[str] = [], **kwargs) \ + -> list[dict[str, Any]]: self._enforce_args(args) args = set(args) @@ -296,7 +296,7 @@ class RPC: return self._assemble_json_data(packages, self._get_info_json_data) def _handle_search_type(self, by: str = defaults.RPC_SEARCH_BY, - args: List[str] = []) -> List[Dict[str, Any]]: + args: list[str] = []) -> list[dict[str, Any]]: # If `by` isn't maintainer and we don't have any args, raise an error. # In maintainer's case, return all orphans if there are no args, # so we need args to pass through to the handler without errors. 
@@ -318,12 +318,12 @@ class RPC: return self._assemble_json_data(results, self._get_json_data) - def _handle_msearch_type(self, args: List[str] = [], **kwargs)\ - -> List[Dict[str, Any]]: + def _handle_msearch_type(self, args: list[str] = [], **kwargs)\ + -> list[dict[str, Any]]: return self._handle_search_type(by="m", args=args) - def _handle_suggest_type(self, args: List[str] = [], **kwargs)\ - -> List[str]: + def _handle_suggest_type(self, args: list[str] = [], **kwargs)\ + -> list[str]: if not args: return [] @@ -336,8 +336,8 @@ class RPC: ).order_by(models.Package.Name.asc()).limit(20) return [pkg.Name for pkg in packages] - def _handle_suggest_pkgbase_type(self, args: List[str] = [], **kwargs)\ - -> List[str]: + def _handle_suggest_pkgbase_type(self, args: list[str] = [], **kwargs)\ + -> list[str]: if not args: return [] @@ -351,16 +351,16 @@ class RPC: def _is_suggestion(self) -> bool: return self.type.startswith("suggest") - def _handle_callback(self, by: str, args: List[str])\ - -> Union[List[Dict[str, Any]], List[str]]: + def _handle_callback(self, by: str, args: list[str])\ + -> Union[list[dict[str, Any]], list[str]]: # Get a handle to our callback and trap an RPCError with # an empty list of results based on callback's execution. callback = getattr(self, f"_handle_{self.type.replace('-', '_')}_type") results = callback(by=by, args=args) return results - def handle(self, by: str = defaults.RPC_SEARCH_BY, args: List[str] = [])\ - -> Union[List[Dict[str, Any]], Dict[str, Any]]: + def handle(self, by: str = defaults.RPC_SEARCH_BY, args: list[str] = [])\ + -> Union[list[dict[str, Any]], dict[str, Any]]: """ Request entrypoint. A router should pass v, type and args to this function and expect an output dictionary to be returned. 
diff --git a/aurweb/scripts/mkpkglists.py b/aurweb/scripts/mkpkglists.py index 00096d74..888e346c 100755 --- a/aurweb/scripts/mkpkglists.py +++ b/aurweb/scripts/mkpkglists.py @@ -27,7 +27,7 @@ import sys import tempfile from collections import defaultdict -from typing import Any, Dict +from typing import Any import orjson @@ -151,7 +151,7 @@ EXTENDED_FIELD_HANDLERS = { } -def as_dict(package: Package) -> Dict[str, Any]: +def as_dict(package: Package) -> dict[str, Any]: return { "ID": package.ID, "Name": package.Name, diff --git a/aurweb/scripts/popupdate.py b/aurweb/scripts/popupdate.py index a2a796fd..637173eb 100755 --- a/aurweb/scripts/popupdate.py +++ b/aurweb/scripts/popupdate.py @@ -1,7 +1,5 @@ #!/usr/bin/env python3 -from typing import List - from sqlalchemy import and_, func from sqlalchemy.sql.functions import coalesce from sqlalchemy.sql.functions import sum as _sum @@ -10,7 +8,7 @@ from aurweb import db, time from aurweb.models import PackageBase, PackageVote -def run_variable(pkgbases: List[PackageBase] = []) -> None: +def run_variable(pkgbases: list[PackageBase] = []) -> None: """ Update popularity on a list of PackageBases. diff --git a/aurweb/spawn.py b/aurweb/spawn.py index 46f2f021..c7d54c4e 100644 --- a/aurweb/spawn.py +++ b/aurweb/spawn.py @@ -17,7 +17,7 @@ import sys import tempfile import time -from typing import Iterable, List +from typing import Iterable import aurweb.config import aurweb.schema @@ -204,8 +204,8 @@ def start(): """) -def _kill_children(children: Iterable, exceptions: List[Exception] = []) \ - -> List[Exception]: +def _kill_children(children: Iterable, exceptions: list[Exception] = []) \ + -> list[Exception]: """ Kill each process found in `children`. 
@@ -223,8 +223,8 @@ def _kill_children(children: Iterable, exceptions: List[Exception] = []) \ return exceptions -def _wait_for_children(children: Iterable, exceptions: List[Exception] = []) \ - -> List[Exception]: +def _wait_for_children(children: Iterable, exceptions: list[Exception] = []) \ + -> list[Exception]: """ Wait for each process to end found in `children`. diff --git a/aurweb/testing/alpm.py b/aurweb/testing/alpm.py index 6015d859..ce30d042 100644 --- a/aurweb/testing/alpm.py +++ b/aurweb/testing/alpm.py @@ -4,8 +4,6 @@ import re import shutil import subprocess -from typing import List - from aurweb import logging, util from aurweb.templates import base_template @@ -38,7 +36,7 @@ class AlpmDatabase: return pkgdir def add(self, pkgname: str, pkgver: str, arch: str, - provides: List[str] = []) -> None: + provides: list[str] = []) -> None: context = { "pkgname": pkgname, "pkgver": pkgver, diff --git a/aurweb/testing/html.py b/aurweb/testing/html.py index f01aaf3d..8c923438 100644 --- a/aurweb/testing/html.py +++ b/aurweb/testing/html.py @@ -1,5 +1,4 @@ from io import StringIO -from typing import List from lxml import etree @@ -15,11 +14,11 @@ def parse_root(html: str) -> etree.Element: return etree.parse(StringIO(html), parser) -def get_errors(content: str) -> List[etree._Element]: +def get_errors(content: str) -> list[etree._Element]: root = parse_root(content) return root.xpath('//ul[@class="errorlist"]/li') -def get_successes(content: str) -> List[etree._Element]: +def get_successes(content: str) -> list[etree._Element]: root = parse_root(content) return root.xpath('//ul[@class="success"]/li') diff --git a/aurweb/testing/requests.py b/aurweb/testing/requests.py index be13ab77..c97d1532 100644 --- a/aurweb/testing/requests.py +++ b/aurweb/testing/requests.py @@ -1,5 +1,3 @@ -from typing import Dict - import aurweb.config @@ -35,8 +33,8 @@ class Request: user: User = User(), authenticated: bool = False, method: str = "GET", - headers: Dict[str, str] = 
dict(), - cookies: Dict[str, str] = dict()) -> "Request": + headers: dict[str, str] = dict(), + cookies: dict[str, str] = dict()) -> "Request": self.user = user self.user.authenticated = authenticated diff --git a/aurweb/users/update.py b/aurweb/users/update.py index 5a32fd01..ffea1f2f 100644 --- a/aurweb/users/update.py +++ b/aurweb/users/update.py @@ -1,4 +1,4 @@ -from typing import Any, Dict +from typing import Any from fastapi import Request @@ -34,7 +34,7 @@ def simple(U: str = str(), E: str = str(), H: bool = False, def language(L: str = str(), request: Request = None, user: models.User = None, - context: Dict[str, Any] = {}, + context: dict[str, Any] = {}, **kwargs) -> None: if L and L != user.LangPreference: with db.begin(): @@ -45,7 +45,7 @@ def language(L: str = str(), def timezone(TZ: str = str(), request: Request = None, user: models.User = None, - context: Dict[str, Any] = {}, + context: dict[str, Any] = {}, **kwargs) -> None: if TZ and TZ != user.Timezone: with db.begin(): @@ -95,7 +95,7 @@ def account_type(T: int = None, def password(P: str = str(), request: Request = None, user: models.User = None, - context: Dict[str, Any] = {}, + context: dict[str, Any] = {}, **kwargs) -> None: if P and not user.valid_password(P): # Remove the fields we consumed for passwords. diff --git a/aurweb/util.py b/aurweb/util.py index 5138f7da..8291b578 100644 --- a/aurweb/util.py +++ b/aurweb/util.py @@ -6,7 +6,7 @@ import string from datetime import datetime from http import HTTPStatus from subprocess import PIPE, Popen -from typing import Callable, Iterable, List, Tuple, Union +from typing import Callable, Iterable, Tuple, Union from urllib.parse import urlparse import fastapi @@ -194,6 +194,6 @@ def parse_ssh_key(string: str) -> Tuple[str, str]: return (prefix, key) -def parse_ssh_keys(string: str) -> List[Tuple[str, str]]: +def parse_ssh_keys(string: str) -> list[Tuple[str, str]]: """ Parse a list of SSH public keys. 
""" return [parse_ssh_key(e) for e in string.splitlines()] diff --git a/test/test_adduser.py b/test/test_adduser.py index c6210e74..65968d40 100644 --- a/test/test_adduser.py +++ b/test/test_adduser.py @@ -1,4 +1,3 @@ -from typing import List from unittest import mock import pytest @@ -21,7 +20,7 @@ def setup(db_test): return -def run_main(args: List[str] = []): +def run_main(args: list[str] = []): with mock.patch("sys.argv", ["aurweb-adduser"] + args): adduser.main() diff --git a/test/test_mkpkglists.py b/test/test_mkpkglists.py index 7b538e02..9bc1073b 100644 --- a/test/test_mkpkglists.py +++ b/test/test_mkpkglists.py @@ -2,7 +2,6 @@ import gzip import json import os -from typing import List from unittest import mock import py @@ -47,7 +46,7 @@ def user() -> User: @pytest.fixture -def packages(user: User) -> List[Package]: +def packages(user: User) -> list[Package]: output = [] with db.begin(): lic = db.create(License, Name="GPL") @@ -89,7 +88,7 @@ def config_mock(tmpdir: py.path.local) -> None: config.rehash() -def test_mkpkglists(tmpdir: py.path.local, config_mock: None, user: User, packages: List[Package]): +def test_mkpkglists(tmpdir: py.path.local, config_mock: None, user: User, packages: list[Package]): from aurweb.scripts import mkpkglists mkpkglists.main() @@ -168,7 +167,7 @@ def test_mkpkglists_extended_empty(config_mock: None): @mock.patch("sys.argv", ["mkpkglists", "--extended"]) def test_mkpkglists_extended(config_mock: None, user: User, - packages: List[Package]): + packages: list[Package]): from aurweb.scripts import mkpkglists mkpkglists.main() diff --git a/test/test_notify.py b/test/test_notify.py index fdec5ed7..bbcc6b5a 100644 --- a/test/test_notify.py +++ b/test/test_notify.py @@ -1,5 +1,4 @@ from logging import ERROR -from typing import List from unittest import mock import pytest @@ -46,7 +45,7 @@ def user2() -> User: @pytest.fixture -def pkgbases(user: User) -> List[PackageBase]: +def pkgbases(user: User) -> list[PackageBase]: now = 
time.utcnow() output = [] @@ -62,7 +61,7 @@ def pkgbases(user: User) -> List[PackageBase]: @pytest.fixture -def pkgreq(user2: User, pkgbases: List[PackageBase]): +def pkgreq(user2: User, pkgbases: list[PackageBase]): pkgbase = pkgbases[0] with db.begin(): pkgreq_ = db.create(PackageRequest, PackageBase=pkgbase, @@ -74,7 +73,7 @@ def pkgreq(user2: User, pkgbases: List[PackageBase]): @pytest.fixture -def packages(pkgbases: List[PackageBase]) -> List[Package]: +def packages(pkgbases: list[PackageBase]) -> list[Package]: output = [] with db.begin(): for i, pkgbase in enumerate(pkgbases): @@ -85,7 +84,7 @@ def packages(pkgbases: List[PackageBase]) -> List[Package]: def test_out_of_date(user: User, user1: User, user2: User, - pkgbases: List[PackageBase]): + pkgbases: list[PackageBase]): pkgbase = pkgbases[0] # Create two comaintainers. We'll pass the maintainer uid to # FlagNotification, so we should expect to get two emails. @@ -162,7 +161,7 @@ link does not work, try copying and pasting it into your browser. assert email.body == expected -def test_comment(user: User, user2: User, pkgbases: List[PackageBase]): +def test_comment(user: User, user2: User, pkgbases: list[PackageBase]): pkgbase = pkgbases[0] with db.begin(): @@ -194,7 +193,7 @@ please go to the package page [2] and select "Disable notifications". assert expected == email.body -def test_update(user: User, user2: User, pkgbases: List[PackageBase]): +def test_update(user: User, user2: User, pkgbases: list[PackageBase]): pkgbase = pkgbases[0] with db.begin(): user.UpdateNotify = 1 @@ -221,7 +220,7 @@ please go to the package page [2] and select "Disable notifications". assert expected == email.body -def test_adopt(user: User, user2: User, pkgbases: List[PackageBase]): +def test_adopt(user: User, user2: User, pkgbases: list[PackageBase]): pkgbase = pkgbases[0] notif = notify.AdoptNotification(user2.ID, pkgbase.ID) notif.send() @@ -241,7 +240,7 @@ The package {pkgbase.Name} [1] was adopted by {user2.Username} [2]. 
assert email.body == expected -def test_disown(user: User, user2: User, pkgbases: List[PackageBase]): +def test_disown(user: User, user2: User, pkgbases: list[PackageBase]): pkgbase = pkgbases[0] notif = notify.DisownNotification(user2.ID, pkgbase.ID) notif.send() @@ -261,7 +260,7 @@ The package {pkgbase.Name} [1] was disowned by {user2.Username} [2]. assert email.body == expected -def test_comaintainer_addition(user: User, pkgbases: List[PackageBase]): +def test_comaintainer_addition(user: User, pkgbases: list[PackageBase]): pkgbase = pkgbases[0] notif = notify.ComaintainerAddNotification(user.ID, pkgbase.ID) notif.send() @@ -280,7 +279,7 @@ You were added to the co-maintainer list of {pkgbase.Name} [1]. assert email.body == expected -def test_comaintainer_removal(user: User, pkgbases: List[PackageBase]): +def test_comaintainer_removal(user: User, pkgbases: list[PackageBase]): pkgbase = pkgbases[0] notif = notify.ComaintainerRemoveNotification(user.ID, pkgbase.ID) notif.send() @@ -299,7 +298,7 @@ You were removed from the co-maintainer list of {pkgbase.Name} [1]. assert email.body == expected -def test_suspended_ownership_change(user: User, pkgbases: List[PackageBase]): +def test_suspended_ownership_change(user: User, pkgbases: list[PackageBase]): with db.begin(): user.Suspended = 1 @@ -314,7 +313,7 @@ def test_suspended_ownership_change(user: User, pkgbases: List[PackageBase]): assert Email.count() == 1 -def test_delete(user: User, user2: User, pkgbases: List[PackageBase]): +def test_delete(user: User, user2: User, pkgbases: list[PackageBase]): pkgbase = pkgbases[0] notif = notify.DeleteNotification(user2.ID, pkgbase.ID) notif.send() @@ -336,7 +335,7 @@ You will no longer receive notifications about this package. 
assert email.body == expected -def test_merge(user: User, user2: User, pkgbases: List[PackageBase]): +def test_merge(user: User, user2: User, pkgbases: list[PackageBase]): source, target = pkgbases[:2] notif = notify.DeleteNotification(user2.ID, source.ID, target.ID) notif.send() @@ -361,7 +360,7 @@ please go to [3] and click "Disable notifications". assert email.body == expected -def set_tu(users: List[User]) -> User: +def set_tu(users: list[User]) -> User: with db.begin(): for user in users: user.AccountTypeID = TRUSTED_USER_ID @@ -369,7 +368,7 @@ def set_tu(users: List[User]) -> User: def test_open_close_request(user: User, user2: User, pkgreq: PackageRequest, - pkgbases: List[PackageBase]): + pkgbases: list[PackageBase]): set_tu([user]) pkgbase = pkgbases[0] @@ -432,7 +431,7 @@ Request #{pkgreq.ID} has been rejected by {user2.Username} [1]. def test_close_request_comaintainer_cc(user: User, user2: User, pkgreq: PackageRequest, - pkgbases: List[PackageBase]): + pkgbases: list[PackageBase]): pkgbase = pkgbases[0] with db.begin(): db.create(models.PackageComaintainer, PackageBase=pkgbase, @@ -449,7 +448,7 @@ def test_close_request_comaintainer_cc(user: User, user2: User, def test_close_request_closure_comment(user: User, user2: User, pkgreq: PackageRequest, - pkgbases: List[PackageBase]): + pkgbases: list[PackageBase]): pkgbase = pkgbases[0] with db.begin(): pkgreq.ClosureComment = "This is a test closure comment." diff --git a/test/test_packages_routes.py b/test/test_packages_routes.py index e4c992af..62f89e23 100644 --- a/test/test_packages_routes.py +++ b/test/test_packages_routes.py @@ -1,7 +1,6 @@ import re from http import HTTPStatus -from typing import List from unittest import mock import pytest @@ -177,7 +176,7 @@ def comment(user: User, package: Package) -> PackageComment: @pytest.fixture -def packages(maintainer: User) -> List[Package]: +def packages(maintainer: User) -> list[Package]: """ Yield 55 packages named pkg_0 .. pkg_54. 
""" packages_ = [] now = time.utcnow() @@ -521,7 +520,7 @@ def test_package_dependencies(client: TestClient, maintainer: User, assert broken_node.text.strip() == broken_dep.DepName -def test_packages(client: TestClient, packages: List[Package]): +def test_packages(client: TestClient, packages: list[Package]): with client as request: response = request.get("/packages", params={ "SeB": "X", # "X" isn't valid, defaults to "nd" @@ -550,7 +549,7 @@ def test_packages_empty(client: TestClient): assert results[0].text.strip() == expected -def test_packages_search_by_name(client: TestClient, packages: List[Package]): +def test_packages_search_by_name(client: TestClient, packages: list[Package]): with client as request: response = request.get("/packages", params={ "SeB": "n", @@ -565,7 +564,7 @@ def test_packages_search_by_name(client: TestClient, packages: List[Package]): def test_packages_search_by_exact_name(client: TestClient, - packages: List[Package]): + packages: list[Package]): with client as request: response = request.get("/packages", params={ "SeB": "N", @@ -594,7 +593,7 @@ def test_packages_search_by_exact_name(client: TestClient, def test_packages_search_by_pkgbase(client: TestClient, - packages: List[Package]): + packages: list[Package]): with client as request: response = request.get("/packages", params={ "SeB": "b", @@ -609,7 +608,7 @@ def test_packages_search_by_pkgbase(client: TestClient, def test_packages_search_by_exact_pkgbase(client: TestClient, - packages: List[Package]): + packages: list[Package]): with client as request: response = request.get("/packages", params={ "SeB": "B", @@ -634,7 +633,7 @@ def test_packages_search_by_exact_pkgbase(client: TestClient, def test_packages_search_by_keywords(client: TestClient, - packages: List[Package]): + packages: list[Package]): # None of our packages have keywords, so this query should return nothing. 
with client as request: response = request.get("/packages", params={ @@ -791,7 +790,7 @@ def test_packages_search_by_submitter(client: TestClient, assert len(rows) == 1 -def test_packages_sort_by_name(client: TestClient, packages: List[Package]): +def test_packages_sort_by_name(client: TestClient, packages: list[Package]): with client as request: response = request.get("/packages", params={ "SB": "n", # Name @@ -820,7 +819,7 @@ def test_packages_sort_by_name(client: TestClient, packages: List[Package]): def test_packages_sort_by_votes(client: TestClient, maintainer: User, - packages: List[Package]): + packages: list[Package]): # Set the first package's NumVotes to 1. with db.begin(): packages[0].PackageBase.NumVotes = 1 @@ -855,7 +854,7 @@ def test_packages_sort_by_votes(client: TestClient, def test_packages_sort_by_popularity(client: TestClient, maintainer: User, - packages: List[Package]): + packages: list[Package]): # Set the first package's Popularity to 0.50. with db.begin(): packages[0].PackageBase.Popularity = "0.50" @@ -875,7 +874,7 @@ def test_packages_sort_by_popularity(client: TestClient, def test_packages_sort_by_voted(client: TestClient, maintainer: User, - packages: List[Package]): + packages: list[Package]): now = time.utcnow() with db.begin(): db.create(PackageVote, PackageBase=packages[0].PackageBase, @@ -902,7 +901,7 @@ def test_packages_sort_by_voted(client: TestClient, def test_packages_sort_by_notify(client: TestClient, maintainer: User, - packages: List[Package]): + packages: list[Package]): db.create(PackageNotification, PackageBase=packages[0].PackageBase, User=maintainer) @@ -970,7 +969,7 @@ def test_packages_sort_by_maintainer(client: TestClient, def test_packages_sort_by_last_modified(client: TestClient, - packages: List[Package]): + packages: list[Package]): now = time.utcnow() # Set the first package's ModifiedTS to be 1000 seconds before now. 
package = packages[0] @@ -996,7 +995,7 @@ def test_packages_sort_by_last_modified(client: TestClient, def test_packages_flagged(client: TestClient, maintainer: User, - packages: List[Package]): + packages: list[Package]): package = packages[0] now = time.utcnow() @@ -1029,7 +1028,7 @@ def test_packages_flagged(client: TestClient, maintainer: User, assert len(rows) == 50 -def test_packages_orphans(client: TestClient, packages: List[Package]): +def test_packages_orphans(client: TestClient, packages: list[Package]): package = packages[0] with db.begin(): package.PackageBase.Maintainer = None diff --git a/test/test_pkgbase_routes.py b/test/test_pkgbase_routes.py index 5edae592..3468656e 100644 --- a/test/test_pkgbase_routes.py +++ b/test/test_pkgbase_routes.py @@ -1,7 +1,6 @@ import re from http import HTTPStatus -from typing import List from unittest import mock import pytest @@ -176,7 +175,7 @@ def comment(user: User, package: Package) -> PackageComment: @pytest.fixture -def packages(maintainer: User) -> List[Package]: +def packages(maintainer: User) -> list[Package]: """ Yield 55 packages named pkg_0 .. pkg_54. 
""" packages_ = [] now = time.utcnow() @@ -197,7 +196,7 @@ def packages(maintainer: User) -> List[Package]: @pytest.fixture -def requests(user: User, packages: List[Package]) -> List[PackageRequest]: +def requests(user: User, packages: list[Package]) -> list[PackageRequest]: pkgreqs = [] deletion_type = db.query(RequestType).filter( RequestType.ID == DELETION_ID diff --git a/test/test_pkgmaint.py b/test/test_pkgmaint.py index 5d6a56de..da758c22 100644 --- a/test/test_pkgmaint.py +++ b/test/test_pkgmaint.py @@ -1,5 +1,3 @@ -from typing import List - import pytest from aurweb import db, time @@ -22,7 +20,7 @@ def user() -> User: @pytest.fixture -def packages(user: User) -> List[Package]: +def packages(user: User) -> list[Package]: output = [] now = time.utcnow() @@ -37,14 +35,14 @@ def packages(user: User) -> List[Package]: yield output -def test_pkgmaint_noop(packages: List[Package]): +def test_pkgmaint_noop(packages: list[Package]): assert len(packages) == 5 pkgmaint.main() packages = db.query(Package).all() assert len(packages) == 5 -def test_pkgmaint(packages: List[Package]): +def test_pkgmaint(packages: list[Package]): assert len(packages) == 5 # Modify the first package so it's out of date and gets deleted. diff --git a/test/test_requests.py b/test/test_requests.py index 5ac558e0..b7ab3835 100644 --- a/test/test_requests.py +++ b/test/test_requests.py @@ -2,7 +2,6 @@ import re from http import HTTPStatus from logging import DEBUG -from typing import List import pytest @@ -91,7 +90,7 @@ def maintainer() -> User: @pytest.fixture -def packages(maintainer: User) -> List[Package]: +def packages(maintainer: User) -> list[Package]: """ Yield 55 packages named pkg_0 .. pkg_54. 
""" packages_ = [] now = time.utcnow() @@ -112,7 +111,7 @@ def packages(maintainer: User) -> List[Package]: @pytest.fixture -def requests(user: User, packages: List[Package]) -> List[PackageRequest]: +def requests(user: User, packages: list[Package]) -> list[PackageRequest]: pkgreqs = [] with db.begin(): for i in range(55): @@ -660,8 +659,8 @@ def test_requests_unauthorized(client: TestClient): def test_requests(client: TestClient, tu_user: User, - packages: List[Package], - requests: List[PackageRequest]): + packages: list[Package], + requests: list[PackageRequest]): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: resp = request.get("/requests", params={ @@ -697,7 +696,7 @@ def test_requests(client: TestClient, def test_requests_selfmade(client: TestClient, user: User, - requests: List[PackageRequest]): + requests: list[PackageRequest]): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: resp = request.get("/requests", cookies=cookies) diff --git a/test/test_rpc.py b/test/test_rpc.py index 2f7f7860..0e24467a 100644 --- a/test/test_rpc.py +++ b/test/test_rpc.py @@ -1,7 +1,6 @@ import re from http import HTTPStatus -from typing import List from unittest import mock import orjson @@ -62,7 +61,7 @@ def user3() -> User: @pytest.fixture -def packages(user: User, user2: User, user3: User) -> List[Package]: +def packages(user: User, user2: User, user3: User) -> list[Package]: output = [] # Create package records used in our tests. 
@@ -123,7 +122,7 @@ def packages(user: User, user2: User, user3: User) -> List[Package]: @pytest.fixture -def depends(packages: List[Package]) -> List[PackageDependency]: +def depends(packages: list[Package]) -> list[PackageDependency]: output = [] with db.begin(): @@ -162,7 +161,7 @@ def depends(packages: List[Package]) -> List[PackageDependency]: @pytest.fixture -def relations(user: User, packages: List[Package]) -> List[PackageRelation]: +def relations(user: User, packages: list[Package]) -> list[PackageRelation]: output = [] with db.begin(): @@ -241,9 +240,9 @@ def test_rpc_documentation_missing(): def test_rpc_singular_info(client: TestClient, user: User, - packages: List[Package], - depends: List[PackageDependency], - relations: List[PackageRelation]): + packages: list[Package], + depends: list[PackageDependency], + relations: list[PackageRelation]): # Define expected response. pkg = packages[0] expected_data = { @@ -310,7 +309,7 @@ def test_rpc_nonexistent_package(client: TestClient): assert response_data["resultcount"] == 0 -def test_rpc_multiinfo(client: TestClient, packages: List[Package]): +def test_rpc_multiinfo(client: TestClient, packages: list[Package]): # Make dummy request. request_packages = ["big-chungus", "chungy-chungus"] with client as request: @@ -328,7 +327,7 @@ def test_rpc_multiinfo(client: TestClient, packages: List[Package]): assert request_packages == [] -def test_rpc_mixedargs(client: TestClient, packages: List[Package]): +def test_rpc_mixedargs(client: TestClient, packages: list[Package]): # Make dummy request. 
response1_packages = ["gluggly-chungus"] response2_packages = ["gluggly-chungus", "chungy-chungus"] @@ -361,9 +360,9 @@ def test_rpc_mixedargs(client: TestClient, packages: List[Package]): def test_rpc_no_dependencies_omits_key(client: TestClient, user: User, - packages: List[Package], - depends: List[PackageDependency], - relations: List[PackageRelation]): + packages: list[Package], + depends: list[PackageDependency], + relations: list[PackageRelation]): """ This makes sure things like 'MakeDepends' get removed from JSON strings when they don't have set values. @@ -517,7 +516,7 @@ def test_rpc_no_args(client: TestClient): assert expected_data == response_data -def test_rpc_no_maintainer(client: TestClient, packages: List[Package]): +def test_rpc_no_maintainer(client: TestClient, packages: list[Package]): # Make dummy request. with client as request: response = request.get("/rpc", params={ @@ -531,7 +530,7 @@ def test_rpc_no_maintainer(client: TestClient, packages: List[Package]): assert response_data["results"][0]["Maintainer"] is None -def test_rpc_suggest_pkgbase(client: TestClient, packages: List[Package]): +def test_rpc_suggest_pkgbase(client: TestClient, packages: list[Package]): params = {"v": 5, "type": "suggest-pkgbase", "arg": "big"} with client as request: response = request.get("/rpc", params=params) @@ -560,7 +559,7 @@ def test_rpc_suggest_pkgbase(client: TestClient, packages: List[Package]): assert data == [] -def test_rpc_suggest(client: TestClient, packages: List[Package]): +def test_rpc_suggest(client: TestClient, packages: list[Package]): params = {"v": 5, "type": "suggest", "arg": "other"} with client as request: response = request.get("/rpc", params=params) @@ -600,7 +599,7 @@ def mock_config_getint(section: str, key: str): @mock.patch("aurweb.config.getint", side_effect=mock_config_getint) def test_rpc_ratelimit(getint: mock.MagicMock, client: TestClient, - pipeline: Pipeline, packages: List[Package]): + pipeline: Pipeline, packages: 
list[Package]): params = {"v": 5, "type": "suggest-pkgbase", "arg": "big"} for i in range(4): @@ -626,7 +625,7 @@ def test_rpc_ratelimit(getint: mock.MagicMock, client: TestClient, assert response.status_code == int(HTTPStatus.OK) -def test_rpc_etag(client: TestClient, packages: List[Package]): +def test_rpc_etag(client: TestClient, packages: list[Package]): params = {"v": 5, "type": "suggest-pkgbase", "arg": "big"} with client as request: @@ -647,7 +646,7 @@ def test_rpc_search_arg_too_small(client: TestClient): assert response.json().get("error") == "Query arg too small." -def test_rpc_search(client: TestClient, packages: List[Package]): +def test_rpc_search(client: TestClient, packages: list[Package]): params = {"v": 5, "type": "search", "arg": "big"} with client as request: response = request.get("/rpc", params=params) @@ -673,7 +672,7 @@ def test_rpc_search(client: TestClient, packages: List[Package]): assert response.json().get("error") == "No request type/data specified." -def test_rpc_msearch(client: TestClient, user: User, packages: List[Package]): +def test_rpc_msearch(client: TestClient, user: User, packages: list[Package]): params = {"v": 5, "type": "msearch", "arg": user.Username} with client as request: response = request.get("/rpc", params=params) @@ -709,8 +708,8 @@ def test_rpc_msearch(client: TestClient, user: User, packages: List[Package]): assert result.get("Name") == "big-chungus" -def test_rpc_search_depends(client: TestClient, packages: List[Package], - depends: List[PackageDependency]): +def test_rpc_search_depends(client: TestClient, packages: list[Package], + depends: list[PackageDependency]): params = { "v": 5, "type": "search", "by": "depends", "arg": "chungus-depends" } @@ -722,8 +721,8 @@ def test_rpc_search_depends(client: TestClient, packages: List[Package], assert result.get("Name") == packages[0].Name -def test_rpc_search_makedepends(client: TestClient, packages: List[Package], - depends: List[PackageDependency]): +def 
test_rpc_search_makedepends(client: TestClient, packages: list[Package], + depends: list[PackageDependency]): params = { "v": 5, "type": "search", @@ -738,8 +737,8 @@ def test_rpc_search_makedepends(client: TestClient, packages: List[Package], assert result.get("Name") == packages[0].Name -def test_rpc_search_optdepends(client: TestClient, packages: List[Package], - depends: List[PackageDependency]): +def test_rpc_search_optdepends(client: TestClient, packages: list[Package], + depends: list[PackageDependency]): params = { "v": 5, "type": "search", @@ -754,8 +753,8 @@ def test_rpc_search_optdepends(client: TestClient, packages: List[Package], assert result.get("Name") == packages[0].Name -def test_rpc_search_checkdepends(client: TestClient, packages: List[Package], - depends: List[PackageDependency]): +def test_rpc_search_checkdepends(client: TestClient, packages: list[Package], + depends: list[PackageDependency]): params = { "v": 5, "type": "search", @@ -802,7 +801,7 @@ def test_rpc_jsonp_callback(client: TestClient): assert response.json().get("error") == "Invalid callback name." -def test_rpc_post(client: TestClient, packages: List[Package]): +def test_rpc_post(client: TestClient, packages: list[Package]): data = { "v": 5, "type": "info", @@ -816,7 +815,7 @@ def test_rpc_post(client: TestClient, packages: List[Package]): def test_rpc_too_many_search_results(client: TestClient, - packages: List[Package]): + packages: list[Package]): config_getint = config.getint def mock_config(section: str, key: str): @@ -831,7 +830,7 @@ def test_rpc_too_many_search_results(client: TestClient, assert resp.json().get("error") == "Too many package results." -def test_rpc_too_many_info_results(client: TestClient, packages: List[Package]): +def test_rpc_too_many_info_results(client: TestClient, packages: list[Package]): # Make many of these packages depend and rely on each other. 
# This way, we can test to see that the exceeded limit stays true # regardless of the number of related records. diff --git a/test/test_templates.py b/test/test_templates.py index 7d6b585c..e4888127 100644 --- a/test/test_templates.py +++ b/test/test_templates.py @@ -1,6 +1,6 @@ import re -from typing import Any, Dict +from typing import Any import pytest @@ -126,7 +126,7 @@ def test_commit_hash(): assert commit_hash not in render -def pager_context(num_packages: int) -> Dict[str, Any]: +def pager_context(num_packages: int) -> dict[str, Any]: return { "request": Request(), "singular": "%d package found.", From 1d6335363c028591d72eac40c85109d435e469cb Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Mon, 1 Aug 2022 19:02:17 +0300 Subject: [PATCH 049/415] fix: strip whitespace when parsing package keywords Remove all extra whitespace when parsing Keywords to ensure we don't add empty keywords in the DB. Closes: #332 Signed-off-by: Leonidas Spyropoulos --- aurweb/routers/pkgbase.py | 2 +- test/test_pkgbase_routes.py | 30 ++++++++++++++++++++++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/aurweb/routers/pkgbase.py b/aurweb/routers/pkgbase.py index 2cef5436..6cd4199d 100644 --- a/aurweb/routers/pkgbase.py +++ b/aurweb/routers/pkgbase.py @@ -98,7 +98,7 @@ async def pkgbase_keywords(request: Request, name: str, # Lowercase all keywords. Our database table is case insensitive, # and providing CI duplicates of keywords is erroneous. - keywords = set(k.lower() for k in keywords.split(" ")) + keywords = set(k.lower() for k in keywords.split()) # Delete all keywords which are not supplied by the user. 
with db.begin(): diff --git a/test/test_pkgbase_routes.py b/test/test_pkgbase_routes.py index 3468656e..a152c590 100644 --- a/test/test_pkgbase_routes.py +++ b/test/test_pkgbase_routes.py @@ -1396,3 +1396,33 @@ def test_pkgbase_keywords(client: TestClient, user: User, package: Package): expected = ["abc", "test"] for i, keyword in enumerate(keywords): assert keyword.text.strip() == expected[i] + + +def test_pkgbase_empty_keywords(client: TestClient, user: User, package: Package): + endpoint = f"/pkgbase/{package.PackageBase.Name}" + with client as request: + resp = request.get(endpoint) + assert resp.status_code == int(HTTPStatus.OK) + + root = parse_root(resp.text) + keywords = root.xpath('//a[@class="keyword"]') + assert len(keywords) == 0 + + cookies = {"AURSID": user.login(Request(), "testPassword")} + post_endpoint = f"{endpoint}/keywords" + with client as request: + resp = request.post(post_endpoint, data={ + "keywords": "abc test foo bar " + }, cookies=cookies) + assert resp.status_code == int(HTTPStatus.SEE_OTHER) + + with client as request: + resp = request.get(resp.headers.get("location")) + assert resp.status_code == int(HTTPStatus.OK) + + root = parse_root(resp.text) + keywords = root.xpath('//a[@class="keyword"]') + assert len(keywords) == 4 + expected = ["abc", "bar", "foo", "test"] + for i, keyword in enumerate(keywords): + assert keyword.text.strip() == expected[i] From 2c080b2ea9a91668e6009c690e8e46826e4d05cb Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Tue, 2 Aug 2022 20:27:47 +0300 Subject: [PATCH 050/415] feature: add pagination on comments Fixes: #354 Signed-off-by: Leonidas Spyropoulos --- aurweb/defaults.py | 3 +++ aurweb/pkgbase/util.py | 13 +++++++++++-- templates/partials/packages/comments.html | 8 ++++++++ 3 files changed, 22 insertions(+), 2 deletions(-) diff --git a/aurweb/defaults.py b/aurweb/defaults.py index 51072e8f..91ba367a 100644 --- a/aurweb/defaults.py +++ b/aurweb/defaults.py @@ -6,6 +6,9 @@ O = 0 # Default [P]er 
[P]age PP = 50 +# Default Comments Per Page +COMMENTS_PER_PAGE = 10 + # A whitelist of valid PP values PP_WHITELIST = {50, 100, 250} diff --git a/aurweb/pkgbase/util.py b/aurweb/pkgbase/util.py index 5a7d952a..5ffe490e 100644 --- a/aurweb/pkgbase/util.py +++ b/aurweb/pkgbase/util.py @@ -3,7 +3,7 @@ from typing import Any from fastapi import Request from sqlalchemy import and_ -from aurweb import config, db, l10n, util +from aurweb import config, db, defaults, l10n, util from aurweb.models import PackageBase, User from aurweb.models.package_comaintainer import PackageComaintainer from aurweb.models.package_comment import PackageComment @@ -31,6 +31,12 @@ def make_context(request: Request, pkgbase: PackageBase, if not context: context = _make_context(request, pkgbase.Name) + # Per page and offset. + offset, per_page = util.sanitize_params( + request.query_params.get("O", defaults.O), + request.query_params.get("PP", defaults.COMMENTS_PER_PAGE)) + context["O"] = offset + context["PP"] = per_page context["git_clone_uri_anon"] = config.get("options", "git_clone_uri_anon") context["git_clone_uri_priv"] = config.get("options", "git_clone_uri_priv") context["pkgbase"] = pkgbase @@ -44,9 +50,12 @@ def make_context(request: Request, pkgbase: PackageBase, context["packages_count"] = pkgbase.packages.count() context["keywords"] = pkgbase.keywords + context["comments_total"] = pkgbase.comments.order_by( + PackageComment.CommentTS.desc() + ).count() context["comments"] = pkgbase.comments.order_by( PackageComment.CommentTS.desc() - ) + ).limit(per_page).offset(offset) context["pinned_comments"] = pkgbase.comments.filter( PackageComment.PinnedTS != 0 ).order_by(PackageComment.CommentTS.desc()) diff --git a/templates/partials/packages/comments.html b/templates/partials/packages/comments.html index 6e6b9a47..9d49bc86 100644 --- a/templates/partials/packages/comments.html +++ b/templates/partials/packages/comments.html @@ -33,6 +33,14 @@ {{ "Latest Comments" | tr }} + {% set page = 
((O / PP) | int) %} + {% set pages = ((comments_total / PP) | ceil) %} + + {% if pages > 1 %} +

    + {{ page | pager_nav(comments_total, prefix) | safe }} +

    + {% endif %} {% for comment in comments.all() %} {% include "partials/packages/comment.html" %} From 9648628a2c29397216a609b75287a3e6643e67b2 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 9 Aug 2022 16:43:27 -0700 Subject: [PATCH 051/415] update: requests dependency Signed-off-by: Kevin Morris --- poetry.lock | 794 +++++++++++-------------------------------------- pyproject.toml | 2 +- 2 files changed, 175 insertions(+), 621 deletions(-) diff --git a/poetry.lock b/poetry.lock index fe1575a6..72b66638 100644 --- a/poetry.lock +++ b/poetry.lock @@ -8,11 +8,11 @@ python-versions = ">=3.6,<4.0" [[package]] name = "alembic" -version = "1.7.6" +version = "1.8.1" description = "A database migration tool for SQLAlchemy." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] Mako = "*" @@ -23,7 +23,7 @@ tz = ["python-dateutil"] [[package]] name = "anyio" -version = "3.5.0" +version = "3.6.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" category = "main" optional = false @@ -35,12 +35,12 @@ sniffio = ">=1.1" [package.extras] doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] -test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=6.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] +test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] trio = ["trio (>=0.16)"] [[package]] name = "asgiref" -version = "3.5.0" +version = "3.5.2" description = "ASGI specs, helper code, and adapters" category = "main" optional = false @@ -51,7 +51,7 @@ tests = ["pytest", "pytest-asyncio", "mypy (>=0.800)"] [[package]] name = "atomicwrites" -version = "1.4.0" +version = "1.4.1" description = "Atomic file writes." 
category = "main" optional = false @@ -59,17 +59,17 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "attrs" -version = "21.4.0" +version = "22.1.0" description = "Classes Without Boilerplate" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.5" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"] [[package]] name = "authlib" @@ -87,7 +87,7 @@ client = ["requests"] [[package]] name = "bcrypt" -version = "3.2.0" +version = "3.2.2" description = "Modern password hashing for your software and your servers" category = "main" optional = false @@ -95,7 +95,6 @@ python-versions = ">=3.6" [package.dependencies] cffi = ">=1.1" -six = ">=1.4.1" [package.extras] tests = ["pytest (>=3.2.1,!=3.3.0)"] @@ -116,15 +115,15 @@ webencodings = "*" [[package]] name = "certifi" -version = "2021.10.8" +version = 
"2022.6.15" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "cffi" -version = "1.15.0" +version = "1.15.1" description = "Foreign Function Interface for Python calling C code." category = "main" optional = false @@ -135,29 +134,29 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "2.0.11" +version = "2.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false -python-versions = ">=3.5.0" +python-versions = ">=3.6.0" [package.extras] unicode_backport = ["unicodedata2"] [[package]] name = "click" -version = "8.0.3" +version = "8.1.3" description = "Composable command line interface toolkit" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "colorama" -version = "0.4.4" +version = "0.4.5" description = "Cross-platform colored terminal text." category = "main" optional = false @@ -165,21 +164,21 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "coverage" -version = "6.3.1" +version = "6.4.3" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" [package.dependencies] -tomli = {version = "*", optional = true, markers = "extra == \"toml\""} +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] toml = ["tomli"] [[package]] name = "cryptography" -version = "36.0.1" +version = "37.0.4" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
category = "main" optional = false @@ -194,11 +193,11 @@ docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] sdist = ["setuptools_rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pytest (>=6.2.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] +test = ["pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] [[package]] name = "dnspython" -version = "2.2.0" +version = "2.2.1" description = "DNS toolkit" category = "main" optional = false @@ -237,21 +236,20 @@ testing = ["pre-commit"] [[package]] name = "fakeredis" -version = "1.7.0" +version = "1.9.0" description = "Fake implementation of redis API for testing purposes." category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7,<4.0" [package.dependencies] -packaging = "*" -redis = "<4.1.0" -six = ">=1.12" -sortedcontainers = "*" +redis = "<4.4" +six = ">=1.16.0,<2.0.0" +sortedcontainers = ">=2.4.0,<3.0.0" [package.extras] -aioredis = ["aioredis"] -lua = ["lupa"] +aioredis = ["aioredis (>=2.0.1,<3.0.0)"] +lua = ["lupa (>=1.13,<2.0)"] [[package]] name = "fastapi" @@ -285,7 +283,7 @@ python-dateutil = "*" [[package]] name = "filelock" -version = "3.4.2" +version = "3.7.1" description = "A platform independent file lock." 
category = "main" optional = false @@ -436,7 +434,7 @@ python-versions = ">=3.5" [[package]] name = "importlib-metadata" -version = "4.10.1" +version = "4.12.0" description = "Read metadata from Python packages" category = "main" optional = false @@ -446,9 +444,9 @@ python-versions = ">=3.7" zipp = ">=0.5" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] [[package]] name = "iniconfig" @@ -474,19 +472,19 @@ plugins = ["setuptools"] [[package]] name = "itsdangerous" -version = "2.0.1" +version = "2.1.2" description = "Safely pass data to untrusted environments and back." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "jinja2" -version = "3.0.3" +version = "3.1.2" description = "A very fast and expressive template engine." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] MarkupSafe = ">=2.0" @@ -496,7 +494,7 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "lxml" -version = "4.7.1" +version = "4.9.1" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
category = "main" optional = false @@ -510,11 +508,11 @@ source = ["Cython (>=0.29.7)"] [[package]] name = "mako" -version = "1.1.6" -description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +version = "1.2.1" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.7" [package.dependencies] MarkupSafe = ">=0.9.2" @@ -522,14 +520,15 @@ MarkupSafe = ">=0.9.2" [package.extras] babel = ["babel"] lingua = ["lingua"] +testing = ["pytest"] [[package]] name = "markdown" -version = "3.3.6" +version = "3.4.1" description = "Python implementation of Markdown." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} @@ -539,11 +538,11 @@ testing = ["coverage", "pyyaml"] [[package]] name = "markupsafe" -version = "2.0.1" +version = "2.1.1" description = "Safely add untrusted strings to HTML/XML markup." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "mccabe" @@ -555,7 +554,7 @@ python-versions = "*" [[package]] name = "mysqlclient" -version = "2.1.0" +version = "2.1.1" description = "Python interface to MySQL" category = "main" optional = false @@ -563,7 +562,7 @@ python-versions = ">=3.5" [[package]] name = "orjson" -version = "3.6.6" +version = "3.7.11" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" category = "main" optional = false @@ -626,7 +625,7 @@ python-versions = ">=3.6.1" [[package]] name = "prometheus-client" -version = "0.13.1" +version = "0.14.1" description = "Python client for the Prometheus monitoring system." 
category = "main" optional = false @@ -637,11 +636,11 @@ twisted = ["twisted"] [[package]] name = "prometheus-fastapi-instrumentator" -version = "5.7.1" +version = "5.8.2" description = "Instrument your FastAPI with Prometheus metrics" category = "main" optional = false -python-versions = ">=3.6.0,<4.0.0" +python-versions = ">=3.7.0,<4.0.0" [package.dependencies] fastapi = ">=0.38.1,<1.0.0" @@ -649,11 +648,11 @@ prometheus-client = ">=0.8.0,<1.0.0" [[package]] name = "protobuf" -version = "3.19.4" +version = "3.20.1" description = "Protocol Buffers" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" [[package]] name = "py" @@ -689,8 +688,8 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydantic" -version = "1.9.0" -description = "Data validation and settings management using python 3.6 type hinting" +version = "1.9.1" +description = "Data validation and settings management using python type hints" category = "main" optional = false python-versions = ">=3.6.1" @@ -712,25 +711,25 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygit2" -version = "1.7.2" +version = "1.10.0" description = "Python bindings for libgit2." category = "main" optional = false python-versions = ">=3.7" [package.dependencies] -cffi = ">=1.4.0" +cffi = ">=1.9.1" [[package]] name = "pyparsing" -version = "3.0.7" -description = "Python parsing module" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.6.8" [package.extras] -diagrams = ["jinja2", "railroad-diagrams"] +diagrams = ["railroad-diagrams", "jinja2"] [[package]] name = "pytest" @@ -859,21 +858,21 @@ hiredis = ["hiredis (>=0.1.3)"] [[package]] name = "requests" -version = "2.27.1" +version = "2.28.1" description = "Python HTTP for Humans." 
category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.7, <4" [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} -idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +charset-normalizer = ">=2,<3" +idna = ">=2.5,<4" urllib3 = ">=1.21.1,<1.27" [package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rfc3986" @@ -915,7 +914,7 @@ python-versions = "*" [[package]] name = "sqlalchemy" -version = "1.4.31" +version = "1.4.40" description = "Database Abstraction Library" category = "main" optional = false @@ -928,17 +927,18 @@ greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platfo aiomysql = ["greenlet (!=0.4.17)", "aiomysql"] aiosqlite = ["typing_extensions (!=3.10.0.1)", "greenlet (!=0.4.17)", "aiosqlite"] asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["greenlet (!=0.4.17)", "asyncmy (>=0.2.3)"] -mariadb_connector = ["mariadb (>=1.0.1)"] +asyncmy = ["greenlet (!=0.4.17)", "asyncmy (>=0.2.3,!=0.2.4)"] +mariadb_connector = ["mariadb (>=1.0.1,!=1.1.2)"] mssql = ["pyodbc"] mssql_pymssql = ["pymssql"] mssql_pyodbc = ["pyodbc"] mypy = ["sqlalchemy2-stubs", "mypy (>=0.910)"] mysql = ["mysqlclient (>=1.4.0,<2)", "mysqlclient (>=1.4.0)"] +mysql_connector = ["mysql-connector-python"] oracle = ["cx_oracle (>=7,<8)", "cx_oracle (>=7)"] postgresql = ["psycopg2 (>=2.7)"] postgresql_asyncpg = ["greenlet (!=0.4.17)", "asyncpg"] -postgresql_pg8000 = ["pg8000 (>=1.16.6)"] +postgresql_pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] postgresql_psycopg2binary = ["psycopg2-binary"] postgresql_psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql (<1)", "pymysql"] @@ -990,7 +990,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, 
!=3.2.*" [[package]] name = "tomli" -version = "2.0.0" +version = "2.0.1" description = "A lil' TOML parser" category = "dev" optional = false @@ -998,22 +998,22 @@ python-versions = ">=3.7" [[package]] name = "typing-extensions" -version = "4.0.1" -description = "Backported and Experimental Type Hints for Python 3.6+" +version = "4.3.0" +description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "urllib3" -version = "1.26.8" +version = "1.26.11" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" [package.extras] -brotli = ["brotlipy (>=0.6.0)"] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] @@ -1043,225 +1043,76 @@ python-versions = "*" [[package]] name = "werkzeug" -version = "2.0.2" +version = "2.2.2" description = "The comprehensive WSGI web application library." 
category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.1.1" [package.extras] watchdog = ["watchdog"] [[package]] name = "wsproto" -version = "1.0.0" +version = "1.1.0" description = "WebSockets state-machine based protocol implementation" category = "main" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7.0" [package.dependencies] h11 = ">=0.9.0,<1" [[package]] name = "zipp" -version = "3.7.0" +version = "3.8.1" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.11" -content-hash = "ffe7ab6733020584382d2d01950153072a46d0738f6d2fe52ac84653d0b16086" +content-hash = "7f939b59288f41a063f4a6634a61d3744b1f73e4c3bce76e97dc766b7919ffe7" [metadata.files] aiofiles = [ {file = "aiofiles-0.7.0-py3-none-any.whl", hash = "sha256:c67a6823b5f23fcab0a2595a289cec7d8c863ffcb4322fb8cd6b90400aedfdbc"}, {file = "aiofiles-0.7.0.tar.gz", hash = "sha256:a1c4fc9b2ff81568c83e21392a82f344ea9d23da906e4f6a52662764545e19d4"}, ] -alembic = [ - {file = "alembic-1.7.6-py3-none-any.whl", hash = "sha256:ad842f2c3ab5c5d4861232730779c05e33db4ba880a08b85eb505e87c01095bc"}, - {file = "alembic-1.7.6.tar.gz", hash = "sha256:6c0c05e9768a896d804387e20b299880fe01bc56484246b0dffe8075d6d3d847"}, -] -anyio = 
[ - {file = "anyio-3.5.0-py3-none-any.whl", hash = "sha256:b5fa16c5ff93fa1046f2eeb5bbff2dad4d3514d6cda61d02816dba34fa8c3c2e"}, - {file = "anyio-3.5.0.tar.gz", hash = "sha256:a0aeffe2fb1fdf374a8e4b471444f0f3ac4fb9f5a5b542b48824475e0042a5a6"}, -] -asgiref = [ - {file = "asgiref-3.5.0-py3-none-any.whl", hash = "sha256:88d59c13d634dcffe0510be048210188edd79aeccb6a6c9028cdad6f31d730a9"}, - {file = "asgiref-3.5.0.tar.gz", hash = "sha256:2f8abc20f7248433085eda803936d98992f1343ddb022065779f37c5da0181d0"}, -] -atomicwrites = [ - {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, - {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, -] -attrs = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, -] +alembic = [] +anyio = [] +asgiref = [] +atomicwrites = [] +attrs = [] authlib = [ {file = "Authlib-0.15.5-py2.py3-none-any.whl", hash = "sha256:ecf4a7a9f2508c0bb07e93a752dd3c495cfaffc20e864ef0ffc95e3f40d2abaf"}, {file = "Authlib-0.15.5.tar.gz", hash = "sha256:b83cf6360c8e92b0e9df0d1f32d675790bcc4e3c03977499b1eed24dcdef4252"}, ] -bcrypt = [ - {file = "bcrypt-3.2.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b589229207630484aefe5899122fb938a5b017b0f4349f769b8c13e78d99a8fd"}, - {file = "bcrypt-3.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c95d4cbebffafcdd28bd28bb4e25b31c50f6da605c81ffd9ad8a3d1b2ab7b1b6"}, - {file = "bcrypt-3.2.0-cp36-abi3-manylinux1_x86_64.whl", hash = "sha256:63d4e3ff96188e5898779b6057878fecf3f11cfe6ec3b313ea09955d587ec7a7"}, - {file = "bcrypt-3.2.0-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:cd1ea2ff3038509ea95f687256c46b79f5fc382ad0aa3664d200047546d511d1"}, - {file = 
"bcrypt-3.2.0-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:cdcdcb3972027f83fe24a48b1e90ea4b584d35f1cc279d76de6fc4b13376239d"}, - {file = "bcrypt-3.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a0584a92329210fcd75eb8a3250c5a941633f8bfaf2a18f81009b097732839b7"}, - {file = "bcrypt-3.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:56e5da069a76470679f312a7d3d23deb3ac4519991a0361abc11da837087b61d"}, - {file = "bcrypt-3.2.0-cp36-abi3-win32.whl", hash = "sha256:a67fb841b35c28a59cebed05fbd3e80eea26e6d75851f0574a9273c80f3e9b55"}, - {file = "bcrypt-3.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:81fec756feff5b6818ea7ab031205e1d323d8943d237303baca2c5f9c7846f34"}, - {file = "bcrypt-3.2.0.tar.gz", hash = "sha256:5b93c1726e50a93a033c36e5ca7fdcd29a5c7395af50a6892f5d9e7c6cfbfb29"}, -] +bcrypt = [] bleach = [ {file = "bleach-4.1.0-py2.py3-none-any.whl", hash = "sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994"}, {file = "bleach-4.1.0.tar.gz", hash = "sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da"}, ] -certifi = [ - {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, - {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, -] -cffi = [ - {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, - {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"}, - {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"}, - {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"}, - {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", 
hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"}, - {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"}, - {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"}, - {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"}, - {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"}, - {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"}, - {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"}, - {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"}, - {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"}, - {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"}, - {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"}, - {file = 
"cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"}, - {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"}, - {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"}, - {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"}, - {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"}, - {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"}, - {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", 
hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"}, - {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"}, - {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"}, - {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, -] -charset-normalizer = [ - {file = "charset-normalizer-2.0.11.tar.gz", hash = "sha256:98398a9d69ee80548c762ba991a4728bfc3836768ed226b3945908d1a688371c"}, - {file = "charset_normalizer-2.0.11-py3-none-any.whl", hash = "sha256:2842d8f5e82a1f6aa437380934d5e1cd4fcf2003b06fed6940769c164a480a45"}, -] -click = [ - {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, - {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, -] +certifi = [] +cffi = [] +charset-normalizer = [] +click = [] colorama = [ - {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, - {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, -] -coverage = [ - {file = "coverage-6.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeffd96882d8c06d31b65dddcf51db7c612547babc1c4c5db6a011abe9798525"}, - 
{file = "coverage-6.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:621f6ea7260ea2ffdaec64fe5cb521669984f567b66f62f81445221d4754df4c"}, - {file = "coverage-6.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84f2436d6742c01136dd940ee158bfc7cf5ced3da7e4c949662b8703b5cd8145"}, - {file = "coverage-6.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de73fca6fb403dd72d4da517cfc49fcf791f74eee697d3219f6be29adf5af6ce"}, - {file = "coverage-6.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78fbb2be068a13a5d99dce9e1e7d168db880870f7bc73f876152130575bd6167"}, - {file = "coverage-6.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f5a4551dfd09c3bd12fca8144d47fe7745275adf3229b7223c2f9e29a975ebda"}, - {file = "coverage-6.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7bff3a98f63b47464480de1b5bdd80c8fade0ba2832c9381253c9b74c4153c27"}, - {file = "coverage-6.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a06c358f4aed05fa1099c39decc8022261bb07dfadc127c08cfbd1391b09689e"}, - {file = "coverage-6.3.1-cp310-cp310-win32.whl", hash = "sha256:9fff3ff052922cb99f9e52f63f985d4f7a54f6b94287463bc66b7cdf3eb41217"}, - {file = "coverage-6.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:276b13cc085474e482566c477c25ed66a097b44c6e77132f3304ac0b039f83eb"}, - {file = "coverage-6.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:56c4a409381ddd7bbff134e9756077860d4e8a583d310a6f38a2315b9ce301d0"}, - {file = "coverage-6.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eb494070aa060ceba6e4bbf44c1bc5fa97bfb883a0d9b0c9049415f9e944793"}, - {file = "coverage-6.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e15d424b8153756b7c903bde6d4610be0c3daca3986173c18dd5c1a1625e4cd"}, - {file = 
"coverage-6.3.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d47a897c1e91f33f177c21de897267b38fbb45f2cd8e22a710bcef1df09ac1"}, - {file = "coverage-6.3.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:25e73d4c81efa8ea3785274a2f7f3bfbbeccb6fcba2a0bdd3be9223371c37554"}, - {file = "coverage-6.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fac0bcc5b7e8169bffa87f0dcc24435446d329cbc2b5486d155c2e0f3b493ae1"}, - {file = "coverage-6.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:72128176fea72012063200b7b395ed8a57849282b207321124d7ff14e26988e8"}, - {file = "coverage-6.3.1-cp37-cp37m-win32.whl", hash = "sha256:1bc6d709939ff262fd1432f03f080c5042dc6508b6e0d3d20e61dd045456a1a0"}, - {file = "coverage-6.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:618eeba986cea7f621d8607ee378ecc8c2504b98b3fdc4952b30fe3578304687"}, - {file = "coverage-6.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ed164af5c9078596cfc40b078c3b337911190d3faeac830c3f1274f26b8320"}, - {file = "coverage-6.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:352c68e233409c31048a3725c446a9e48bbff36e39db92774d4f2380d630d8f8"}, - {file = "coverage-6.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:448d7bde7ceb6c69e08474c2ddbc5b4cd13c9e4aa4a717467f716b5fc938a734"}, - {file = "coverage-6.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9fde6b90889522c220dd56a670102ceef24955d994ff7af2cb786b4ba8fe11e4"}, - {file = "coverage-6.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e647a0be741edbb529a72644e999acb09f2ad60465f80757da183528941ff975"}, - {file = "coverage-6.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a5cdc3adb4f8bb8d8f5e64c2e9e282bc12980ef055ec6da59db562ee9bdfefa"}, - {file = "coverage-6.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:2dd70a167843b4b4b2630c0c56f1b586fe965b4f8ac5da05b6690344fd065c6b"}, - {file = "coverage-6.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9ad0a117b8dc2061ce9461ea4c1b4799e55edceb236522c5b8f958ce9ed8fa9a"}, - {file = "coverage-6.3.1-cp38-cp38-win32.whl", hash = "sha256:e92c7a5f7d62edff50f60a045dc9542bf939758c95b2fcd686175dd10ce0ed10"}, - {file = "coverage-6.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:482fb42eea6164894ff82abbcf33d526362de5d1a7ed25af7ecbdddd28fc124f"}, - {file = "coverage-6.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c5b81fb37db76ebea79aa963b76d96ff854e7662921ce742293463635a87a78d"}, - {file = "coverage-6.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a4f923b9ab265136e57cc14794a15b9dcea07a9c578609cd5dbbfff28a0d15e6"}, - {file = "coverage-6.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56d296cbc8254a7dffdd7bcc2eb70be5a233aae7c01856d2d936f5ac4e8ac1f1"}, - {file = "coverage-6.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245ab82e8554fa88c4b2ab1e098ae051faac5af829efdcf2ce6b34dccd5567c"}, - {file = "coverage-6.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f2b05757c92ad96b33dbf8e8ec8d4ccb9af6ae3c9e9bd141c7cc44d20c6bcba"}, - {file = "coverage-6.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9e3dd806f34de38d4c01416344e98eab2437ac450b3ae39c62a0ede2f8b5e4ed"}, - {file = "coverage-6.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d651fde74a4d3122e5562705824507e2f5b2d3d57557f1916c4b27635f8fbe3f"}, - {file = "coverage-6.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:704f89b87c4f4737da2860695a18c852b78ec7279b24eedacab10b29067d3a38"}, - {file = "coverage-6.3.1-cp39-cp39-win32.whl", hash = "sha256:2aed4761809640f02e44e16b8b32c1a5dee5e80ea30a0ff0912158bde9c501f2"}, - {file = "coverage-6.3.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:9976fb0a5709988778ac9bc44f3d50fccd989987876dfd7716dee28beed0a9fa"}, - {file = "coverage-6.3.1-pp36.pp37.pp38-none-any.whl", hash = "sha256:463e52616ea687fd323888e86bf25e864a3cc6335a043fad6bbb037dbf49bbe2"}, - {file = "coverage-6.3.1.tar.gz", hash = "sha256:6c3f6158b02ac403868eea390930ae64e9a9a2a5bbfafefbb920d29258d9f2f8"}, -] -cryptography = [ - {file = "cryptography-36.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:73bc2d3f2444bcfeac67dd130ff2ea598ea5f20b40e36d19821b4df8c9c5037b"}, - {file = "cryptography-36.0.1-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:2d87cdcb378d3cfed944dac30596da1968f88fb96d7fc34fdae30a99054b2e31"}, - {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74d6c7e80609c0f4c2434b97b80c7f8fdfaa072ca4baab7e239a15d6d70ed73a"}, - {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:6c0c021f35b421ebf5976abf2daacc47e235f8b6082d3396a2fe3ccd537ab173"}, - {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d59a9d55027a8b88fd9fd2826c4392bd487d74bf628bb9d39beecc62a644c12"}, - {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a817b961b46894c5ca8a66b599c745b9a3d9f822725221f0e0fe49dc043a3a3"}, - {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:94ae132f0e40fe48f310bba63f477f14a43116f05ddb69d6fa31e93f05848ae2"}, - {file = "cryptography-36.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7be0eec337359c155df191d6ae00a5e8bbb63933883f4f5dffc439dac5348c3f"}, - {file = "cryptography-36.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e0344c14c9cb89e76eb6a060e67980c9e35b3f36691e15e1b7a9e58a0a6c6dc3"}, - {file = "cryptography-36.0.1-cp36-abi3-win32.whl", hash = "sha256:4caa4b893d8fad33cf1964d3e51842cd78ba87401ab1d2e44556826df849a8ca"}, - {file = 
"cryptography-36.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:391432971a66cfaf94b21c24ab465a4cc3e8bf4a939c1ca5c3e3a6e0abebdbcf"}, - {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bb5829d027ff82aa872d76158919045a7c1e91fbf241aec32cb07956e9ebd3c9"}, - {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc15b1c22e55c4d5566e3ca4db8689470a0ca2babef8e3a9ee057a8b82ce4b1"}, - {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:596f3cd67e1b950bc372c33f1a28a0692080625592ea6392987dba7f09f17a94"}, - {file = "cryptography-36.0.1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:30ee1eb3ebe1644d1c3f183d115a8c04e4e603ed6ce8e394ed39eea4a98469ac"}, - {file = "cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec63da4e7e4a5f924b90af42eddf20b698a70e58d86a72d943857c4c6045b3ee"}, - {file = "cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca238ceb7ba0bdf6ce88c1b74a87bffcee5afbfa1e41e173b1ceb095b39add46"}, - {file = "cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:ca28641954f767f9822c24e927ad894d45d5a1e501767599647259cbf030b903"}, - {file = "cryptography-36.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:39bdf8e70eee6b1c7b289ec6e5d84d49a6bfa11f8b8646b5b3dfe41219153316"}, - {file = "cryptography-36.0.1.tar.gz", hash = "sha256:53e5c1dc3d7a953de055d77bef2ff607ceef7a2aac0353b5d630ab67f7423638"}, -] -dnspython = [ - {file = "dnspython-2.2.0-py3-none-any.whl", hash = "sha256:081649da27ced5e75709a1ee542136eaba9842a0fe4c03da4fb0a3d3ed1f3c44"}, - {file = "dnspython-2.2.0.tar.gz", hash = "sha256:e79351e032d0b606b98d38a4b0e6e2275b31a5b85c873e587cc11b73aca026d6"}, + {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, + 
{file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, ] +coverage = [] +cryptography = [] +dnspython = [] email-validator = [ {file = "email_validator-1.1.3-py2.py3-none-any.whl", hash = "sha256:5675c8ceb7106a37e40e2698a57c056756bf3f272cfa8682a4f87ebd95d8440b"}, {file = "email_validator-1.1.3.tar.gz", hash = "sha256:aa237a65f6f4da067119b7df3f13e89c25c051327b2b5b66dc075f33d62480d7"}, @@ -1270,10 +1121,7 @@ execnet = [ {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, ] -fakeredis = [ - {file = "fakeredis-1.7.0-py3-none-any.whl", hash = "sha256:6f1e04f64557ad3b6835bdc6e5a8d022cbace4bdc24a47ad58f6a72e0fbff760"}, - {file = "fakeredis-1.7.0.tar.gz", hash = "sha256:c9bd12e430336cbd3e189fae0e91eb99997b93e76dbfdd6ed67fa352dc684c71"}, -] +fakeredis = [] fastapi = [ {file = "fastapi-0.71.0-py3-none-any.whl", hash = "sha256:a78eca6b084de9667f2d5f37e2ae297270e5a119cd01c2f04815795da92fc87f"}, {file = "fastapi-0.71.0.tar.gz", hash = "sha256:2b5ac0ae89c80b40d1dd4b2ea0bb1f78d7c4affd3644d080bf050f084759fff2"}, @@ -1281,10 +1129,7 @@ fastapi = [ feedgen = [ {file = "feedgen-0.9.0.tar.gz", hash = "sha256:8e811bdbbed6570034950db23a4388453628a70e689a6e8303ccec430f5a804a"}, ] -filelock = [ - {file = "filelock-3.4.2-py3-none-any.whl", hash = "sha256:cf0fc6a2f8d26bd900f19bf33915ca70ba4dd8c56903eeb14e1e7a2fd7590146"}, - {file = "filelock-3.4.2.tar.gz", hash = "sha256:38b4f4c989f9d06d44524df1b24bd19e167d851f19b50bf3e3559952dddc5b80"}, -] +filelock = [] flake8 = [ {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, @@ -1382,10 +1227,7 @@ idna = [ {file = 
"idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] -importlib-metadata = [ - {file = "importlib_metadata-4.10.1-py3-none-any.whl", hash = "sha256:899e2a40a8c4a1aec681feef45733de8a6c58f3f6a0dbed2eb6574b4387a77b6"}, - {file = "importlib_metadata-4.10.1.tar.gz", hash = "sha256:951f0d8a5b7260e9db5e41d429285b5f451e928479f19d80818878527d36e95e"}, -] +importlib-metadata = [] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, @@ -1394,192 +1236,62 @@ isort = [ {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, ] -itsdangerous = [ - {file = "itsdangerous-2.0.1-py3-none-any.whl", hash = "sha256:5174094b9637652bdb841a3029700391451bd092ba3db90600dea710ba28e97c"}, - {file = "itsdangerous-2.0.1.tar.gz", hash = "sha256:9e724d68fc22902a1435351f84c3fb8623f303fffcc566a4cb952df8c572cff0"}, -] +itsdangerous = [] jinja2 = [ - {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, - {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, -] -lxml = [ - {file = "lxml-4.7.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:d546431636edb1d6a608b348dd58cc9841b81f4116745857b6cb9f8dadb2725f"}, - {file = "lxml-4.7.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6308062534323f0d3edb4e702a0e26a76ca9e0e23ff99be5d82750772df32a9e"}, - {file = 
"lxml-4.7.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f76dbe44e31abf516114f6347a46fa4e7c2e8bceaa4b6f7ee3a0a03c8eba3c17"}, - {file = "lxml-4.7.1-cp27-cp27m-win32.whl", hash = "sha256:d5618d49de6ba63fe4510bdada62d06a8acfca0b4b5c904956c777d28382b419"}, - {file = "lxml-4.7.1-cp27-cp27m-win_amd64.whl", hash = "sha256:9393a05b126a7e187f3e38758255e0edf948a65b22c377414002d488221fdaa2"}, - {file = "lxml-4.7.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50d3dba341f1e583265c1a808e897b4159208d814ab07530202b6036a4d86da5"}, - {file = "lxml-4.7.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:44f552e0da3c8ee3c28e2eb82b0b784200631687fc6a71277ea8ab0828780e7d"}, - {file = "lxml-4.7.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:e662c6266e3a275bdcb6bb049edc7cd77d0b0f7e119a53101d367c841afc66dc"}, - {file = "lxml-4.7.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4c093c571bc3da9ebcd484e001ba18b8452903cd428c0bc926d9b0141bcb710e"}, - {file = "lxml-4.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:3e26ad9bc48d610bf6cc76c506b9e5ad9360ed7a945d9be3b5b2c8535a0145e3"}, - {file = "lxml-4.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a5f623aeaa24f71fce3177d7fee875371345eb9102b355b882243e33e04b7175"}, - {file = "lxml-4.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7b5e2acefd33c259c4a2e157119c4373c8773cf6793e225006a1649672ab47a6"}, - {file = "lxml-4.7.1-cp310-cp310-win32.whl", hash = "sha256:67fa5f028e8a01e1d7944a9fb616d1d0510d5d38b0c41708310bd1bc45ae89f6"}, - {file = "lxml-4.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:b1d381f58fcc3e63fcc0ea4f0a38335163883267f77e4c6e22d7a30877218a0e"}, - {file = "lxml-4.7.1-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:38d9759733aa04fb1697d717bfabbedb21398046bd07734be7cccc3d19ea8675"}, - 
{file = "lxml-4.7.1-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:dfd0d464f3d86a1460683cd742306d1138b4e99b79094f4e07e1ca85ee267fe7"}, - {file = "lxml-4.7.1-cp35-cp35m-win32.whl", hash = "sha256:534e946bce61fd162af02bad7bfd2daec1521b71d27238869c23a672146c34a5"}, - {file = "lxml-4.7.1-cp35-cp35m-win_amd64.whl", hash = "sha256:6ec829058785d028f467be70cd195cd0aaf1a763e4d09822584ede8c9eaa4b03"}, - {file = "lxml-4.7.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:ade74f5e3a0fd17df5782896ddca7ddb998845a5f7cd4b0be771e1ffc3b9aa5b"}, - {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:41358bfd24425c1673f184d7c26c6ae91943fe51dfecc3603b5e08187b4bcc55"}, - {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6e56521538f19c4a6690f439fefed551f0b296bd785adc67c1777c348beb943d"}, - {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5b0f782f0e03555c55e37d93d7a57454efe7495dab33ba0ccd2dbe25fc50f05d"}, - {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:490712b91c65988012e866c411a40cc65b595929ececf75eeb4c79fcc3bc80a6"}, - {file = "lxml-4.7.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:34c22eb8c819d59cec4444d9eebe2e38b95d3dcdafe08965853f8799fd71161d"}, - {file = "lxml-4.7.1-cp36-cp36m-win32.whl", hash = "sha256:2a906c3890da6a63224d551c2967413b8790a6357a80bf6b257c9a7978c2c42d"}, - {file = "lxml-4.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:36b16fecb10246e599f178dd74f313cbdc9f41c56e77d52100d1361eed24f51a"}, - {file = "lxml-4.7.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:a5edc58d631170de90e50adc2cc0248083541affef82f8cd93bea458e4d96db8"}, - {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:87c1b0496e8c87ec9db5383e30042357b4839b46c2d556abd49ec770ce2ad868"}, - {file = 
"lxml-4.7.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:0a5f0e4747f31cff87d1eb32a6000bde1e603107f632ef4666be0dc065889c7a"}, - {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:bf6005708fc2e2c89a083f258b97709559a95f9a7a03e59f805dd23c93bc3986"}, - {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fc15874816b9320581133ddc2096b644582ab870cf6a6ed63684433e7af4b0d3"}, - {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0b5e96e25e70917b28a5391c2ed3ffc6156513d3db0e1476c5253fcd50f7a944"}, - {file = "lxml-4.7.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ec9027d0beb785a35aa9951d14e06d48cfbf876d8ff67519403a2522b181943b"}, - {file = "lxml-4.7.1-cp37-cp37m-win32.whl", hash = "sha256:9fbc0dee7ff5f15c4428775e6fa3ed20003140560ffa22b88326669d53b3c0f4"}, - {file = "lxml-4.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1104a8d47967a414a436007c52f533e933e5d52574cab407b1e49a4e9b5ddbd1"}, - {file = "lxml-4.7.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:fc9fb11b65e7bc49f7f75aaba1b700f7181d95d4e151cf2f24d51bfd14410b77"}, - {file = "lxml-4.7.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:317bd63870b4d875af3c1be1b19202de34c32623609ec803b81c99193a788c1e"}, - {file = "lxml-4.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:610807cea990fd545b1559466971649e69302c8a9472cefe1d6d48a1dee97440"}, - {file = "lxml-4.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:09b738360af8cb2da275998a8bf79517a71225b0de41ab47339c2beebfff025f"}, - {file = "lxml-4.7.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6a2ab9d089324d77bb81745b01f4aeffe4094306d939e92ba5e71e9a6b99b71e"}, - {file = 
"lxml-4.7.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eed394099a7792834f0cb4a8f615319152b9d801444c1c9e1b1a2c36d2239f9e"}, - {file = "lxml-4.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:735e3b4ce9c0616e85f302f109bdc6e425ba1670a73f962c9f6b98a6d51b77c9"}, - {file = "lxml-4.7.1-cp38-cp38-win32.whl", hash = "sha256:772057fba283c095db8c8ecde4634717a35c47061d24f889468dc67190327bcd"}, - {file = "lxml-4.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:13dbb5c7e8f3b6a2cf6e10b0948cacb2f4c9eb05029fe31c60592d08ac63180d"}, - {file = "lxml-4.7.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:718d7208b9c2d86aaf0294d9381a6acb0158b5ff0f3515902751404e318e02c9"}, - {file = "lxml-4.7.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:5bee1b0cbfdb87686a7fb0e46f1d8bd34d52d6932c0723a86de1cc532b1aa489"}, - {file = "lxml-4.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e410cf3a2272d0a85526d700782a2fa92c1e304fdcc519ba74ac80b8297adf36"}, - {file = "lxml-4.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:585ea241ee4961dc18a95e2f5581dbc26285fcf330e007459688096f76be8c42"}, - {file = "lxml-4.7.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a555e06566c6dc167fbcd0ad507ff05fd9328502aefc963cb0a0547cfe7f00db"}, - {file = "lxml-4.7.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:adaab25be351fff0d8a691c4f09153647804d09a87a4e4ea2c3f9fe9e8651851"}, - {file = "lxml-4.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:82d16a64236970cb93c8d63ad18c5b9f138a704331e4b916b2737ddfad14e0c4"}, - {file = "lxml-4.7.1-cp39-cp39-win32.whl", hash = "sha256:59e7da839a1238807226f7143c68a479dee09244d1b3cf8c134f2fce777d12d0"}, - {file = "lxml-4.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:a1bbc4efa99ed1310b5009ce7f3a1784698082ed2c1ef3895332f5df9b3b92c2"}, - {file = 
"lxml-4.7.1-pp37-pypy37_pp73-macosx_10_14_x86_64.whl", hash = "sha256:0607ff0988ad7e173e5ddf7bf55ee65534bd18a5461183c33e8e41a59e89edf4"}, - {file = "lxml-4.7.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:6c198bfc169419c09b85ab10cb0f572744e686f40d1e7f4ed09061284fc1303f"}, - {file = "lxml-4.7.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a58d78653ae422df6837dd4ca0036610b8cb4962b5cfdbd337b7b24de9e5f98a"}, - {file = "lxml-4.7.1-pp38-pypy38_pp73-macosx_10_14_x86_64.whl", hash = "sha256:e18281a7d80d76b66a9f9e68a98cf7e1d153182772400d9a9ce855264d7d0ce7"}, - {file = "lxml-4.7.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8e54945dd2eeb50925500957c7c579df3cd07c29db7810b83cf30495d79af267"}, - {file = "lxml-4.7.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:447d5009d6b5447b2f237395d0018901dcc673f7d9f82ba26c1b9f9c3b444b60"}, - {file = "lxml-4.7.1.tar.gz", hash = "sha256:a1613838aa6b89af4ba10a0f3a972836128801ed008078f8c1244e65958f1b24"}, -] -mako = [ - {file = "Mako-1.1.6-py2.py3-none-any.whl", hash = "sha256:afaf8e515d075b22fad7d7b8b30e4a1c90624ff2f3733a06ec125f5a5f043a57"}, - {file = "Mako-1.1.6.tar.gz", hash = "sha256:4e9e345a41924a954251b95b4b28e14a301145b544901332e658907a7464b6b2"}, -] -markdown = [ - {file = "Markdown-3.3.6-py3-none-any.whl", hash = "sha256:9923332318f843411e9932237530df53162e29dc7a4e2b91e35764583c46c9a3"}, - {file = "Markdown-3.3.6.tar.gz", hash = "sha256:76df8ae32294ec39dcf89340382882dfa12975f87f45c3ed1ecdb1e8cefc7006"}, + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] +lxml = [] +mako = [] +markdown = [] markupsafe = [ - {file = 
"MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = 
"sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, - {file = 
"MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = 
"sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, - {file = 
"MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, - {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = 
"MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, ] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] -mysqlclient = [ - {file = "mysqlclient-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:02c8826e6add9b20f4cb12dcf016485f7b1d6e30356a1204d05431867a1b3947"}, - {file = "mysqlclient-2.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:b62d23c11c516cedb887377c8807628c1c65d57593b57853186a6ee18b0c6a5b"}, - {file = "mysqlclient-2.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:2c8410f54492a3d2488a6a53e2d85b7e016751a1e7d116e7aea9c763f59f5e8c"}, - {file = "mysqlclient-2.1.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:e6279263d5a9feca3e0edbc2b2a52c057375bf301d47da2089c075ff76331d14"}, - {file = "mysqlclient-2.1.0.tar.gz", hash = "sha256:973235686f1b720536d417bf0a0d39b4ab3d5086b2b6ad5e6752393428c02b12"}, -] -orjson = [ - {file = "orjson-3.6.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:e4a7cad6c63306318453980d302c7c0b74c0cc290dd1f433bbd7d31a5af90cf1"}, - {file = "orjson-3.6.6-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e533941dca4a0530a876de32e54bf2fd3269cdec3751aebde7bfb5b5eba98e74"}, - {file = "orjson-3.6.6-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:9adf63be386eaa34278967512b83ff8fc4bed036a246391ae236f68d23c47452"}, - {file = "orjson-3.6.6-cp310-cp310-manylinux_2_24_x86_64.whl", hash = "sha256:3b636753ae34d4619b11ea7d664a2f1e87e55e9738e5123e12bcce22acae9d13"}, - {file = "orjson-3.6.6-cp310-none-win_amd64.whl", hash = "sha256:78a10295ed048fd916c6584d6d27c232eae805a43e7c14be56e3745f784f0eb6"}, - {file = "orjson-3.6.6-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:82b4f9fb2af7799b52932a62eac484083f930d5519560d6f64b24d66a368d03f"}, - {file = "orjson-3.6.6-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:a0033d07309cc7d8b8c4bc5d42f0dd4422b53ceb91dee9f4086bb2afa70b7772"}, - {file = "orjson-3.6.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b321f99473116ab7c7c028377372f7b4adba4029aaca19cd567e83898f55579"}, - {file = "orjson-3.6.6-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:b9c98ed94f1688cc11b5c61b8eea39d854a1a2f09f71d8a5af005461b14994ed"}, - {file = "orjson-3.6.6-cp37-cp37m-manylinux_2_24_x86_64.whl", hash = "sha256:00b333a41392bd07a8603c42670547dbedf9b291485d773f90c6470eff435608"}, - {file = "orjson-3.6.6-cp37-none-win_amd64.whl", hash = "sha256:8d4fd3bdee65a81f2b79c50937d4b3c054e1e6bfa3fc72ed018a97c0c7c3d521"}, - {file = "orjson-3.6.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = 
"sha256:954c9f8547247cd7a8c91094ff39c9fe314b5eaeaec90b7bfb7384a4108f416f"}, - {file = "orjson-3.6.6-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:74e5aed657ed0b91ef05d44d6a26d3e3e12ce4d2d71f75df41a477b05878c4a9"}, - {file = "orjson-3.6.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4008a5130e6e9c33abaa95e939e0e755175da10745740aa6968461b2f16830e2"}, - {file = "orjson-3.6.6-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:012761d5f3d186deb4f6238f15e9ea7c1aac6deebc8f5b741ba3b4fafe017460"}, - {file = "orjson-3.6.6-cp38-cp38-manylinux_2_24_x86_64.whl", hash = "sha256:b464546718a940b48d095a98df4c04808bfa6c8706fe751fc3f9390bc2f82643"}, - {file = "orjson-3.6.6-cp38-none-win_amd64.whl", hash = "sha256:f10a800f4e5a4aab52076d4628e9e4dab9370bdd9d8ea254ebfde846b653ab25"}, - {file = "orjson-3.6.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:8010d2610cfab721725ef14d578c7071e946bbdae63322d8f7b49061cf3fde8d"}, - {file = "orjson-3.6.6-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:8dca67a4855e1e0f9a2ea0386e8db892708522e1171dc0ddf456932288fbae63"}, - {file = "orjson-3.6.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af065d60523139b99bd35b839c7a2d8c5da55df8a8c4402d2eb6cdc07fa7a624"}, - {file = "orjson-3.6.6-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:fa1f389cc9f766ae0cf7ba3533d5089836b01a5ccb3f8d904297f1fcf3d9dc34"}, - {file = "orjson-3.6.6-cp39-cp39-manylinux_2_24_x86_64.whl", hash = "sha256:ec1221ad78f94d27b162a1d35672b62ef86f27f0e4c2b65051edb480cc86b286"}, - {file = "orjson-3.6.6-cp39-none-win_amd64.whl", hash = "sha256:afed2af55eeda1de6b3f1cbc93431981b19d380fcc04f6ed86e74c1913070304"}, - {file = "orjson-3.6.6.tar.gz", hash = "sha256:55dd988400fa7fbe0e31407c683f5aaab013b5bd967167b8fe058186773c4d6c"}, -] +mysqlclient = [] +orjson = [] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = 
"sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, @@ -1606,42 +1318,9 @@ priority = [ {file = "priority-2.0.0-py3-none-any.whl", hash = "sha256:6f8eefce5f3ad59baf2c080a664037bb4725cd0a790d53d59ab4059288faf6aa"}, {file = "priority-2.0.0.tar.gz", hash = "sha256:c965d54f1b8d0d0b19479db3924c7c36cf672dbf2aec92d43fbdaf4492ba18c0"}, ] -prometheus-client = [ - {file = "prometheus_client-0.13.1-py3-none-any.whl", hash = "sha256:357a447fd2359b0a1d2e9b311a0c5778c330cfbe186d880ad5a6b39884652316"}, - {file = "prometheus_client-0.13.1.tar.gz", hash = "sha256:ada41b891b79fca5638bd5cfe149efa86512eaa55987893becd2c6d8d0a5dfc5"}, -] -prometheus-fastapi-instrumentator = [ - {file = "prometheus-fastapi-instrumentator-5.7.1.tar.gz", hash = "sha256:5371f1b494e2b00017a02898d854119b4929025d1a203670b07b3f42dd0b5526"}, - {file = "prometheus_fastapi_instrumentator-5.7.1-py3-none-any.whl", hash = "sha256:da40ea0df14b0e95d584769747fba777522a8df6a8c47cec2edf798f1fff49b5"}, -] -protobuf = [ - {file = "protobuf-3.19.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f51d5a9f137f7a2cec2d326a74b6e3fc79d635d69ffe1b036d39fc7d75430d37"}, - {file = "protobuf-3.19.4-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:09297b7972da685ce269ec52af761743714996b4381c085205914c41fcab59fb"}, - {file = "protobuf-3.19.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072fbc78d705d3edc7ccac58a62c4c8e0cec856987da7df8aca86e647be4e35c"}, - {file = "protobuf-3.19.4-cp310-cp310-win32.whl", hash = "sha256:7bb03bc2873a2842e5ebb4801f5c7ff1bfbdf426f85d0172f7644fcda0671ae0"}, - {file = "protobuf-3.19.4-cp310-cp310-win_amd64.whl", hash = "sha256:f358aa33e03b7a84e0d91270a4d4d8f5df6921abe99a377828839e8ed0c04e07"}, - {file = "protobuf-3.19.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:1c91ef4110fdd2c590effb5dca8fdbdcb3bf563eece99287019c4204f53d81a4"}, - {file = "protobuf-3.19.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c438268eebb8cf039552897d78f402d734a404f1360592fef55297285f7f953f"}, - {file = "protobuf-3.19.4-cp36-cp36m-win32.whl", hash = "sha256:835a9c949dc193953c319603b2961c5c8f4327957fe23d914ca80d982665e8ee"}, - {file = "protobuf-3.19.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4276cdec4447bd5015453e41bdc0c0c1234eda08420b7c9a18b8d647add51e4b"}, - {file = "protobuf-3.19.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6cbc312be5e71869d9d5ea25147cdf652a6781cf4d906497ca7690b7b9b5df13"}, - {file = "protobuf-3.19.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:54a1473077f3b616779ce31f477351a45b4fef8c9fd7892d6d87e287a38df368"}, - {file = "protobuf-3.19.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:435bb78b37fc386f9275a7035fe4fb1364484e38980d0dd91bc834a02c5ec909"}, - {file = "protobuf-3.19.4-cp37-cp37m-win32.whl", hash = "sha256:16f519de1313f1b7139ad70772e7db515b1420d208cb16c6d7858ea989fc64a9"}, - {file = "protobuf-3.19.4-cp37-cp37m-win_amd64.whl", hash = "sha256:cdc076c03381f5c1d9bb1abdcc5503d9ca8b53cf0a9d31a9f6754ec9e6c8af0f"}, - {file = "protobuf-3.19.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:69da7d39e39942bd52848438462674c463e23963a1fdaa84d88df7fbd7e749b2"}, - {file = "protobuf-3.19.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:48ed3877fa43e22bcacc852ca76d4775741f9709dd9575881a373bd3e85e54b2"}, - {file = "protobuf-3.19.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd95d1dfb9c4f4563e6093a9aa19d9c186bf98fa54da5252531cc0d3a07977e7"}, - {file = "protobuf-3.19.4-cp38-cp38-win32.whl", hash = "sha256:b38057450a0c566cbd04890a40edf916db890f2818e8682221611d78dc32ae26"}, - {file = "protobuf-3.19.4-cp38-cp38-win_amd64.whl", hash = "sha256:7ca7da9c339ca8890d66958f5462beabd611eca6c958691a8fe6eccbd1eb0c6e"}, - {file = 
"protobuf-3.19.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:36cecbabbda242915529b8ff364f2263cd4de7c46bbe361418b5ed859677ba58"}, - {file = "protobuf-3.19.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:c1068287025f8ea025103e37d62ffd63fec8e9e636246b89c341aeda8a67c934"}, - {file = "protobuf-3.19.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96bd766831596d6014ca88d86dc8fe0fb2e428c0b02432fd9db3943202bf8c5e"}, - {file = "protobuf-3.19.4-cp39-cp39-win32.whl", hash = "sha256:84123274d982b9e248a143dadd1b9815049f4477dc783bf84efe6250eb4b836a"}, - {file = "protobuf-3.19.4-cp39-cp39-win_amd64.whl", hash = "sha256:3112b58aac3bac9c8be2b60a9daf6b558ca3f7681c130dcdd788ade7c9ffbdca"}, - {file = "protobuf-3.19.4-py2.py3-none-any.whl", hash = "sha256:8961c3a78ebfcd000920c9060a262f082f29838682b1f7201889300c1fbe0616"}, - {file = "protobuf-3.19.4.tar.gz", hash = "sha256:9df0c10adf3e83015ced42a9a7bd64e13d06c4cf45c340d2c63020ea04499d0a"}, -] +prometheus-client = [] +prometheus-fastapi-instrumentator = [] +protobuf = [] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, @@ -1657,82 +1336,15 @@ pycparser = [ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] -pydantic = [ - {file = "pydantic-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cb23bcc093697cdea2708baae4f9ba0e972960a835af22560f6ae4e7e47d33f5"}, - {file = "pydantic-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1d5278bd9f0eee04a44c712982343103bba63507480bfd2fc2790fa70cd64cf4"}, - {file = "pydantic-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ab624700dc145aa809e6f3ec93fb8e7d0f99d9023b713f6a953637429b437d37"}, - {file = "pydantic-1.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8d7da6f1c1049eefb718d43d99ad73100c958a5367d30b9321b092771e96c25"}, - {file = "pydantic-1.9.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3c3b035103bd4e2e4a28da9da7ef2fa47b00ee4a9cf4f1a735214c1bcd05e0f6"}, - {file = "pydantic-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3011b975c973819883842c5ab925a4e4298dffccf7782c55ec3580ed17dc464c"}, - {file = "pydantic-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:086254884d10d3ba16da0588604ffdc5aab3f7f09557b998373e885c690dd398"}, - {file = "pydantic-1.9.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0fe476769acaa7fcddd17cadd172b156b53546ec3614a4d880e5d29ea5fbce65"}, - {file = "pydantic-1.9.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8e9dcf1ac499679aceedac7e7ca6d8641f0193c591a2d090282aaf8e9445a46"}, - {file = "pydantic-1.9.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1e4c28f30e767fd07f2ddc6f74f41f034d1dd6bc526cd59e63a82fe8bb9ef4c"}, - {file = "pydantic-1.9.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c86229333cabaaa8c51cf971496f10318c4734cf7b641f08af0a6fbf17ca3054"}, - {file = "pydantic-1.9.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:c0727bda6e38144d464daec31dff936a82917f431d9c39c39c60a26567eae3ed"}, - {file = "pydantic-1.9.0-cp36-cp36m-win_amd64.whl", hash = "sha256:dee5ef83a76ac31ab0c78c10bd7d5437bfdb6358c95b91f1ba7ff7b76f9996a1"}, - {file = "pydantic-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9c9bdb3af48e242838f9f6e6127de9be7063aad17b32215ccc36a09c5cf1070"}, - {file = "pydantic-1.9.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ee7e3209db1e468341ef41fe263eb655f67f5c5a76c924044314e139a1103a2"}, - {file = 
"pydantic-1.9.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b6037175234850ffd094ca77bf60fb54b08b5b22bc85865331dd3bda7a02fa1"}, - {file = "pydantic-1.9.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b2571db88c636d862b35090ccf92bf24004393f85c8870a37f42d9f23d13e032"}, - {file = "pydantic-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8b5ac0f1c83d31b324e57a273da59197c83d1bb18171e512908fe5dc7278a1d6"}, - {file = "pydantic-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:bbbc94d0c94dd80b3340fc4f04fd4d701f4b038ebad72c39693c794fd3bc2d9d"}, - {file = "pydantic-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e0896200b6a40197405af18828da49f067c2fa1f821491bc8f5bde241ef3f7d7"}, - {file = "pydantic-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bdfdadb5994b44bd5579cfa7c9b0e1b0e540c952d56f627eb227851cda9db77"}, - {file = "pydantic-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:574936363cd4b9eed8acdd6b80d0143162f2eb654d96cb3a8ee91d3e64bf4cf9"}, - {file = "pydantic-1.9.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c556695b699f648c58373b542534308922c46a1cda06ea47bc9ca45ef5b39ae6"}, - {file = "pydantic-1.9.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f947352c3434e8b937e3aa8f96f47bdfe6d92779e44bb3f41e4c213ba6a32145"}, - {file = "pydantic-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5e48ef4a8b8c066c4a31409d91d7ca372a774d0212da2787c0d32f8045b1e034"}, - {file = "pydantic-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:96f240bce182ca7fe045c76bcebfa0b0534a1bf402ed05914a6f1dadff91877f"}, - {file = "pydantic-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:815ddebb2792efd4bba5488bc8fde09c29e8ca3227d27cf1c6990fc830fd292b"}, - {file = "pydantic-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c5b77947b9e85a54848343928b597b4f74fc364b70926b3c4441ff52620640c"}, - {file = 
"pydantic-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c68c3bc88dbda2a6805e9a142ce84782d3930f8fdd9655430d8576315ad97ce"}, - {file = "pydantic-1.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a79330f8571faf71bf93667d3ee054609816f10a259a109a0738dac983b23c3"}, - {file = "pydantic-1.9.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f5a64b64ddf4c99fe201ac2724daada8595ada0d102ab96d019c1555c2d6441d"}, - {file = "pydantic-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a733965f1a2b4090a5238d40d983dcd78f3ecea221c7af1497b845a9709c1721"}, - {file = "pydantic-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cc6a4cb8a118ffec2ca5fcb47afbacb4f16d0ab8b7350ddea5e8ef7bcc53a16"}, - {file = "pydantic-1.9.0-py3-none-any.whl", hash = "sha256:085ca1de245782e9b46cefcf99deecc67d418737a1fd3f6a4f511344b613a5b3"}, - {file = "pydantic-1.9.0.tar.gz", hash = "sha256:742645059757a56ecd886faf4ed2441b9c0cd406079c2b4bee51bcc3fbcd510a"}, -] +pydantic = [] pyflakes = [ {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, ] -pygit2 = [ - {file = "pygit2-1.7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a4a9a031bb0d2c5cf964da1f6d7a193416a97664655ec43ec349d3609bbde154"}, - {file = "pygit2-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:afcfb8ba97cfedcb8f890ff1e74c4d63755234cca1ca22c2a969e91b8059ae3e"}, - {file = "pygit2-1.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f87498ce717302a7525dad1ee604badc85bdff7bd453473d601077ac58e7cae"}, - {file = "pygit2-1.7.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2355cf24719a35542a88075988c8e11cd155aa375a1f12d62b980986da992eb4"}, - {file = "pygit2-1.7.2-cp310-cp310-win32.whl", 
hash = "sha256:0d72bd05dd3cf514ea2e2df32a2d361f6f29da7d5f02cf0980ea149f49cdfb37"}, - {file = "pygit2-1.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:1b7ff5b656db280ca5d31ecdb17709ed7eaf4e9f419b2fa66f7ff92d8234f621"}, - {file = "pygit2-1.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6aa018c101056c2a8e1fb6467c10281afa088b3b7bc7c17defb404f66039669a"}, - {file = "pygit2-1.7.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a27f8cab6dbef912ccdd690b97948dbf978cffc2ef96ee01b1a8944bfb713f0b"}, - {file = "pygit2-1.7.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c538a0234baa091a02342895d31e5b7c29d85ada44a0b9b4a5fdf78b5607cd48"}, - {file = "pygit2-1.7.2-cp37-cp37m-win32.whl", hash = "sha256:b15579b69381ba41199f5eb7fc85f153105d535c91b8da0321aaa14fec19f09c"}, - {file = "pygit2-1.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6c2ee00048862e193b2b88267f880632735f53db0f2c7f9ebebb21a43d22e58b"}, - {file = "pygit2-1.7.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8c24f3413522c970ae46e79b645ac0978a5be98863a6c6619e8f710bb137e1cb"}, - {file = "pygit2-1.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d42a7cc4b53cc369b82266c7257fe1808ec0e30c34f1796a0b0fa12a0db9ebe"}, - {file = "pygit2-1.7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b1694ad8b4702e9e83a79a97bf3f1b44674057ae9d40bc7eb92e4b4baf79d94"}, - {file = "pygit2-1.7.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a382db82ad4ba3109e74c7b82d6c6c1e451200ee379bad8a17936027c65ea98"}, - {file = "pygit2-1.7.2-cp38-cp38-win32.whl", hash = "sha256:6c168efd7e3bdaeeccfa5ccbe2718107a1fe65cda959586f88a73228217a8783"}, - {file = "pygit2-1.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:041e34f7efd96c7edbea2f478704756fc189082561611c88bc95cf2d085923b5"}, - {file = "pygit2-1.7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ef34b881da55b6702087575ea48a90a08e1077a7f64faa909d9840e16f36c63b"}, - {file 
= "pygit2-1.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0e6368a96058cf619ad574de2b4575f58d363f3f6d4de8e172e1e5d10e1fad36"}, - {file = "pygit2-1.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0748b413966da9b3d3ca8a0a79c63f6581a89b883d2ba64355bbfdb250f2e066"}, - {file = "pygit2-1.7.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d34954c21f109f176d8104b253fc8ce2ca17efb43cfe228d6530c200f362b83"}, - {file = "pygit2-1.7.2-cp39-cp39-win32.whl", hash = "sha256:32979cb98ffd183ed0458c519e6615deeb6a8cc1252223396eee8f526f09989f"}, - {file = "pygit2-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:56d55452dc3eca844d92503d755c8e11699b7ab3b845b81cf365f85d6385d7e0"}, - {file = "pygit2-1.7.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:409c76dea47c2c678295c42f55798da7a0a9adcc6394fe75c061864254bafeef"}, - {file = "pygit2-1.7.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be038fecd27a9a7046cd45b4a6e847955dab2d6e2352ff41ab3b55f700aa0f3d"}, - {file = "pygit2-1.7.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3e91afd629b90b528b756ca2a0fbd5bf8df2cdc08ccd5ab144fbfe69bfc587d"}, - {file = "pygit2-1.7.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:17b06a1ecc16b90fa652cf5cf9698dfb16a87501b76f7001e1d4934a38a49737"}, - {file = "pygit2-1.7.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fe7cdd56d0e5a89ed7754d1aedc6516349f16072225ccfc7b9349ab6448a052"}, - {file = "pygit2-1.7.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2a766b5a988ab373a040d1769e0e1df4618a9f8f33464746b9b2a3c92576df4"}, - {file = "pygit2-1.7.2.tar.gz", hash = "sha256:70a4536a35452c31f823b59b6fdb665aa3778a43b73ccda3a4f79fa9962ad2bb"}, -] +pygit2 = [] pyparsing = [ - {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, 
- {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] pytest = [ {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, @@ -1769,10 +1381,7 @@ redis = [ {file = "redis-3.5.3-py2.py3-none-any.whl", hash = "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24"}, {file = "redis-3.5.3.tar.gz", hash = "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2"}, ] -requests = [ - {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, - {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, -] +requests = [] rfc3986 = [ {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, @@ -1789,44 +1398,7 @@ sortedcontainers = [ {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, ] -sqlalchemy = [ - {file = "SQLAlchemy-1.4.31-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:c3abc34fed19fdeaead0ced8cf56dd121f08198008c033596aa6aae7cc58f59f"}, - {file = "SQLAlchemy-1.4.31-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8d0949b11681380b4a50ac3cd075e4816afe9fa4a8c8ae006c1ca26f0fa40ad8"}, - {file = 
"SQLAlchemy-1.4.31-cp27-cp27m-win32.whl", hash = "sha256:f3b7ec97e68b68cb1f9ddb82eda17b418f19a034fa8380a0ac04e8fe01532875"}, - {file = "SQLAlchemy-1.4.31-cp27-cp27m-win_amd64.whl", hash = "sha256:81f2dd355b57770fdf292b54f3e0a9823ec27a543f947fa2eb4ec0df44f35f0d"}, - {file = "SQLAlchemy-1.4.31-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4ad31cec8b49fd718470328ad9711f4dc703507d434fd45461096da0a7135ee0"}, - {file = "SQLAlchemy-1.4.31-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:05fa14f279d43df68964ad066f653193187909950aa0163320b728edfc400167"}, - {file = "SQLAlchemy-1.4.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dccff41478050e823271642837b904d5f9bda3f5cf7d371ce163f00a694118d6"}, - {file = "SQLAlchemy-1.4.31-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57205844f246bab9b666a32f59b046add8995c665d9ecb2b7b837b087df90639"}, - {file = "SQLAlchemy-1.4.31-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea8210090a816d48a4291a47462bac750e3bc5c2442e6d64f7b8137a7c3f9ac5"}, - {file = "SQLAlchemy-1.4.31-cp310-cp310-win32.whl", hash = "sha256:2e216c13ecc7fcdcbb86bb3225425b3ed338e43a8810c7089ddb472676124b9b"}, - {file = "SQLAlchemy-1.4.31-cp310-cp310-win_amd64.whl", hash = "sha256:e3a86b59b6227ef72ffc10d4b23f0fe994bef64d4667eab4fb8cd43de4223bec"}, - {file = "SQLAlchemy-1.4.31-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:2fd4d3ca64c41dae31228b80556ab55b6489275fb204827f6560b65f95692cf3"}, - {file = "SQLAlchemy-1.4.31-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f22c040d196f841168b1456e77c30a18a3dc16b336ddbc5a24ce01ab4e95ae0"}, - {file = "SQLAlchemy-1.4.31-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c0c7171aa5a57e522a04a31b84798b6c926234cb559c0939840c3235cf068813"}, - {file 
= "SQLAlchemy-1.4.31-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d046a9aeba9bc53e88a41e58beb72b6205abb9a20f6c136161adf9128e589db5"}, - {file = "SQLAlchemy-1.4.31-cp36-cp36m-win32.whl", hash = "sha256:d86132922531f0dc5a4f424c7580a472a924dd737602638e704841c9cb24aea2"}, - {file = "SQLAlchemy-1.4.31-cp36-cp36m-win_amd64.whl", hash = "sha256:ca68c52e3cae491ace2bf39b35fef4ce26c192fd70b4cd90f040d419f70893b5"}, - {file = "SQLAlchemy-1.4.31-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:cf2cd387409b12d0a8b801610d6336ee7d24043b6dd965950eaec09b73e7262f"}, - {file = "SQLAlchemy-1.4.31-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb4b15fb1f0aafa65cbdc62d3c2078bea1ceecbfccc9a1f23a2113c9ac1191fa"}, - {file = "SQLAlchemy-1.4.31-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c317ddd7c586af350a6aef22b891e84b16bff1a27886ed5b30f15c1ed59caeaa"}, - {file = "SQLAlchemy-1.4.31-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c7ed6c69debaf6198fadb1c16ae1253a29a7670bbf0646f92582eb465a0b999"}, - {file = "SQLAlchemy-1.4.31-cp37-cp37m-win32.whl", hash = "sha256:6a01ec49ca54ce03bc14e10de55dfc64187a2194b3b0e5ac0fdbe9b24767e79e"}, - {file = "SQLAlchemy-1.4.31-cp37-cp37m-win_amd64.whl", hash = "sha256:330eb45395874cc7787214fdd4489e2afb931bc49e0a7a8f9cd56d6e9c5b1639"}, - {file = "SQLAlchemy-1.4.31-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:5e9c7b3567edbc2183607f7d9f3e7e89355b8f8984eec4d2cd1e1513c8f7b43f"}, - {file = "SQLAlchemy-1.4.31-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de85c26a5a1c72e695ab0454e92f60213b4459b8d7c502e0be7a6369690eeb1a"}, - {file = "SQLAlchemy-1.4.31-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:975f5c0793892c634c4920057da0de3a48bbbbd0a5c86f5fcf2f2fedf41b76da"}, - {file = "SQLAlchemy-1.4.31-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5c20c8415173b119762b6110af64448adccd4d11f273fb9f718a9865b88a99c"}, - {file = "SQLAlchemy-1.4.31-cp38-cp38-win32.whl", hash = "sha256:b35dca159c1c9fa8a5f9005e42133eed82705bf8e243da371a5e5826440e65ca"}, - {file = "SQLAlchemy-1.4.31-cp38-cp38-win_amd64.whl", hash = "sha256:b7b20c88873675903d6438d8b33fba027997193e274b9367421e610d9da76c08"}, - {file = "SQLAlchemy-1.4.31-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:85e4c244e1de056d48dae466e9baf9437980c19fcde493e0db1a0a986e6d75b4"}, - {file = "SQLAlchemy-1.4.31-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e79e73d5ee24196d3057340e356e6254af4d10e1fc22d3207ea8342fc5ffb977"}, - {file = "SQLAlchemy-1.4.31-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:15a03261aa1e68f208e71ae3cd845b00063d242cbf8c87348a0c2c0fc6e1f2ac"}, - {file = "SQLAlchemy-1.4.31-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ddc5e5ccc0160e7ad190e5c61eb57560f38559e22586955f205e537cda26034"}, - {file = "SQLAlchemy-1.4.31-cp39-cp39-win32.whl", hash = "sha256:289465162b1fa1e7a982f8abe59d26a8331211cad4942e8031d2b7db1f75e649"}, - {file = "SQLAlchemy-1.4.31-cp39-cp39-win_amd64.whl", hash = "sha256:9e4fb2895b83993831ba2401b6404de953fdbfa9d7d4fa6a4756294a83bbc94f"}, - {file = "SQLAlchemy-1.4.31.tar.gz", hash = "sha256:582b59d1e5780a447aada22b461e50b404a9dc05768da1d87368ad8190468418"}, -] +sqlalchemy = [] srcinfo = [ {file = "srcinfo-0.0.8-py3-none-any.whl", hash = "sha256:0922ee4302b927d7ddea74c47e539b226a0a7738dc89f95b66404a28d07f3f6b"}, {file = "srcinfo-0.0.8.tar.gz", hash = "sha256:5ac610cf8b15d4b0a0374bd1f7ad301675c2938f0414addf3ef7d7e3fcaf5c65"}, @@ -1843,18 +1415,9 @@ toml = [ 
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] -tomli = [ - {file = "tomli-2.0.0-py3-none-any.whl", hash = "sha256:b5bde28da1fed24b9bd1d4d2b8cba62300bfb4ec9a6187a957e8ddb9434c5224"}, - {file = "tomli-2.0.0.tar.gz", hash = "sha256:c292c34f58502a1eb2bbb9f5bbc9a5ebc37bee10ffb8c2d6bbdfa8eb13cc14e1"}, -] -typing-extensions = [ - {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, - {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, -] -urllib3 = [ - {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"}, - {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"}, -] +tomli = [] +typing-extensions = [] +urllib3 = [] uvicorn = [ {file = "uvicorn-0.15.0-py3-none-any.whl", hash = "sha256:17f898c64c71a2640514d4089da2689e5db1ce5d4086c2d53699bf99513421c1"}, {file = "uvicorn-0.15.0.tar.gz", hash = "sha256:d9a3c0dd1ca86728d3e235182683b4cf94cd53a867c288eaeca80ee781b2caff"}, @@ -1863,15 +1426,6 @@ webencodings = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, ] -werkzeug = [ - {file = "Werkzeug-2.0.2-py3-none-any.whl", hash = "sha256:63d3dc1cf60e7b7e35e97fa9861f7397283b75d765afcaefd993d6046899de8f"}, - {file = "Werkzeug-2.0.2.tar.gz", hash = "sha256:aa2bb6fc8dee8d6c504c0ac1e7f5f7dc5810a9903e793b6f715a9f015bdadb9a"}, -] -wsproto = [ - {file = "wsproto-1.0.0-py3-none-any.whl", hash = 
"sha256:d8345d1808dd599b5ffb352c25a367adb6157e664e140dbecba3f9bc007edb9f"}, - {file = "wsproto-1.0.0.tar.gz", hash = "sha256:868776f8456997ad0d9720f7322b746bbe9193751b5b290b7f924659377c8c38"}, -] -zipp = [ - {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, - {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, -] +werkzeug = [] +wsproto = [] +zipp = [] diff --git a/pyproject.toml b/pyproject.toml index 41d8301f..656a854b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,7 +64,7 @@ protobuf = "^3.19.0" pygit2 = "^1.7.0" python-multipart = "^0.0.5" redis = "^3.5.3" -requests = "^2.26.0" +requests = "^2.28.1" paginate = "^0.5.6" # SQL From 0e82916b0a149e81b0936eb5edcfd387798e9481 Mon Sep 17 00:00:00 2001 From: Jelle van der Waa Date: Tue, 2 Aug 2022 16:30:45 +0200 Subject: [PATCH 052/415] fix(python): don't show maintainer link for non logged in users Show a plain maintainer text for non logged in users like the submitted, last packager. Closes #373 --- templates/partials/packages/details.html | 5 ++++- test/test_pkgbase_routes.py | 6 +++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/templates/partials/packages/details.html b/templates/partials/packages/details.html index e0eda54c..771b311d 100644 --- a/templates/partials/packages/details.html +++ b/templates/partials/packages/details.html @@ -108,7 +108,7 @@

    diff --git a/test/test_pkgbase_routes.py b/test/test_pkgbase_routes.py index a152c590..dae43e37 100644 --- a/test/test_pkgbase_routes.py +++ b/test/test_pkgbase_routes.py @@ -272,9 +272,9 @@ def test_pkgbase_maintainer(client: TestClient, user: User, maintainer: User, root = parse_root(resp.text) maint = root.xpath('//table[@id="pkginfo"]/tr[@class="pkgmaint"]/td')[0] - maint, comaint = maint.xpath('./a') - assert maint.text.strip() == maintainer.Username - assert comaint.text.strip() == user.Username + maint, comaint = maint.text.strip().split() + assert maint == maintainer.Username + assert comaint == f'({user.Username})' def test_pkgbase_voters(client: TestClient, tu_user: User, package: Package): From 913ce8a4f0cac79fcd089cb8a66a6aa1b02be601 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Fri, 12 Aug 2022 19:58:55 -0700 Subject: [PATCH 053/415] fix(performance): lazily load expensive modules within aurweb.db Closes #374 Signed-off-by: Kevin Morris --- aurweb/db.py | 81 ++++++++++++++++++++++++++++++---------------------- 1 file changed, 47 insertions(+), 34 deletions(-) diff --git a/aurweb/db.py b/aurweb/db.py index 4c53730a..94514d35 100644 --- a/aurweb/db.py +++ b/aurweb/db.py @@ -1,34 +1,15 @@ -import functools -import hashlib -import math -import os -import re - -from typing import Iterable, NewType - -import sqlalchemy - -from sqlalchemy import create_engine, event -from sqlalchemy.engine.base import Engine -from sqlalchemy.engine.url import URL -from sqlalchemy.orm import Query, Session, SessionTransaction, scoped_session, sessionmaker - -import aurweb.config -import aurweb.util - +# Supported database drivers. DRIVERS = { "mysql": "mysql+mysqldb" } -# Some types we don't get access to in this module. -Base = NewType("Base", "aurweb.models.declarative_base.Base") - def make_random_value(table: str, column: str, length: int): """ Generate a unique, random value for a string column in a table. 
:return: A unique string that is not in the database """ + import aurweb.util string = aurweb.util.make_random_string(length) while query(table).filter(column == string).first(): string = aurweb.util.make_random_string(length) @@ -52,6 +33,10 @@ def test_name() -> str: :return: Unhashed database name """ + import os + + import aurweb.config + db = os.environ.get("PYTEST_CURRENT_TEST", aurweb.config.get("database", "name")) return db.split(":")[0] @@ -70,7 +55,10 @@ def name() -> str: dbname = test_name() if not dbname.startswith("test/"): return dbname + + import hashlib sha1 = hashlib.sha1(dbname.encode()).hexdigest() + return "db" + sha1 @@ -78,12 +66,13 @@ def name() -> str: _sessions = dict() -def get_session(engine: Engine = None) -> Session: +def get_session(engine=None): """ Return aurweb.db's global session. """ dbname = name() global _sessions if dbname not in _sessions: + from sqlalchemy.orm import scoped_session, sessionmaker if not engine: # pragma: no cover engine = get_engine() @@ -106,13 +95,17 @@ def pop_session(dbname: str) -> None: _sessions.pop(dbname) -def refresh(model: Base) -> Base: - """ Refresh the session's knowledge of `model`. """ +def refresh(model): + """ + Refresh the session's knowledge of `model`. + + :returns: Passed in `model` + """ get_session().refresh(model) return model -def query(Model: Base, *args, **kwargs) -> Query: +def query(Model, *args, **kwargs): """ Perform an ORM query against the database session. @@ -124,7 +117,7 @@ def query(Model: Base, *args, **kwargs) -> Query: return get_session().query(Model).filter(*args, **kwargs) -def create(Model: Base, *args, **kwargs) -> Base: +def create(Model, *args, **kwargs): """ Create a record and add() it to the database session. @@ -135,7 +128,7 @@ def create(Model: Base, *args, **kwargs) -> Base: return add(instance) -def delete(model: Base) -> None: +def delete(model) -> None: """ Delete a set of records found by Query.filter(*args, **kwargs). 
@@ -144,8 +137,9 @@ def delete(model: Base) -> None: get_session().delete(model) -def delete_all(iterable: Iterable) -> None: +def delete_all(iterable) -> None: """ Delete each instance found in `iterable`. """ + import aurweb.util session_ = get_session() aurweb.util.apply_all(iterable, session_.delete) @@ -155,23 +149,29 @@ def rollback() -> None: get_session().rollback() -def add(model: Base) -> Base: +def add(model): """ Add `model` to the database session. """ get_session().add(model) return model -def begin() -> SessionTransaction: +def begin(): """ Begin an SQLAlchemy SessionTransaction. """ return get_session().begin() -def get_sqlalchemy_url() -> URL: +def get_sqlalchemy_url(): """ Build an SQLAlchemy URL for use with create_engine. :return: sqlalchemy.engine.url.URL """ + import sqlalchemy + + from sqlalchemy.engine.url import URL + + import aurweb.config + constructor = URL parts = sqlalchemy.__version__.split('.') @@ -209,13 +209,17 @@ def get_sqlalchemy_url() -> URL: def sqlite_regexp(regex, item) -> bool: # pragma: no cover """ Method which mimics SQL's REGEXP for SQLite. """ + import re return bool(re.search(regex, str(item))) -def setup_sqlite(engine: Engine) -> None: # pragma: no cover +def setup_sqlite(engine) -> None: # pragma: no cover """ Perform setup for an SQLite engine. """ + from sqlalchemy import event + @event.listens_for(engine, "connect") def do_begin(conn, record): + import functools create_deterministic_function = functools.partial( conn.create_function, deterministic=True @@ -227,7 +231,7 @@ def setup_sqlite(engine: Engine) -> None: # pragma: no cover _engines = dict() -def get_engine(dbname: str = None, echo: bool = False) -> Engine: +def get_engine(dbname: str = None, echo: bool = False): """ Return the SQLAlchemy engine for `dbname`. 
@@ -238,6 +242,8 @@ def get_engine(dbname: str = None, echo: bool = False) -> Engine: :param echo: Flag passed through to sqlalchemy.create_engine :return: SQLAlchemy Engine instance """ + import aurweb.config + if not dbname: dbname = name() @@ -254,6 +260,7 @@ def get_engine(dbname: str = None, echo: bool = False) -> Engine: "echo": echo, "connect_args": connect_args } + from sqlalchemy import create_engine _engines[dbname] = create_engine(get_sqlalchemy_url(), **kwargs) if is_sqlite: # pragma: no cover @@ -301,7 +308,10 @@ class ConnectionExecutor: _conn = None _paramstyle = None - def __init__(self, conn, backend=aurweb.config.get("database", "backend")): + def __init__(self, conn, backend=None): + import aurweb.config + + backend = backend or aurweb.config.get("database", "backend") self._conn = conn if backend == "mysql": self._paramstyle = "format" @@ -339,6 +349,7 @@ class Connection: _conn = None def __init__(self): + import aurweb.config aur_db_backend = aurweb.config.get('database', 'backend') if aur_db_backend == 'mysql': @@ -357,7 +368,9 @@ class Connection: elif aur_db_backend == 'sqlite': # pragma: no cover # TODO: SQLite support has been removed in FastAPI. It remains # here to fund its support for PHP until it is removed. + import math import sqlite3 + aur_db_name = aurweb.config.get('database', 'name') self._conn = sqlite3.connect(aur_db_name) self._conn.create_function("POWER", 2, math.pow) From 1a7f6e1fa9f500fead3650ef1e4ec9521884e1d8 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Fri, 12 Aug 2022 21:37:34 -0700 Subject: [PATCH 054/415] feat(db): add an index for SSHPubKeys.PubKey Speeds up SSHPubKeys.PubKey searches in a larger database. 
Signed-off-by: Kevin Morris --- aurweb/schema.py | 1 + ...d70103d2e82_add_sshpubkeys_pubkey_index.py | 28 +++++++++++++++++++ test/test_migration.py | 23 +++++++++++++++ 3 files changed, 52 insertions(+) create mode 100644 migrations/versions/dd70103d2e82_add_sshpubkeys_pubkey_index.py create mode 100644 test/test_migration.py diff --git a/aurweb/schema.py b/aurweb/schema.py index d2644541..e1373bf4 100644 --- a/aurweb/schema.py +++ b/aurweb/schema.py @@ -87,6 +87,7 @@ SSHPubKeys = Table( Column('UserID', ForeignKey('Users.ID', ondelete='CASCADE'), nullable=False), Column('Fingerprint', String(44), primary_key=True), Column('PubKey', String(4096), nullable=False), + Index('SSHPubKeysPubKey', 'PubKey'), mysql_engine='InnoDB', mysql_charset='utf8mb4', mysql_collate='utf8mb4_bin', ) diff --git a/migrations/versions/dd70103d2e82_add_sshpubkeys_pubkey_index.py b/migrations/versions/dd70103d2e82_add_sshpubkeys_pubkey_index.py new file mode 100644 index 00000000..61e4dc79 --- /dev/null +++ b/migrations/versions/dd70103d2e82_add_sshpubkeys_pubkey_index.py @@ -0,0 +1,28 @@ +"""add SSHPubKeys.PubKey index + +Revision ID: dd70103d2e82 +Revises: d64e5571bc8d +Create Date: 2022-08-12 21:30:26.155465 + +""" +import traceback + +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = 'dd70103d2e82' +down_revision = 'd64e5571bc8d' +branch_labels = None +depends_on = None + + +def upgrade(): + try: + op.create_index("SSHPubKeysPubKey", "SSHPubKeys", ["PubKey"]) + except Exception: + traceback.print_exc() + print("failing silently...") + + +def downgrade(): + op.drop_index("SSHPubKeysPubKey", "SSHPubKeys") diff --git a/test/test_migration.py b/test/test_migration.py new file mode 100644 index 00000000..cf8702fa --- /dev/null +++ b/test/test_migration.py @@ -0,0 +1,23 @@ +import pytest + +from sqlalchemy import inspect + +from aurweb.db import get_engine +from aurweb.models.ssh_pub_key import SSHPubKey + + +@pytest.fixture(autouse=True) +def setup(db_test): + return + + +def test_sshpubkeys_pubkey_index(): + insp = inspect(get_engine()) + indexes = insp.get_indexes(SSHPubKey.__tablename__) + + found_pk = False + for idx in indexes: + if idx.get("name") == "SSHPubKeysPubKey": + assert idx.get("column_names") == ["PubKey"] + found_pk = True + assert found_pk From 5abd5db313c871678bcf54e7a2c2a0fc056401b0 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Sat, 13 Aug 2022 19:23:19 -0700 Subject: [PATCH 055/415] Revert "feat(db): add an index for SSHPubKeys.PubKey" This reverts commit 1a7f6e1fa9f500fead3650ef1e4ec9521884e1d8. This commit broke account creation in some way. We'd still like to do this, but we need to ensure it does not intrude on other facets. Extra: We should really work out how this even passed tests; it should not have. 
--- aurweb/schema.py | 1 - ...d70103d2e82_add_sshpubkeys_pubkey_index.py | 28 ------------------- test/test_migration.py | 23 --------------- 3 files changed, 52 deletions(-) delete mode 100644 migrations/versions/dd70103d2e82_add_sshpubkeys_pubkey_index.py delete mode 100644 test/test_migration.py diff --git a/aurweb/schema.py b/aurweb/schema.py index e1373bf4..d2644541 100644 --- a/aurweb/schema.py +++ b/aurweb/schema.py @@ -87,7 +87,6 @@ SSHPubKeys = Table( Column('UserID', ForeignKey('Users.ID', ondelete='CASCADE'), nullable=False), Column('Fingerprint', String(44), primary_key=True), Column('PubKey', String(4096), nullable=False), - Index('SSHPubKeysPubKey', 'PubKey'), mysql_engine='InnoDB', mysql_charset='utf8mb4', mysql_collate='utf8mb4_bin', ) diff --git a/migrations/versions/dd70103d2e82_add_sshpubkeys_pubkey_index.py b/migrations/versions/dd70103d2e82_add_sshpubkeys_pubkey_index.py deleted file mode 100644 index 61e4dc79..00000000 --- a/migrations/versions/dd70103d2e82_add_sshpubkeys_pubkey_index.py +++ /dev/null @@ -1,28 +0,0 @@ -"""add SSHPubKeys.PubKey index - -Revision ID: dd70103d2e82 -Revises: d64e5571bc8d -Create Date: 2022-08-12 21:30:26.155465 - -""" -import traceback - -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = 'dd70103d2e82' -down_revision = 'd64e5571bc8d' -branch_labels = None -depends_on = None - - -def upgrade(): - try: - op.create_index("SSHPubKeysPubKey", "SSHPubKeys", ["PubKey"]) - except Exception: - traceback.print_exc() - print("failing silently...") - - -def downgrade(): - op.drop_index("SSHPubKeysPubKey", "SSHPubKeys") diff --git a/test/test_migration.py b/test/test_migration.py deleted file mode 100644 index cf8702fa..00000000 --- a/test/test_migration.py +++ /dev/null @@ -1,23 +0,0 @@ -import pytest - -from sqlalchemy import inspect - -from aurweb.db import get_engine -from aurweb.models.ssh_pub_key import SSHPubKey - - -@pytest.fixture(autouse=True) -def setup(db_test): - return - - -def test_sshpubkeys_pubkey_index(): - insp = inspect(get_engine()) - indexes = insp.get_indexes(SSHPubKey.__tablename__) - - found_pk = False - for idx in indexes: - if idx.get("name") == "SSHPubKeysPubKey": - assert idx.get("column_names") == ["PubKey"] - found_pk = True - assert found_pk From 6c7e2749688100a10ac7de1d422b8c4cee98f393 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Sat, 13 Aug 2022 19:52:50 -0700 Subject: [PATCH 056/415] feat(db): add an index for SSHPubKeys.PubKey (#2) Speeds up SSHPubKeys.PubKey searches in a larger database. 
Fixed form of the original commit which was reverted, 1a7f6e1fa9f500fead3650ef1e4ec9521884e1d8 Signed-off-by: Kevin Morris --- aurweb/schema.py | 2 ++ ...d70103d2e82_add_sshpubkeys_pubkey_index.py | 35 +++++++++++++++++++ test/test_migration.py | 23 ++++++++++++ 3 files changed, 60 insertions(+) create mode 100644 migrations/versions/dd70103d2e82_add_sshpubkeys_pubkey_index.py create mode 100644 test/test_migration.py diff --git a/aurweb/schema.py b/aurweb/schema.py index d2644541..3d8369c9 100644 --- a/aurweb/schema.py +++ b/aurweb/schema.py @@ -87,6 +87,8 @@ SSHPubKeys = Table( Column('UserID', ForeignKey('Users.ID', ondelete='CASCADE'), nullable=False), Column('Fingerprint', String(44), primary_key=True), Column('PubKey', String(4096), nullable=False), + Index('SSHPubKeysUserID', 'UserID'), + Index('SSHPubKeysPubKey', 'PubKey'), mysql_engine='InnoDB', mysql_charset='utf8mb4', mysql_collate='utf8mb4_bin', ) diff --git a/migrations/versions/dd70103d2e82_add_sshpubkeys_pubkey_index.py b/migrations/versions/dd70103d2e82_add_sshpubkeys_pubkey_index.py new file mode 100644 index 00000000..7d3f4b59 --- /dev/null +++ b/migrations/versions/dd70103d2e82_add_sshpubkeys_pubkey_index.py @@ -0,0 +1,35 @@ +"""add SSHPubKeys.PubKey index + +Revision ID: dd70103d2e82 +Revises: d64e5571bc8d +Create Date: 2022-08-12 21:30:26.155465 + +""" +import traceback + +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = 'dd70103d2e82' +down_revision = 'd64e5571bc8d' +branch_labels = None +depends_on = None + + +def upgrade(): + try: + op.create_index("SSHPubKeysUserID", "SSHPubKeys", ["UserID"]) + except Exception: + traceback.print_exc() + print("failing silently...") + + try: + op.create_index("SSHPubKeysPubKey", "SSHPubKeys", ["PubKey"]) + except Exception: + traceback.print_exc() + print("failing silently...") + + +def downgrade(): + op.drop_index("SSHPubKeysPubKey", "SSHPubKeys") + op.drop_index("SSHPubKeysUserID", "SSHPubKeys") diff --git a/test/test_migration.py b/test/test_migration.py new file mode 100644 index 00000000..cf8702fa --- /dev/null +++ b/test/test_migration.py @@ -0,0 +1,23 @@ +import pytest + +from sqlalchemy import inspect + +from aurweb.db import get_engine +from aurweb.models.ssh_pub_key import SSHPubKey + + +@pytest.fixture(autouse=True) +def setup(db_test): + return + + +def test_sshpubkeys_pubkey_index(): + insp = inspect(get_engine()) + indexes = insp.get_indexes(SSHPubKey.__tablename__) + + found_pk = False + for idx in indexes: + if idx.get("name") == "SSHPubKeysPubKey": + assert idx.get("column_names") == ["PubKey"] + found_pk = True + assert found_pk From 952c24783baa6c5924c5aaf2b9d9003866284657 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Sat, 13 Aug 2022 20:12:58 -0700 Subject: [PATCH 057/415] fix(docker): apply chown each time sshd is started Signed-off-by: Kevin Morris --- docker/scripts/run-sshd.sh | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docker/scripts/run-sshd.sh b/docker/scripts/run-sshd.sh index d488e80d..45bd0e08 100755 --- a/docker/scripts/run-sshd.sh +++ b/docker/scripts/run-sshd.sh @@ -1,2 +1,7 @@ #!/bin/bash + +# Update this every time. 
+chown -R aur:aur /aurweb/aur.git + +# Start up sshd exec /usr/sbin/sshd -e -p 2222 -D From 829a8b4b813c23e2e85dad4e7aca00ffba86601d Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Sat, 13 Aug 2022 20:56:43 -0700 Subject: [PATCH 058/415] Revert "fix(docker): apply chown each time sshd is started" This reverts commit 952c24783baa6c5924c5aaf2b9d9003866284657. The issue found was actually: - If `./aur.git` exists within the aurweb repository locally, it also ends up in the destination, stopping the aurweb_git_data volume from being mounted properly. --- docker/scripts/run-sshd.sh | 5 ----- 1 file changed, 5 deletions(-) diff --git a/docker/scripts/run-sshd.sh b/docker/scripts/run-sshd.sh index 45bd0e08..d488e80d 100755 --- a/docker/scripts/run-sshd.sh +++ b/docker/scripts/run-sshd.sh @@ -1,7 +1,2 @@ #!/bin/bash - -# Update this every time. -chown -R aur:aur /aurweb/aur.git - -# Start up sshd exec /usr/sbin/sshd -e -p 2222 -D From 6f7ac33166883c1c9c1b2b73c5735e46a49d3f6d Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Sat, 13 Aug 2022 23:28:31 -0700 Subject: [PATCH 059/415] Revert "feat(db): add an index for SSHPubKeys.PubKey (#2)" This reverts commit 6c7e2749688100a10ac7de1d422b8c4cee98f393. Once again, this does actually cause issues with foreign keys. Removing it for now and will revisit this. 
--- aurweb/schema.py | 2 -- ...d70103d2e82_add_sshpubkeys_pubkey_index.py | 35 ------------------- test/test_migration.py | 23 ------------ 3 files changed, 60 deletions(-) delete mode 100644 migrations/versions/dd70103d2e82_add_sshpubkeys_pubkey_index.py delete mode 100644 test/test_migration.py diff --git a/aurweb/schema.py b/aurweb/schema.py index 3d8369c9..d2644541 100644 --- a/aurweb/schema.py +++ b/aurweb/schema.py @@ -87,8 +87,6 @@ SSHPubKeys = Table( Column('UserID', ForeignKey('Users.ID', ondelete='CASCADE'), nullable=False), Column('Fingerprint', String(44), primary_key=True), Column('PubKey', String(4096), nullable=False), - Index('SSHPubKeysUserID', 'UserID'), - Index('SSHPubKeysPubKey', 'PubKey'), mysql_engine='InnoDB', mysql_charset='utf8mb4', mysql_collate='utf8mb4_bin', ) diff --git a/migrations/versions/dd70103d2e82_add_sshpubkeys_pubkey_index.py b/migrations/versions/dd70103d2e82_add_sshpubkeys_pubkey_index.py deleted file mode 100644 index 7d3f4b59..00000000 --- a/migrations/versions/dd70103d2e82_add_sshpubkeys_pubkey_index.py +++ /dev/null @@ -1,35 +0,0 @@ -"""add SSHPubKeys.PubKey index - -Revision ID: dd70103d2e82 -Revises: d64e5571bc8d -Create Date: 2022-08-12 21:30:26.155465 - -""" -import traceback - -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = 'dd70103d2e82' -down_revision = 'd64e5571bc8d' -branch_labels = None -depends_on = None - - -def upgrade(): - try: - op.create_index("SSHPubKeysUserID", "SSHPubKeys", ["UserID"]) - except Exception: - traceback.print_exc() - print("failing silently...") - - try: - op.create_index("SSHPubKeysPubKey", "SSHPubKeys", ["PubKey"]) - except Exception: - traceback.print_exc() - print("failing silently...") - - -def downgrade(): - op.drop_index("SSHPubKeysPubKey", "SSHPubKeys") - op.drop_index("SSHPubKeysUserID", "SSHPubKeys") diff --git a/test/test_migration.py b/test/test_migration.py deleted file mode 100644 index cf8702fa..00000000 --- a/test/test_migration.py +++ /dev/null @@ -1,23 +0,0 @@ -import pytest - -from sqlalchemy import inspect - -from aurweb.db import get_engine -from aurweb.models.ssh_pub_key import SSHPubKey - - -@pytest.fixture(autouse=True) -def setup(db_test): - return - - -def test_sshpubkeys_pubkey_index(): - insp = inspect(get_engine()) - indexes = insp.get_indexes(SSHPubKey.__tablename__) - - found_pk = False - for idx in indexes: - if idx.get("name") == "SSHPubKeysPubKey": - assert idx.get("column_names") == ["PubKey"] - found_pk = True - assert found_pk From d63615a9946c2d82af82750f09cceca441f7117c Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Sat, 13 Aug 2022 23:17:53 -0700 Subject: [PATCH 060/415] fix(docker): fix ca entrypoint logic and healthcheck With this commit, it is advised to `rm ./data/root_ca.crt ./data/*.pem`, as new certificates and a root CA will be generated while utilizing the step volume. 
Closes #367 Signed-off-by: Kevin Morris --- docker-compose.yml | 30 ++++++++++++++++-------------- docker/ca-entrypoint.sh | 40 ++++++++++++++++------------------------ docker/health/ca.sh | 4 ++-- 3 files changed, 34 insertions(+), 40 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index a56cbe72..9edffeeb 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -31,8 +31,10 @@ services: entrypoint: /docker/ca-entrypoint.sh command: /docker/scripts/run-ca.sh healthcheck: - test: "bash /docker/health/run-ca.sh" - interval: 2s + test: "bash /docker/health/ca.sh" + interval: 3s + volumes: + - step:/root/.step memcached: image: aurweb:latest @@ -40,7 +42,7 @@ services: command: /docker/scripts/run-memcached.sh healthcheck: test: "bash /docker/health/memcached.sh" - interval: 2s + interval: 3s redis: image: aurweb:latest @@ -49,7 +51,7 @@ services: command: /docker/scripts/run-redis.sh healthcheck: test: "bash /docker/health/redis.sh" - interval: 2s + interval: 3s ports: - "127.0.0.1:16379:6379" @@ -67,7 +69,7 @@ services: - mariadb_data:/var/lib/mysql healthcheck: test: "bash /docker/health/mariadb.sh" - interval: 2s + interval: 3s mariadb_init: image: aurweb:latest @@ -98,7 +100,7 @@ services: - mariadb_test_run:/var/run/mysqld # Bind socket in this volume. 
healthcheck: test: "bash /docker/health/mariadb.sh" - interval: 2s + interval: 3s git: image: aurweb:latest @@ -113,7 +115,7 @@ services: - "2222:2222" healthcheck: test: "bash /docker/health/sshd.sh" - interval: 2s + interval: 3s depends_on: mariadb_init: condition: service_started @@ -129,7 +131,7 @@ services: command: /docker/scripts/run-smartgit.sh healthcheck: test: "bash /docker/health/smartgit.sh" - interval: 2s + interval: 3s cgit-php: image: aurweb:latest @@ -142,7 +144,7 @@ services: command: /docker/scripts/run-cgit.sh 3000 healthcheck: test: "bash /docker/health/cgit.sh 3000" - interval: 2s + interval: 3s depends_on: git: condition: service_healthy @@ -162,7 +164,7 @@ services: command: /docker/scripts/run-cgit.sh 3000 healthcheck: test: "bash /docker/health/cgit.sh 3000" - interval: 2s + interval: 3s depends_on: git: condition: service_healthy @@ -199,7 +201,7 @@ services: command: /docker/scripts/run-php.sh healthcheck: test: "bash /docker/health/php.sh" - interval: 2s + interval: 3s depends_on: git: condition: service_healthy @@ -228,7 +230,7 @@ services: command: /docker/scripts/run-fastapi.sh "${FASTAPI_BACKEND}" healthcheck: test: "bash /docker/health/fastapi.sh ${FASTAPI_BACKEND}" - interval: 2s + interval: 3s depends_on: git: condition: service_healthy @@ -254,10 +256,10 @@ services: - "127.0.0.1:8444:8444" # FastAPI healthcheck: test: "bash /docker/health/nginx.sh" - interval: 2s + interval: 3s depends_on: ca: - condition: service_started + condition: service_healthy cgit-php: condition: service_healthy cgit-fastapi: diff --git a/docker/ca-entrypoint.sh b/docker/ca-entrypoint.sh index d03efbbc..55c7cd75 100755 --- a/docker/ca-entrypoint.sh +++ b/docker/ca-entrypoint.sh @@ -89,34 +89,26 @@ step_cert_request() { chmod 666 /data/${1}.*.pem } -if [ ! -f $DATA_ROOT_CA ]; then +if [ ! -d /root/.step/config ]; then + # Remove existing certs. 
+ rm -vf /data/localhost.{cert,key}.pem /data/root_ca.crt + setup_step_ca install_step_ca + + start_step_ca + for host in $DATA_CERT_HOSTS; do + step_cert_request $host /data/${host}.cert.pem /data/${host}.key.pem + done + kill_step_ca + + echo -n "WARN: Your certificates are being regenerated to resolve " + echo -n "an inconsistent step-ca state. You will need to re-import " + echo "the root CA certificate into your browser." +else + exec "$@" fi -# For all hosts separated by spaces in $DATA_CERT_HOSTS, perform a check -# for their existence in /data and react accordingly. -for host in $DATA_CERT_HOSTS; do - if [ -f /data/${host}.cert.pem ] && [ -f /data/${host}.key.pem ]; then - # Found an override. Move on to running the service after - # printing a notification to the user. - echo "Found '${host}.{cert,key}.pem' override, skipping..." - echo -n "Note: If you need to regenerate certificates, run " - echo '`rm -f data/*.{cert,key}.pem` before starting this service.' - exec "$@" - else - # Otherwise, we had a missing cert or key, so remove both. - rm -f /data/${host}.cert.pem - rm -f /data/${host}.key.pem - fi -done - -start_step_ca -for host in $DATA_CERT_HOSTS; do - step_cert_request $host /data/${host}.cert.pem /data/${host}.key.pem -done -kill_step_ca - # Set permissions to /data to rwx for everybody. 
chmod 777 /data diff --git a/docker/health/ca.sh b/docker/health/ca.sh index 3e4bbe8e..6bf8360e 100755 --- a/docker/health/ca.sh +++ b/docker/health/ca.sh @@ -1,2 +1,2 @@ - -exec printf "" 2>>/dev/null >>/dev/tcp/127.0.0.1/8443 +#!/bin/bash +exec curl -qkiI 'https://localhost:8443/' From a82d552e1bf0892e93b2c560e0924ada67f4d89d Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Fri, 12 Aug 2022 17:18:10 -0700 Subject: [PATCH 061/415] update: migrate new transifex client configuration Signed-off-by: Kevin Morris --- .tx/config | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.tx/config b/.tx/config index e986f81c..7f53b684 100644 --- a/.tx/config +++ b/.tx/config @@ -1,7 +1,8 @@ [main] host = https://www.transifex.com -[aurweb.aurwebpot] +[o:lfleischer:p:aurweb:r:aurwebpot] file_filter = po/.po source_file = po/aurweb.pot source_lang = en + From 4565aa38cf3cc227bf34a9f8fa40ca698b12ded5 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Fri, 12 Aug 2022 17:18:54 -0700 Subject: [PATCH 062/415] update: Swedish translations Pulled from Transifex on 08/12/2022 - 08/13/2022. 
Signed-off-by: Kevin Morris --- po/sv_SE.po | 109 ++++++++++++++++++++++++++-------------------------- 1 file changed, 55 insertions(+), 54 deletions(-) diff --git a/po/sv_SE.po b/po/sv_SE.po index 6d09e207..6abb8452 100644 --- a/po/sv_SE.po +++ b/po/sv_SE.po @@ -4,17 +4,18 @@ # # Translators: # Johannes Löthberg , 2015-2016 +# Kevin Morris , 2022 # Kim Svensson , 2011 # Kim Svensson , 2012 -# Luna Jernberg , 2021 +# Luna Jernberg , 2021-2022 # Robin Björnsvik , 2014-2015 msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Kevin Morris , 2022\n" "Language-Team: Swedish (Sweden) (http://www.transifex.com/lfleischer/aurweb/language/sv_SE/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" @@ -36,17 +37,17 @@ msgstr "Notera" #: html/404.php msgid "Git clone URLs are not meant to be opened in a browser." -msgstr "" +msgstr "git clone URLs är inte avsedda att öppnas i en webbläsare." #: html/404.php #, php-format msgid "To clone the Git repository of %s, run %s." -msgstr "" +msgstr "För att klona Git-förrådet för %s, kör %s." #: html/404.php #, php-format msgid "Click %shere%s to return to the %s details page." -msgstr "" +msgstr "Klicka %shär%s för att återgå till %sdetaljsidan." 
#: html/503.php msgid "Service Unavailable" @@ -169,7 +170,7 @@ msgstr "Redigera kommentar" #: html/home.php template/header.php msgid "Dashboard" -msgstr "" +msgstr "Informationspanel" #: html/home.php template/header.php msgid "Home" @@ -197,7 +198,7 @@ msgstr "Sam-ansvariga paket" #: html/home.php msgid "Search for packages I co-maintain" -msgstr "" +msgstr "Sök efter paket jag är sam-ansvarig för" #: html/home.php #, php-format @@ -459,7 +460,7 @@ msgstr "Fortsätt" msgid "" "If you have forgotten the user name and the primary e-mail address you used " "to register, please send a message to the %saur-general%s mailing list." -msgstr "" +msgstr "Om du har glömt användarnamnet och den primära e-postadress som du använde för att registrera dig, vänligen skicka ett meddelande till %saur-general%s e-postlistan." #: html/passreset.php msgid "Enter your user name or your primary e-mail address:" @@ -479,7 +480,7 @@ msgstr "De valda paketen har inte gjorts herrelösa, kryssa i konfirmationsrutan msgid "" "The selected packages have not been adopted, check the confirmation " "checkbox." -msgstr "" +msgstr "De valda paketen har inte adopteras, markera kryssrutan för bekräftelse." #: html/pkgbase.php lib/pkgreqfuncs.inc.php msgid "Cannot find package to merge votes and comments into." @@ -545,7 +546,7 @@ msgstr "Använd det här formuläret för att göra paket basen %s%s%s och de f msgid "" "By selecting the checkbox, you confirm that you want to no longer be a " "package co-maintainer." -msgstr "" +msgstr "Genom att markera kryssrutan bekräftar du att du inte längre vill vara sam-ansvarig för paket." #: html/pkgdisown.php #, php-format @@ -585,7 +586,7 @@ msgid "" " package version in the AUR does not match the most recent commit. Flagging " "this package should only be done if the sources moved or changes in the " "PKGBUILD are required because of recent upstream changes." -msgstr "" +msgstr "Detta verkar vara ett VCS-paket. 
Flagga den %sinte%s som inaktuell om paketversionen i AUR inte matchar den mest senaste commit. Flaggning av detta paket bör endast göras om källkoden har flyttats eller ändringar i PKGBUILD krävs på grund av de senaste uppströmsändringarna." #: html/pkgflag.php #, php-format @@ -974,7 +975,7 @@ msgstr "Paket detaljer kunde inte hittas." #: aurweb/routers/auth.py msgid "Bad Referer header." -msgstr "" +msgstr "Dåligt referenshuvud." #: aurweb/routers/packages.py msgid "You did not select any packages to be notified about." @@ -982,19 +983,19 @@ msgstr "Du har inte valt några paket att notifieras om." #: aurweb/routers/packages.py msgid "The selected packages' notifications have been enabled." -msgstr "" +msgstr "De valda paketens aviseringar har aktiverats." #: aurweb/routers/packages.py msgid "You did not select any packages for notification removal." -msgstr "" +msgstr "Du har inte valt några paket för notifieringsborttagning." #: aurweb/routers/packages.py msgid "A package you selected does not have notifications enabled." -msgstr "" +msgstr "Ett paket du valde har inga notifieringar aktiverade." #: aurweb/routers/packages.py msgid "The selected packages' notifications have been removed." -msgstr "" +msgstr "De valda paketens notifieringar har blivit borttagna." #: lib/pkgbasefuncs.inc.php msgid "You must be logged in before you can flag packages." @@ -1046,7 +1047,7 @@ msgstr "Du måste vara inloggad före du kan adoptera paket." #: aurweb/routers/package.py msgid "You are not allowed to adopt one of the packages you selected." -msgstr "" +msgstr "Du är inte tillåten att adoptera ett av paketen du valde." #: lib/pkgbasefuncs.inc.php msgid "You must be logged in before you can disown packages." @@ -1054,7 +1055,7 @@ msgstr "Du måste vara inloggad före du kan göra paket härrelösa." #: aurweb/routers/packages.py msgid "You are not allowed to disown one of the packages you selected." 
-msgstr "" +msgstr "Du har inte tillåtelse att göra ett av paketen du valde herrelöst." #: lib/pkgbasefuncs.inc.php msgid "You did not select any packages to adopt." @@ -1354,7 +1355,7 @@ msgid "" "If you do not hide your email address, it is visible to all registered AUR " "users. If you hide your email address, it is visible to members of the Arch " "Linux staff only." -msgstr "" +msgstr "Om du inte döljer din e-postadress är den synlig för alla registrerade AUR-användare. Om du döljer din e-postadress är den endast synlig för medlemmar av Arch Linux-personalen." #: template/account_edit_form.php msgid "Backup Email Address" @@ -1370,14 +1371,14 @@ msgstr "Ange eventuellt en sekundär e-postadress som kan användas för att åt msgid "" "Password reset links are always sent to both your primary and your backup " "email address." -msgstr "" +msgstr "Länkar för återställning av lösenord skickas alltid till både din primära och din reserv epostadress." #: template/account_edit_form.php #, php-format msgid "" "Your backup email address is always only visible to members of the Arch " "Linux staff, independent of the %s setting." -msgstr "" +msgstr "Din backup-e-postadress är alltid endast synlig för medlemmar av Arch Linux-personalen, oberoende av %s inställningen." #: template/account_edit_form.php msgid "Language" @@ -1435,7 +1436,7 @@ msgstr "Ditt nuvarande lösenord" msgid "" "To protect the AUR against automated account creation, we kindly ask you to " "provide the output of the following command:" -msgstr "" +msgstr "För att skydda AUR mot automatiskt kontoskapande ber vi dig att tillhandahålla utdata från följande kommando:" #: template/account_edit_form.php msgid "Answer" @@ -1654,7 +1655,7 @@ msgstr "Lägg till kommentar" msgid "" "Git commit identifiers referencing commits in the AUR package repository and" " URLs are converted to links automatically." 
-msgstr "" +msgstr "Git commit-identifierare som refererar till commits i AUR-paketförrådet och URL:er konverteras automatiskt till länkar." #: template/pkg_comment_form.php #, php-format @@ -1829,7 +1830,7 @@ msgid "" "By submitting a deletion request, you ask a Trusted User to delete the " "package base. This type of request should be used for duplicates, software " "abandoned by upstream, as well as illegal and irreparably broken packages." -msgstr "" +msgstr "Genom att skicka en begäran om borttagning ber du en Trusted User att ta bort paketbasen. Denna typ av begäran bör användas för dubbletter, programvara som övergetts av uppström, såväl som olagliga och irreparabelt trasiga paket." #: template/pkgreq_form.php msgid "" @@ -1837,7 +1838,7 @@ msgid "" "base and transfer its votes and comments to another package base. Merging a " "package does not affect the corresponding Git repositories. Make sure you " "update the Git history of the target package yourself." -msgstr "" +msgstr "Genom att skicka en sammanslagningsförfrågan ber du en Trusted User att ta bort paketbasen och överföra dess röster och kommentarer till en annan paketbas. Att slå samman ett paket påverkar inte motsvarande Git-förråd. Se till att du själv uppdaterar Git-historiken för målpaketet." #: template/pkgreq_form.php msgid "" @@ -1845,7 +1846,7 @@ msgid "" "package base. Please only do this if the package needs maintainer action, " "the maintainer is MIA and you already tried to contact the maintainer " "previously." -msgstr "" +msgstr "Genom att skicka in en föräldralös begäran ber du en Trusted User att avfärda paketbasen. Vänligen gör endast detta om paketet behöver underhållsåtgärder, underhållaren är MIA och du redan försökt kontakta underhållaren tidigare." #: template/pkgreq_results.php msgid "No requests matched your search criteria." 
@@ -1907,7 +1908,7 @@ msgstr "Stäng" #: template/pkgreq_results.php msgid "Pending" -msgstr "" +msgstr "Väntar på" #: template/pkgreq_results.php msgid "Closed" @@ -2026,7 +2027,7 @@ msgstr "Version" msgid "" "Popularity is calculated as the sum of all votes with each vote being " "weighted with a factor of %.2f per day since its creation." -msgstr "" +msgstr "Populäritet räknas ut som summan av alla röster, och alla röster har en vikt med en faktor av %.2f per dag sedan den skapades. " #: template/pkg_search_results.php template/tu_details.php #: template/tu_list.php @@ -2180,18 +2181,18 @@ msgid "" "A password reset request was submitted for the account {user} associated " "with your email address. If you wish to reset your password follow the link " "[1] below, otherwise ignore this message and nothing will happen." -msgstr "" +msgstr "En begäran om lösenordsåterställning skickades för kontot {user} som är kopplat till din e-postadress. Om du vill återställa ditt lösenord, följ länken [1] nedan, annars ignorera detta meddelande och ingenting kommer att hända." #: scripts/notify.py msgid "Welcome to the Arch User Repository" -msgstr "" +msgstr "Välkommen till Arch User Repository" #: scripts/notify.py msgid "" "Welcome to the Arch User Repository! In order to set an initial password for" " your new account, please click the link [1] below. If the link does not " "work, try copying and pasting it into your browser." -msgstr "" +msgstr "Välkommen till Arch User Repository! För att ställa in ett första lösenord för ditt nya konto, klicka på länken [1] nedan. Om länken inte fungerar, försök att kopiera och klistra in den i din webbläsare." #: scripts/notify.py #, python-brace-format @@ -2208,7 +2209,7 @@ msgstr "{user} [1] lade till följande kommentar till {pkgbase} [2]:" msgid "" "If you no longer wish to receive notifications about this package, please go" " to the package page [2] and select \"{label}\"." 
-msgstr "" +msgstr "Om du inte längre vill få meddelanden om detta paket, gå till paketsidan [2] och välj \"{label}\"." #: scripts/notify.py #, python-brace-format @@ -2218,47 +2219,47 @@ msgstr "AUR paket uppdatering: {pkgbase}" #: scripts/notify.py #, python-brace-format msgid "{user} [1] pushed a new commit to {pkgbase} [2]." -msgstr "" +msgstr "{user} [1] knuffade en ny commit till {pkgbase} [2]." #: scripts/notify.py #, python-brace-format msgid "AUR Out-of-date Notification for {pkgbase}" -msgstr "" +msgstr "AUR inaktuell avisering för {pkgbase}" #: scripts/notify.py #, python-brace-format msgid "Your package {pkgbase} [1] has been flagged out-of-date by {user} [2]:" -msgstr "" +msgstr "Ditt paket {pkgbase} [1] har flaggats som inaktuellt av {user} [2]:" #: scripts/notify.py #, python-brace-format msgid "AUR Ownership Notification for {pkgbase}" -msgstr "" +msgstr "AUR-ägarskapsmeddelande för {pkgbase}" #: scripts/notify.py #, python-brace-format msgid "The package {pkgbase} [1] was adopted by {user} [2]." -msgstr "" +msgstr "Paketet {pkgbase} [1] adopterades av {user} [2]." #: scripts/notify.py #, python-brace-format msgid "The package {pkgbase} [1] was disowned by {user} [2]." -msgstr "" +msgstr "Paketet {pkgbase} [1] gjordes herrelöst av {user} [2]." #: scripts/notify.py #, python-brace-format msgid "AUR Co-Maintainer Notification for {pkgbase}" -msgstr "" +msgstr "AUR sam-ansvarig meddelande för {pkgbase}" #: scripts/notify.py #, python-brace-format msgid "You were added to the co-maintainer list of {pkgbase} [1]." -msgstr "" +msgstr "Du blev tillagd till sam-ansvarig listan för {pkgbase} [1]." #: scripts/notify.py #, python-brace-format msgid "You were removed from the co-maintainer list of {pkgbase} [1]." -msgstr "" +msgstr "Du blev borttagen från sam-ansvarig listan för {pkgbase} [1]." 
#: scripts/notify.py #, python-brace-format @@ -2272,7 +2273,7 @@ msgid "" "\n" "-- \n" "If you no longer wish receive notifications about the new package, please go to [3] and click \"{label}\"." -msgstr "" +msgstr "{user} [1] slog ihop {old} [2] till {new} [3].\n\n--\nOm du inte längre vill få meddelanden om det nya paketet, gå till [3] och klicka på \"{label}\"." #: scripts/notify.py #, python-brace-format @@ -2280,19 +2281,19 @@ msgid "" "{user} [1] deleted {pkgbase} [2].\n" "\n" "You will no longer receive notifications about this package." -msgstr "" +msgstr "{user} [1] raderade {pkgbase} [2].\n\nDu kommer inte längre att få aviseringar om detta paket." #: scripts/notify.py #, python-brace-format msgid "TU Vote Reminder: Proposal {id}" -msgstr "" +msgstr "TU röstningspåminnelse: Förslag {id}" #: scripts/notify.py #, python-brace-format msgid "" "Please remember to cast your vote on proposal {id} [1]. The voting period " "ends in less than 48 hours." -msgstr "" +msgstr "Kom ihåg att lägga din röst på förslaget {id} [1]. Omröstningsperioden slutar om mindre än 48 timmar." #: aurweb/routers/accounts.py msgid "Invalid account type provided." @@ -2308,33 +2309,33 @@ msgstr "Du har inte behörighet att ändra den här användarens kontotyp till % #: aurweb/packages/requests.py msgid "No due existing orphan requests to accept for %s." -msgstr "" +msgstr "Inga befintliga herrelösa begäranden att acceptera för %s." #: aurweb/asgi.py msgid "Internal Server Error" -msgstr "" +msgstr "Internt serverfel" #: templates/errors/500.html msgid "A fatal error has occurred." -msgstr "" +msgstr "Ett allvarligt fel har inträffat." #: templates/errors/500.html msgid "" "Details have been logged and will be reviewed by the postmaster posthaste. " "We apologize for any inconvenience this may have caused." -msgstr "" +msgstr "Detaljer har loggats och kommer att granskas av postmästaren så snabbt som möjligt. Vi ber om ursäkt för eventuella besvär som detta kan ha orsakat." 
#: aurweb/scripts/notify.py msgid "AUR Server Error" -msgstr "" +msgstr "AUR serverfel" #: templates/pkgbase/merge.html templates/packages/delete.html #: templates/packages/disown.html msgid "Related package request closure comments..." -msgstr "" +msgstr "Relaterade paketförfrågningar stängningskommentar..." #: templates/pkgbase/merge.html templates/packages/delete.html msgid "" "This action will close any pending package requests related to it. If " "%sComments%s are omitted, a closure comment will be autogenerated." -msgstr "" +msgstr "Denna åtgärd kommer att stänga alla väntande paketförfrågningar relaterade till det paketet. %sOm kommentarer%s utelämnas kommer en stängningskommentar att automatiskt genereras." From 9497f6e671dff3c742ce9a4ec7d8226bff102121 Mon Sep 17 00:00:00 2001 From: Jelle van der Waa Date: Sun, 14 Aug 2022 15:43:13 +0200 Subject: [PATCH 063/415] fix(aurweb): resolve exception in ratelimit Redis's get() method can return None which makes an RPC request error out: File "/srv/http/aurweb/aurweb/ratelimit.py", line 103, in check_ratelimit requests = int(requests.decode()) AttributeError: 'NoneType' object has no attribute 'decode' --- aurweb/ratelimit.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aurweb/ratelimit.py b/aurweb/ratelimit.py index 659ab6b8..86063f5d 100644 --- a/aurweb/ratelimit.py +++ b/aurweb/ratelimit.py @@ -94,7 +94,7 @@ def check_ratelimit(request: Request): # valid cache value will be returned which must be converted # to an int. Otherwise, use the database record returned # by update_ratelimit. - if not config.getboolean("ratelimit", "cache"): + if not config.getboolean("ratelimit", "cache") or requests is None: # If we got nothing from pipeline.get, we did not use # the Redis path of logic: use the DB record's count. 
requests = record.Requests From b4e0aea2b73e2cde6968dac72fb1b2c9fcb5a17b Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Sun, 14 Aug 2022 19:25:49 -0700 Subject: [PATCH 064/415] Merged bugfixes Brings in: 9497f6e671dff3c742ce9a4ec7d8226bff102121 Closes #512 Thanks, jelle! Signed-off-by: Kevin Morris From 801df832e53e56bc364f73fb88a1315eea91fb55 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 15 Aug 2022 10:06:44 -0700 Subject: [PATCH 065/415] fix(rpc): correct URLPath in package results This was incorrectly using the particular Package record's name to format options.snapshot_uri in order to produce URLPath. It should, instead, use the PackageBase record's name, which this commit resolves. Bug reported by thomy2000 Closes #382 Signed-off-by: Kevin Morris --- aurweb/rpc.py | 2 +- test/test_rpc.py | 22 ++++++++++++++++++++++ 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/aurweb/rpc.py b/aurweb/rpc.py index f04de7d6..3ea7e070 100644 --- a/aurweb/rpc.py +++ b/aurweb/rpc.py @@ -138,7 +138,7 @@ class RPC: "Version": package.Version, "Description": package.Description, "URL": package.URL, - "URLPath": snapshot_uri % package.Name, + "URLPath": snapshot_uri % package.PackageBaseName, "NumVotes": package.NumVotes, "Popularity": pop, "OutOfDate": package.OutOfDateTS, diff --git a/test/test_rpc.py b/test/test_rpc.py index 0e24467a..c0861d3d 100644 --- a/test/test_rpc.py +++ b/test/test_rpc.py @@ -297,6 +297,28 @@ def test_rpc_singular_info(client: TestClient, assert response_data == expected_data +def test_rpc_split_package_urlpath(client: TestClient, user: User): + with db.begin(): + pkgbase = db.create(PackageBase, Name="pkg", + Maintainer=user, Packager=user) + pkgs = [ + db.create(Package, PackageBase=pkgbase, Name="pkg_1"), + db.create(Package, PackageBase=pkgbase, Name="pkg_2"), + ] + + with client as request: + response = request.get("/rpc", params={ + "v": 5, + "type": "info", + "arg": [pkgs[0].Name], + }) + + data = orjson.loads(response.text) 
+ snapshot_uri = config.get("options", "snapshot_uri") + urlpath = data.get("results")[0].get("URLPath") + assert urlpath == (snapshot_uri % pkgbase.Name) + + def test_rpc_nonexistent_package(client: TestClient): # Make dummy request. with client as request: From 7b047578fd5b64c0f8af47130aa02e7271690f6d Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 15 Aug 2022 12:10:55 -0700 Subject: [PATCH 066/415] fix: correct kwarg name for approved users of creds.has_credential Signed-off-by: Kevin Morris --- aurweb/auth/creds.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/aurweb/auth/creds.py b/aurweb/auth/creds.py index 100aad8c..05b30d5d 100644 --- a/aurweb/auth/creds.py +++ b/aurweb/auth/creds.py @@ -69,8 +69,8 @@ cred_filters = { def has_credential(user: User, credential: int, - approved_users: list = tuple()): + approved: list = tuple()): - if user in approved_users: + if user in approved: return True return user.AccountTypeID in cred_filters[credential] From 7a52da5587f3c9d751b3b88526889a3f818c9754 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 15 Aug 2022 13:57:32 -0700 Subject: [PATCH 067/415] fix: guard POST keywords & allow co-maintainers to see keyword form This addresses a severe security issue, which is omitted from this git message for obscurity purposes. Otherwise, it allows co-maintainers to see the keyword form when viewing a package they co-maintain. 
Closes #378 Signed-off-by: Kevin Morris --- aurweb/routers/pkgbase.py | 6 ++++++ templates/partials/packages/details.html | 4 ++-- test/test_pkgbase_routes.py | 19 +++++++++++++++++-- test/test_templates.py | 4 +++- 4 files changed, 28 insertions(+), 5 deletions(-) diff --git a/aurweb/routers/pkgbase.py b/aurweb/routers/pkgbase.py index 6cd4199d..1bca5ea3 100644 --- a/aurweb/routers/pkgbase.py +++ b/aurweb/routers/pkgbase.py @@ -96,6 +96,12 @@ async def pkgbase_keywords(request: Request, name: str, keywords: str = Form(default=str())): pkgbase = get_pkg_or_base(name, PackageBase) + approved = [pkgbase.Maintainer] + [c.User for c in pkgbase.comaintainers] + has_cred = creds.has_credential(request.user, creds.PKGBASE_SET_KEYWORDS, + approved=approved) + if not has_cred: + return Response(status_code=HTTPStatus.UNAUTHORIZED) + # Lowercase all keywords. Our database table is case insensitive, # and providing CI duplicates of keywords is erroneous. keywords = set(k.lower() for k in keywords.split()) diff --git a/templates/partials/packages/details.html b/templates/partials/packages/details.html index 771b311d..ca7159be 100644 --- a/templates/partials/packages/details.html +++ b/templates/partials/packages/details.html @@ -33,10 +33,10 @@ {% endif %} - {% if pkgbase.keywords.count() or request.user.has_credential(creds.PKGBASE_SET_KEYWORDS, approved=[pkgbase.Maintainer]) %} + {% if pkgbase.keywords.count() or request.user.has_credential(creds.PKGBASE_SET_KEYWORDS, approved=[pkgbase.Maintainer] + comaintainers) %} - {% if request.user.has_credential(creds.PKGBASE_SET_KEYWORDS, approved=[pkgbase.Maintainer]) %} + {% if request.user.has_credential(creds.PKGBASE_SET_KEYWORDS, approved=[pkgbase.Maintainer] + comaintainers) %}
    {{ "Total" | tr }} {{ "Trusted Users" | tr }}:{{ "Total" | tr }} {{ "Trusted Users" | tr }}: {{ trusted_user_count }}
    {{ "Active" | tr }} {{ "Trusted Users" | tr }}:{{ "Active" | tr }} {{ "Trusted Users" | tr }}: {{ active_trusted_user_count }}
    {{ "Maintainer" | tr }}: - {% if pkgbase.Maintainer %} + {% if request.user.is_authenticated() and pkgbase.Maintainer %} {{ pkgbase.Maintainer.Username }} @@ -118,6 +118,9 @@ {% endif %} {% else %} {{ pkgbase.Maintainer.Username | default("None" | tr) }} + {% if comaintainers %} + ({{ comaintainers|join(', ') }}) + {% endif %} {% endif %}
    {{ "Keywords" | tr }}:
    Date: Mon, 15 Aug 2022 14:49:34 -0700 Subject: [PATCH 068/415] fix: secure access to comment edits to user who owns the comment Found along with the previous commit to be a security hole in our implementation. This commit resolves an issue regarding comment editing. Signed-off-by: Kevin Morris --- aurweb/routers/pkgbase.py | 2 ++ test/test_pkgbase_routes.py | 16 ++++++++++++++++ 2 files changed, 18 insertions(+) diff --git a/aurweb/routers/pkgbase.py b/aurweb/routers/pkgbase.py index 1bca5ea3..c735f474 100644 --- a/aurweb/routers/pkgbase.py +++ b/aurweb/routers/pkgbase.py @@ -286,6 +286,8 @@ async def pkgbase_comment_post( if not comment: raise HTTPException(status_code=HTTPStatus.BAD_REQUEST) + elif request.user.ID != db_comment.UsersID: + raise HTTPException(status_code=HTTPStatus.UNAUTHORIZED) # If the provided comment is different than the record's version, # update the db record. diff --git a/test/test_pkgbase_routes.py b/test/test_pkgbase_routes.py index 5c44ea47..f6bcf5d7 100644 --- a/test/test_pkgbase_routes.py +++ b/test/test_pkgbase_routes.py @@ -467,6 +467,22 @@ def test_pkgbase_comments(client: TestClient, maintainer: User, user: User, assert "form" in data +def test_pkgbase_comment_edit_unauthorized(client: TestClient, + user: User, + maintainer: User, + package: Package, + comment: PackageComment): + pkgbase = package.PackageBase + + cookies = {"AURSID": maintainer.login(Request(), "testPassword")} + with client as request: + endp = f"/pkgbase/{pkgbase.Name}/comments/{comment.ID}" + response = request.post(endp, data={ + "comment": "abcd im trying to change this comment." 
+ }, cookies=cookies) + assert response.status_code == HTTPStatus.UNAUTHORIZED + + def test_pkgbase_comment_delete(client: TestClient, maintainer: User, user: User, From 33bf5df236166cbbbd8ef1b611145effc5813acd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Leon=20M=C3=B6ller?= Date: Fri, 12 Aug 2022 18:43:18 +0200 Subject: [PATCH 069/415] fix: show unflag link to flagger While the flagger is allowed to unflag a package, the link to do so is hidden from them. Fix by adding the flagger to the unflag list. Fix #380 --- aurweb/pkgbase/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aurweb/pkgbase/util.py b/aurweb/pkgbase/util.py index 5ffe490e..63621d63 100644 --- a/aurweb/pkgbase/util.py +++ b/aurweb/pkgbase/util.py @@ -46,7 +46,7 @@ def make_context(request: Request, pkgbase: PackageBase, ).all() ] context["unflaggers"] = context["comaintainers"].copy() - context["unflaggers"].append(pkgbase.Maintainer) + context["unflaggers"].extend([pkgbase.Maintainer, pkgbase.Flagger]) context["packages_count"] = pkgbase.packages.count() context["keywords"] = pkgbase.keywords From fb1fb2ef3b6ff441ce30c5fd50781376e4cd02c4 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Wed, 17 Aug 2022 09:59:56 -0700 Subject: [PATCH 070/415] feat: documentation for web authentication (login, verification) Signed-off-by: Kevin Morris --- doc/web-auth.md | 111 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 111 insertions(+) create mode 100644 doc/web-auth.md diff --git a/doc/web-auth.md b/doc/web-auth.md new file mode 100644 index 00000000..5f6679d4 --- /dev/null +++ b/doc/web-auth.md @@ -0,0 +1,111 @@ +# aurweb Web Authentication + +aurweb uses an HTTP cookie to persist user sessions across requests. +This cookie **must** be delivered with a request in order to be considered +an authenticated user. + +See [HTTP Cookie](#http-cookie) for detailed information about the cookie. 
+ +## HTTP Cookie + +aurweb utilizes an HTTP cookie by the name of `AURSID` to track +user authentication across requests. + +This cookie's requirements changes due to aurweb's configuration +in the following ways: + +- `options.disable_http_login: 0` + - [Samesite=LAX](https://developer.mozilla.org/en-US/docs/Web/HTTP/Cookies#samesite_attribute), Max-Age +- `options.disable_http_login: 1` + - [Secure, HttpOnly](https://developer.mozilla.org/en-US/docs/Web/HTTP/Cookies#restrict_access_to_cookies), [Samesite=Strict](https://developer.mozilla.org/en-US/docs/Web/HTTP/Cookies#samesite_attribute), Max-Age + +### Max-Age + +The value used for the `AURSID` Max-Age attribute is decided based +off of the "Remember Me" checkbox on the login page. Both paths +use their own independent configuration for the number of seconds +that each type of session should stay alive. + +- "Remember Me" unchecked while logging in + - `options.login_timeout` is used +- "Remember Me" checked while logging in + - `options.persistent_cookie_timeout` is used + +Both `options.login_timeout` and `options.persistent_cookie_timeout` +indicate the number of seconds the session should live. + +### Notes + +At all times, aur.archlinux.org operates over HTTPS. Secure cookies will +only remain intact when subsequently requesting an aurweb route through +the HTTPS scheme at the same host as the cookie was obtained. + +## Login Process + +When a user logs in to aurweb, the following steps are taken: + +1. Was a Referer header delivered from an address starting with +`{aurweb_url}/login`? + 1. No, an HTTP 400 Bad Request response is returned + 2. Yes, move on to 2 +2. Does a Users database record exist for the given username/email? + 1. No, you are returned to the login page with `Bad username or password.` + error + 2. Yes, move on to 3 +3. Is the user suspended? + 1. Yes, you are returned to the login page with `Account Suspended` error + 2. No, move on to 4 +4. Can the user login with the given password? 
+ 1. No, you are returned to the login page with `Bad username or password.` + error + 2. Yes, move on to 5 +5. Update the user's `LastLogin` and `LastLoginIPAddress` columns +6. Does the user have a related Sessions record? + 1. No, generate a new Sessions record with a new unique `SessionID` + 2. Yes, update the Sessions record's `SessionID` column with a new unique + string and update the Sessions record's `LastUpdateTS` column if it has + expired + 3. In both cases, set the user's `InactivityTS` column to `0` + 4. In both cases, return the new `SessionID` column value and move on to 7 +7. Return a redirect to the `next` GET variable with the +following cookies set: + 1. `AURSID` + - Unique session string matching the user's related + `Sessions.SessionID` column + 2. `AURTZ` + - User's timezone setting + 3. `AURLANG` + - User's language setting + 4. `AURREMEMBER` + - Boolean state of the "Remember Me" checkbox when login submitted + +## Auth Verification + +When a request is made toward aurweb, a middleware is responsible for +verifying the user's auth cookie. If no valid `AURSID` cookie could be +found for a user in the database, the request is considered unauthenticated. + +The following list of steps describes exactly how this verification works: +1. Was the `AURSID` cookie delivered? + 1. No, the algorithm ends, you are considered unauthenticated + 2. Yes, move on to 2 +2. Was the `AURREMEMBER` cookie delivered with a value of 1? + 1. No, set the expected session timeout **T** to `options.login_timeout` + 2. Yes, set the expected session timeout **T** to + `options.persistent_cookie_timeout` +3. Does a Sessions database record exist which matches the `AURSID`? + 1. No, the algorithm ends, you are considered unauthenticated + 2. Yes, move on to 4 +4. Does the Sessions record's LastUpdateTS column fit within `utcnow - T`? + 1. No, the Sessions record at hand is deleted, the algorithm ends, you + are considered unauthenticated + 2. Yes, move on to 5 +5. 
You are considered authenticated + +## aur.archlinux.org Auth-Related Configuration + +- Operates over HTTPS with a Let's Encrypt SSL certificate +- `options.disable_http_login: 1` +- `options.login_timeout: ` +- `options.persistent_cookie_timeout: ` + From f10732960cd17e680e1a8a7b420b0b15ff391099 Mon Sep 17 00:00:00 2001 From: Joakim Saario Date: Thu, 18 Aug 2022 18:35:25 +0200 Subject: [PATCH 071/415] fix: Use SameSite=Lax on cookies --- aurweb/cookies.py | 10 ++++---- doc/web-auth.md | 2 +- test/test_auth_routes.py | 52 ++++++++++++++++++++++++++++++++++------ 3 files changed, 50 insertions(+), 14 deletions(-) diff --git a/aurweb/cookies.py b/aurweb/cookies.py index 442a4c0a..58d14515 100644 --- a/aurweb/cookies.py +++ b/aurweb/cookies.py @@ -5,15 +5,13 @@ from aurweb import config def samesite() -> str: - """ Produce cookie SameSite value based on options.disable_http_login. + """ Produce cookie SameSite value. - When options.disable_http_login is True, "strict" is returned. Otherwise, - "lax" is returned. 
+ Currently this is hard-coded to return "lax" - :returns "strict" if options.disable_http_login else "lax" + :returns "lax" """ - secure = config.getboolean("options", "disable_http_login") - return "strict" if secure else "lax" + return "lax" def timeout(extended: bool) -> int: diff --git a/doc/web-auth.md b/doc/web-auth.md index 5f6679d4..1161af6f 100644 --- a/doc/web-auth.md +++ b/doc/web-auth.md @@ -17,7 +17,7 @@ in the following ways: - `options.disable_http_login: 0` - [Samesite=LAX](https://developer.mozilla.org/en-US/docs/Web/HTTP/Cookies#samesite_attribute), Max-Age - `options.disable_http_login: 1` - - [Secure, HttpOnly](https://developer.mozilla.org/en-US/docs/Web/HTTP/Cookies#restrict_access_to_cookies), [Samesite=Strict](https://developer.mozilla.org/en-US/docs/Web/HTTP/Cookies#samesite_attribute), Max-Age + - [Samesite=LAX](https://developer.mozilla.org/en-US/docs/Web/HTTP/Cookies#samesite_attribute), [Secure, HttpOnly](https://developer.mozilla.org/en-US/docs/Web/HTTP/Cookies#restrict_access_to_cookies) ### Max-Age diff --git a/test/test_auth_routes.py b/test/test_auth_routes.py index 8467adea..5942edcf 100644 --- a/test/test_auth_routes.py +++ b/test/test_auth_routes.py @@ -109,14 +109,52 @@ def test_login_email(client: TestClient, user: user): assert "AURSID" in resp.cookies -def mock_getboolean(a, b): - if a == "options" and b == "disable_http_login": - return True - return bool(aurweb.config.get(a, b)) +def mock_getboolean(**overrided_configs): + mocked_config = { + tuple(config.split("__")): value + for config, value in overrided_configs.items() + } + + def side_effect(*args): + return mocked_config.get(args, bool(aurweb.config.get(*args))) + + return side_effect -@mock.patch("aurweb.config.getboolean", side_effect=mock_getboolean) -def test_secure_login(getboolean: bool, client: TestClient, user: User): +@mock.patch( + "aurweb.config.getboolean", + side_effect=mock_getboolean(options__disable_http_login=False) +) +def 
test_insecure_login(getboolean: mock.Mock, client: TestClient, user: User): + post_data = { + "user": user.Username, + "passwd": "testPassword", + "next": "/" + } + + # Perform a login request with the data matching our user. + with client as request: + response = request.post("/login", data=post_data, + allow_redirects=False) + + # Make sure we got the expected status out of it. + assert response.status_code == int(HTTPStatus.SEE_OTHER) + + # Let's check what we got in terms of cookies for AURSID. + # Make sure that an insecure (non-Secure, non-HttpOnly) cookie got passed to us. + cookie = next(c for c in response.cookies if c.name == "AURSID") + assert cookie.secure is False + assert cookie.has_nonstandard_attr("HttpOnly") is False + assert cookie.has_nonstandard_attr("SameSite") is True + assert cookie.get_nonstandard_attr("SameSite") == "lax" + assert cookie.value is not None and len(cookie.value) > 0 + + +@mock.patch( + "aurweb.config.getboolean", + side_effect=mock_getboolean(options__disable_http_login=True) +) +def test_secure_login(getboolean: mock.Mock, client: TestClient, user: User): """ In this test, we check to verify the course of action taken by starlette when providing secure=True to a response cookie.
This is achieved by mocking aurweb.config.getboolean to return @@ -154,7 +192,7 @@ def test_secure_login(getboolean: bool, client: TestClient, user: User): assert cookie.secure is True assert cookie.has_nonstandard_attr("HttpOnly") is True assert cookie.has_nonstandard_attr("SameSite") is True - assert cookie.get_nonstandard_attr("SameSite") == "strict" + assert cookie.get_nonstandard_attr("SameSite") == "lax" assert cookie.value is not None and len(cookie.value) > 0 # Let's make sure we actually have a session relationship From 4303086c0e59d510c2f5ad28083574889340eba6 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Thu, 18 Aug 2022 14:47:24 -0700 Subject: [PATCH 072/415] Merged branch 'sameorigin-lax' Closes #351 Signed-off-by: Kevin Morris From 8e43932aa6497ccf024e957687fd87120f2125cf Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Thu, 18 Aug 2022 14:57:42 -0700 Subject: [PATCH 073/415] fix(doc): re-add Max-Age to list of secure cookie attributes Signed-off-by: Kevin Morris --- doc/web-auth.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/web-auth.md b/doc/web-auth.md index 1161af6f..17284889 100644 --- a/doc/web-auth.md +++ b/doc/web-auth.md @@ -17,7 +17,7 @@ in the following ways: - `options.disable_http_login: 0` - [Samesite=LAX](https://developer.mozilla.org/en-US/docs/Web/HTTP/Cookies#samesite_attribute), Max-Age - `options.disable_http_login: 1` - - [Samesite=LAX](https://developer.mozilla.org/en-US/docs/Web/HTTP/Cookies#samesite_attribute), [Secure, HttpOnly](https://developer.mozilla.org/en-US/docs/Web/HTTP/Cookies#restrict_access_to_cookies) + - [Samesite=LAX](https://developer.mozilla.org/en-US/docs/Web/HTTP/Cookies#samesite_attribute), [Secure, HttpOnly](https://developer.mozilla.org/en-US/docs/Web/HTTP/Cookies#restrict_access_to_cookies), Max-Age ### Max-Age From fd4aaed208fb862c2f66edbe122f4c4e5d52c765 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Wed, 17 Aug 2022 10:01:06 -0700 Subject: [PATCH 074/415] fix: use 
max-age for all cookie expirations in addition, remove cookie expiration for AURREMEMBER -- we don't really care about a session time for this cookie, it merely acts as a flag given out on login to remember what the user selected Signed-off-by: Kevin Morris --- aurweb/routers/auth.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/aurweb/routers/auth.py b/aurweb/routers/auth.py index 9f465388..50cec419 100644 --- a/aurweb/routers/auth.py +++ b/aurweb/routers/auth.py @@ -6,7 +6,7 @@ from sqlalchemy import or_ import aurweb.config -from aurweb import cookies, db, time +from aurweb import cookies, db from aurweb.auth import requires_auth, requires_guest from aurweb.exceptions import handle_form_exceptions from aurweb.l10n import get_translator_for_request @@ -65,15 +65,11 @@ async def login_post(request: Request, return await login_template(request, next, errors=["Bad username or password."]) - login_timeout = aurweb.config.getint("options", "login_timeout") - - expires_at = int(time.utcnow() + max(cookie_timeout, login_timeout)) - response = RedirectResponse(url=next, status_code=HTTPStatus.SEE_OTHER) secure = aurweb.config.getboolean("options", "disable_http_login") - response.set_cookie("AURSID", sid, expires=expires_at, + response.set_cookie("AURSID", sid, max_age=cookie_timeout, secure=secure, httponly=secure, samesite=cookies.samesite()) response.set_cookie("AURTZ", user.Timezone, @@ -83,7 +79,6 @@ async def login_post(request: Request, secure=secure, httponly=secure, samesite=cookies.samesite()) response.set_cookie("AURREMEMBER", remember_me, - expires=expires_at, secure=secure, httponly=secure, samesite=cookies.samesite()) return response From ab2956eef79aed68e8da3c37b237950916f78c25 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Thu, 18 Aug 2022 16:02:03 -0700 Subject: [PATCH 075/415] feat: add pytest unit of independent user unflagging Signed-off-by: Kevin Morris --- test/test_pkgbase_routes.py | 49 
+++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/test/test_pkgbase_routes.py b/test/test_pkgbase_routes.py index f6bcf5d7..8be08f83 100644 --- a/test/test_pkgbase_routes.py +++ b/test/test_pkgbase_routes.py @@ -1457,3 +1457,52 @@ def test_unauthorized_pkgbase_keywords(client: TestClient, package: Package): endp = f"/pkgbase/{pkgbase.Name}/keywords" response = request.post(endp, cookies=cookies) assert response.status_code == HTTPStatus.UNAUTHORIZED + + +def test_independent_user_unflag(client: TestClient, user: User, + package: Package): + with db.begin(): + flagger = db.create(User, Username="test_flagger", + Email="test_flagger@example.com", + Passwd="testPassword") + + pkgbase = package.PackageBase + cookies = {"AURSID": flagger.login(Request(), "testPassword")} + with client as request: + endp = f"/pkgbase/{pkgbase.Name}/flag" + response = request.post(endp, data={ + "comments": "This thing needs a flag!" + }, cookies=cookies, allow_redirects=True) + assert response.status_code == HTTPStatus.OK + + # At this point, we've flagged it as `flagger`. + # Now, we should be able to view the "Unflag package" link on the package + # page when browsing as that `flagger` user. + with client as request: + endp = f"/pkgbase/{pkgbase.Name}" + response = request.get(endp, cookies=cookies, allow_redirects=True) + assert response.status_code == HTTPStatus.OK + + # Assert that the "Unflag package" link appears in the DOM. + root = parse_root(response.text) + elems = root.xpath('//input[@name="do_UnFlag"]') + assert len(elems) == 1 + + # Now, unflag the package by "clicking" the "Unflag package" link. + with client as request: + endp = f"/pkgbase/{pkgbase.Name}/unflag" + response = request.post(endp, cookies=cookies, allow_redirects=True) + assert response.status_code == HTTPStatus.OK + + # For the last time, let's check the GET response. 
The package should + # not show as flagged anymore, and thus the "Unflag package" link + # should be missing. + with client as request: + endp = f"/pkgbase/{pkgbase.Name}" + response = request.get(endp, cookies=cookies, allow_redirects=True) + assert response.status_code == HTTPStatus.OK + + # Assert that the "Unflag package" link does not appear in the DOM. + root = parse_root(response.text) + elems = root.xpath('//input[@name="do_UnFlag"]') + assert len(elems) == 0 From 08d485206cc821f4b041c7ace163ac54821984ce Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Wed, 3 Aug 2022 16:50:52 +0300 Subject: [PATCH 076/415] feature: allow co-maintainers to disown their pkg Derived off of original work done by Leonidas Spyropoulos at https://gitlab.archlinux.org/archlinux/aurweb/-/merge_requests/503 This revision of that original work finishes off the inconsistencies mentioned in the original MR and adds a small bit of testing for more regression checks. Fixes: #360 Signed-off-by: Kevin Morris --- aurweb/pkgbase/actions.py | 11 +++ aurweb/routers/pkgbase.py | 19 +++-- templates/partials/packages/actions.html | 2 +- templates/pkgbase/disown.html | 38 +++++----- test/test_pkgbase_routes.py | 88 +++++++++++++++++++++--- 5 files changed, 123 insertions(+), 35 deletions(-) diff --git a/aurweb/pkgbase/actions.py b/aurweb/pkgbase/actions.py index 46609f89..27143d51 100644 --- a/aurweb/pkgbase/actions.py +++ b/aurweb/pkgbase/actions.py @@ -50,6 +50,12 @@ def pkgbase_disown_instance(request: Request, pkgbase: PackageBase) -> None: notifs = [notify.DisownNotification(disowner.ID, pkgbase.ID)] is_maint = disowner == pkgbase.Maintainer + + comaint = pkgbase.comaintainers.filter( + PackageComaintainer.User == disowner + ).one_or_none() + is_comaint = comaint is not None + if is_maint: with db.begin(): # Comaintainer with the lowest Priority value; next-in-line. 
@@ -63,6 +69,11 @@ def pkgbase_disown_instance(request: Request, pkgbase: PackageBase) -> None: else: # Otherwise, just orphan the package completely. pkgbase.Maintainer = None + elif is_comaint: + # This disown request is from a Comaintainer + with db.begin(): + notif = pkgbaseutil.remove_comaintainer(comaint) + notifs.append(notif) elif request.user.has_credential(creds.PKGBASE_DISOWN): # Otherwise, the request user performing this disownage is a # Trusted User and we treat it like a standard orphan request. diff --git a/aurweb/routers/pkgbase.py b/aurweb/routers/pkgbase.py index c735f474..1f09cfc8 100644 --- a/aurweb/routers/pkgbase.py +++ b/aurweb/routers/pkgbase.py @@ -545,15 +545,18 @@ async def pkgbase_disown_get(request: Request, name: str, next: str = Query(default=str())): pkgbase = get_pkg_or_base(name, PackageBase) + comaints = {c.User for c in pkgbase.comaintainers} + approved = [pkgbase.Maintainer] + list(comaints) has_cred = request.user.has_credential(creds.PKGBASE_DISOWN, - approved=[pkgbase.Maintainer]) + approved=approved) if not has_cred: - return RedirectResponse(f"/pkgbase/{name}", - HTTPStatus.SEE_OTHER) + return RedirectResponse(f"/pkgbase/{name}", HTTPStatus.SEE_OTHER) context = templates.make_context(request, "Disown Package") context["pkgbase"] = pkgbase context["next"] = next or "/pkgbase/{name}" + context["is_maint"] = request.user == pkgbase.Maintainer + context["is_comaint"] = request.user in comaints return render_template(request, "pkgbase/disown.html", context) @@ -566,8 +569,10 @@ async def pkgbase_disown_post(request: Request, name: str, next: str = Form(default=str())): pkgbase = get_pkg_or_base(name, PackageBase) + comaints = {c.User for c in pkgbase.comaintainers} + approved = [pkgbase.Maintainer] + list(comaints) has_cred = request.user.has_credential(creds.PKGBASE_DISOWN, - approved=[pkgbase.Maintainer]) + approved=approved) if not has_cred: return RedirectResponse(f"/pkgbase/{name}", HTTPStatus.SEE_OTHER) @@ -580,8 +585,9 
@@ async def pkgbase_disown_post(request: Request, name: str, return render_template(request, "pkgbase/disown.html", context, status_code=HTTPStatus.BAD_REQUEST) - with db.begin(): - update_closure_comment(pkgbase, ORPHAN_ID, comments) + if request.user != pkgbase.Maintainer and request.user not in comaints: + with db.begin(): + update_closure_comment(pkgbase, ORPHAN_ID, comments) try: actions.pkgbase_disown_instance(request, pkgbase) @@ -862,7 +868,6 @@ async def pkgbase_merge_post(request: Request, name: str, comments: str = Form(default=str()), confirm: bool = Form(default=False), next: str = Form(default=str())): - pkgbase = get_pkg_or_base(name, PackageBase) context = await make_variable_context(request, "Package Merging") context["pkgbase"] = pkgbase diff --git a/templates/partials/packages/actions.html b/templates/partials/packages/actions.html index 2144b07a..fa8c994f 100644 --- a/templates/partials/packages/actions.html +++ b/templates/partials/packages/actions.html @@ -131,7 +131,7 @@ /> - {% elif request.user.has_credential(creds.PKGBASE_DISOWN, approved=[pkgbase.Maintainer]) %} + {% elif request.user.has_credential(creds.PKGBASE_DISOWN, approved=[pkgbase.Maintainer] + comaintainers) %}
  • {{ "Disown Package" | tr }} diff --git a/templates/pkgbase/disown.html b/templates/pkgbase/disown.html index 3cc7988d..1aedde4f 100644 --- a/templates/pkgbase/disown.html +++ b/templates/pkgbase/disown.html @@ -27,14 +27,16 @@ {% endfor %} -

    - {{ - "This action will close any pending package requests " - "related to it. If %sComments%s are omitted, a closure " - "comment will be autogenerated." - | tr | format("", "") | safe - }} -

    + {% if not is_maint and not is_comaint %} +

    + {{ + "This action will close any pending package requests " + "related to it. If %sComments%s are omitted, a closure " + "comment will be autogenerated." + | tr | format("", "") | safe + }} +

    + {% endif %}

    {{ @@ -47,14 +49,18 @@

    -

    - - -

    + {% if not is_maint and not is_comaint %} +

    + + +

    + {% else %} + + {% endif %}

    {pkg.Name}' - for pkg in provides - ]) + return ", ".join( + [f'{pkg.Name}' for pkg in provides] + ) def get_pkg_or_base( - name: str, - cls: Union[models.Package, models.PackageBase] = models.PackageBase) \ - -> Union[models.Package, models.PackageBase]: - """ Get a PackageBase instance by its name or raise a 404 if + name: str, cls: Union[models.Package, models.PackageBase] = models.PackageBase +) -> Union[models.Package, models.PackageBase]: + """Get a PackageBase instance by its name or raise a 404 if it can't be found in the database. :param name: {Package,PackageBase}.Name @@ -109,8 +106,7 @@ def get_pkg_or_base( return instance -def get_pkgbase_comment(pkgbase: models.PackageBase, id: int) \ - -> models.PackageComment: +def get_pkgbase_comment(pkgbase: models.PackageBase, id: int) -> models.PackageComment: comment = pkgbase.comments.filter(models.PackageComment.ID == id).first() if not comment: raise HTTPException(status_code=HTTPStatus.NOT_FOUND) @@ -122,9 +118,8 @@ def out_of_date(packages: orm.Query) -> orm.Query: return packages.filter(models.PackageBase.OutOfDateTS.isnot(None)) -def updated_packages(limit: int = 0, - cache_ttl: int = 600) -> list[models.Package]: - """ Return a list of valid Package objects ordered by their +def updated_packages(limit: int = 0, cache_ttl: int = 600) -> list[models.Package]: + """Return a list of valid Package objects ordered by their ModifiedTS column in descending order from cache, after setting the cache when no key yet exists. 
@@ -139,10 +134,11 @@ def updated_packages(limit: int = 0, return orjson.loads(packages) with db.begin(): - query = db.query(models.Package).join(models.PackageBase).filter( - models.PackageBase.PackagerUID.isnot(None) - ).order_by( - models.PackageBase.ModifiedTS.desc() + query = ( + db.query(models.Package) + .join(models.PackageBase) + .filter(models.PackageBase.PackagerUID.isnot(None)) + .order_by(models.PackageBase.ModifiedTS.desc()) ) if limit: @@ -152,13 +148,13 @@ def updated_packages(limit: int = 0, for pkg in query: # For each Package returned by the query, append a dict # containing Package columns we're interested in. - packages.append({ - "Name": pkg.Name, - "Version": pkg.Version, - "PackageBase": { - "ModifiedTS": pkg.PackageBase.ModifiedTS + packages.append( + { + "Name": pkg.Name, + "Version": pkg.Version, + "PackageBase": {"ModifiedTS": pkg.PackageBase.ModifiedTS}, } - }) + ) # Store the JSON serialization of the package_updates key into Redis. redis.set("package_updates", orjson.dumps(packages)) @@ -168,9 +164,8 @@ def updated_packages(limit: int = 0, return packages -def query_voted(query: list[models.Package], - user: models.User) -> dict[int, bool]: - """ Produce a dictionary of package base ID keys to boolean values, +def query_voted(query: list[models.Package], user: models.User) -> dict[int, bool]: + """Produce a dictionary of package base ID keys to boolean values, which indicate whether or not the package base has a vote record related to user. 
@@ -180,20 +175,18 @@ def query_voted(query: list[models.Package], """ output = defaultdict(bool) query_set = {pkg.PackageBaseID for pkg in query} - voted = db.query(models.PackageVote).join( - models.PackageBase, - models.PackageBase.ID.in_(query_set) - ).filter( - models.PackageVote.UsersID == user.ID + voted = ( + db.query(models.PackageVote) + .join(models.PackageBase, models.PackageBase.ID.in_(query_set)) + .filter(models.PackageVote.UsersID == user.ID) ) for vote in voted: output[vote.PackageBase.ID] = True return output -def query_notified(query: list[models.Package], - user: models.User) -> dict[int, bool]: - """ Produce a dictionary of package base ID keys to boolean values, +def query_notified(query: list[models.Package], user: models.User) -> dict[int, bool]: + """Produce a dictionary of package base ID keys to boolean values, which indicate whether or not the package base has a notification record related to user. @@ -203,19 +196,17 @@ def query_notified(query: list[models.Package], """ output = defaultdict(bool) query_set = {pkg.PackageBaseID for pkg in query} - notified = db.query(models.PackageNotification).join( - models.PackageBase, - models.PackageBase.ID.in_(query_set) - ).filter( - models.PackageNotification.UserID == user.ID + notified = ( + db.query(models.PackageNotification) + .join(models.PackageBase, models.PackageBase.ID.in_(query_set)) + .filter(models.PackageNotification.UserID == user.ID) ) for notif in notified: output[notif.PackageBase.ID] = True return output -def pkg_required(pkgname: str, provides: list[str]) \ - -> list[PackageDependency]: +def pkg_required(pkgname: str, provides: list[str]) -> list[PackageDependency]: """ Get dependencies that match a string in `[pkgname] + provides`. 
@@ -225,9 +216,12 @@ def pkg_required(pkgname: str, provides: list[str]) \ :return: List of PackageDependency instances """ targets = set([pkgname] + provides) - query = db.query(PackageDependency).join(Package).filter( - PackageDependency.DepName.in_(targets) - ).order_by(Package.Name.asc()) + query = ( + db.query(PackageDependency) + .join(Package) + .filter(PackageDependency.DepName.in_(targets)) + .order_by(Package.Name.asc()) + ) return query diff --git a/aurweb/pkgbase/actions.py b/aurweb/pkgbase/actions.py index 27143d51..4834f8dd 100644 --- a/aurweb/pkgbase/actions.py +++ b/aurweb/pkgbase/actions.py @@ -14,15 +14,15 @@ logger = logging.get_logger(__name__) def pkgbase_notify_instance(request: Request, pkgbase: PackageBase) -> None: - notif = db.query(pkgbase.notifications.filter( - PackageNotification.UserID == request.user.ID - ).exists()).scalar() + notif = db.query( + pkgbase.notifications.filter( + PackageNotification.UserID == request.user.ID + ).exists() + ).scalar() has_cred = request.user.has_credential(creds.PKGBASE_NOTIFY) if has_cred and not notif: with db.begin(): - db.create(PackageNotification, - PackageBase=pkgbase, - User=request.user) + db.create(PackageNotification, PackageBase=pkgbase, User=request.user) def pkgbase_unnotify_instance(request: Request, pkgbase: PackageBase) -> None: @@ -36,8 +36,11 @@ def pkgbase_unnotify_instance(request: Request, pkgbase: PackageBase) -> None: def pkgbase_unflag_instance(request: Request, pkgbase: PackageBase) -> None: - has_cred = request.user.has_credential(creds.PKGBASE_UNFLAG, approved=[ - pkgbase.Flagger, pkgbase.Maintainer] + [c.User for c in pkgbase.comaintainers]) + has_cred = request.user.has_credential( + creds.PKGBASE_UNFLAG, + approved=[pkgbase.Flagger, pkgbase.Maintainer] + + [c.User for c in pkgbase.comaintainers], + ) if has_cred: with db.begin(): pkgbase.OutOfDateTS = None @@ -93,9 +96,9 @@ def pkgbase_adopt_instance(request: Request, pkgbase: PackageBase) -> None: notif.send() -def 
pkgbase_delete_instance(request: Request, pkgbase: PackageBase, - comments: str = str()) \ - -> list[notify.Notification]: +def pkgbase_delete_instance( + request: Request, pkgbase: PackageBase, comments: str = str() +) -> list[notify.Notification]: notifs = handle_request(request, DELETION_ID, pkgbase) + [ notify.DeleteNotification(request.user.ID, pkgbase.ID) ] @@ -107,8 +110,9 @@ def pkgbase_delete_instance(request: Request, pkgbase: PackageBase, return notifs -def pkgbase_merge_instance(request: Request, pkgbase: PackageBase, - target: PackageBase, comments: str = str()) -> None: +def pkgbase_merge_instance( + request: Request, pkgbase: PackageBase, target: PackageBase, comments: str = str() +) -> None: pkgbasename = str(pkgbase.Name) # Create notifications. @@ -144,8 +148,10 @@ def pkgbase_merge_instance(request: Request, pkgbase: PackageBase, db.delete(pkgbase) # Log this out for accountability purposes. - logger.info(f"Trusted User '{request.user.Username}' merged " - f"'{pkgbasename}' into '{target.Name}'.") + logger.info( + f"Trusted User '{request.user.Username}' merged " + f"'{pkgbasename}' into '{target.Name}'." + ) # Send notifications. 
util.apply_all(notifs, lambda n: n.send()) diff --git a/aurweb/pkgbase/util.py b/aurweb/pkgbase/util.py index 63621d63..223c3013 100644 --- a/aurweb/pkgbase/util.py +++ b/aurweb/pkgbase/util.py @@ -10,19 +10,23 @@ from aurweb.models.package_comment import PackageComment from aurweb.models.package_request import PENDING_ID, PackageRequest from aurweb.models.package_vote import PackageVote from aurweb.scripts import notify -from aurweb.templates import make_context as _make_context -from aurweb.templates import make_variable_context as _make_variable_context +from aurweb.templates import ( + make_context as _make_context, + make_variable_context as _make_variable_context, +) -async def make_variable_context(request: Request, pkgbase: PackageBase) \ - -> dict[str, Any]: +async def make_variable_context( + request: Request, pkgbase: PackageBase +) -> dict[str, Any]: ctx = await _make_variable_context(request, pkgbase.Name) return make_context(request, pkgbase, ctx) -def make_context(request: Request, pkgbase: PackageBase, - context: dict[str, Any] = None) -> dict[str, Any]: - """ Make a basic context for package or pkgbase. +def make_context( + request: Request, pkgbase: PackageBase, context: dict[str, Any] = None +) -> dict[str, Any]: + """Make a basic context for package or pkgbase. :param request: FastAPI request :param pkgbase: PackageBase instance @@ -34,14 +38,16 @@ def make_context(request: Request, pkgbase: PackageBase, # Per page and offset. 
offset, per_page = util.sanitize_params( request.query_params.get("O", defaults.O), - request.query_params.get("PP", defaults.COMMENTS_PER_PAGE)) + request.query_params.get("PP", defaults.COMMENTS_PER_PAGE), + ) context["O"] = offset context["PP"] = per_page context["git_clone_uri_anon"] = config.get("options", "git_clone_uri_anon") context["git_clone_uri_priv"] = config.get("options", "git_clone_uri_priv") context["pkgbase"] = pkgbase context["comaintainers"] = [ - c.User for c in pkgbase.comaintainers.order_by( + c.User + for c in pkgbase.comaintainers.order_by( PackageComaintainer.Priority.asc() ).all() ] @@ -53,9 +59,11 @@ def make_context(request: Request, pkgbase: PackageBase, context["comments_total"] = pkgbase.comments.order_by( PackageComment.CommentTS.desc() ).count() - context["comments"] = pkgbase.comments.order_by( - PackageComment.CommentTS.desc() - ).limit(per_page).offset(offset) + context["comments"] = ( + pkgbase.comments.order_by(PackageComment.CommentTS.desc()) + .limit(per_page) + .offset(offset) + ) context["pinned_comments"] = pkgbase.comments.filter( PackageComment.PinnedTS != 0 ).order_by(PackageComment.CommentTS.desc()) @@ -70,15 +78,15 @@ def make_context(request: Request, pkgbase: PackageBase, ).scalar() context["requests"] = pkgbase.requests.filter( - and_(PackageRequest.Status == PENDING_ID, - PackageRequest.ClosedTS.is_(None)) + and_(PackageRequest.Status == PENDING_ID, PackageRequest.ClosedTS.is_(None)) ).count() return context -def remove_comaintainer(comaint: PackageComaintainer) \ - -> notify.ComaintainerRemoveNotification: +def remove_comaintainer( + comaint: PackageComaintainer, +) -> notify.ComaintainerRemoveNotification: """ Remove a PackageComaintainer. 
@@ -107,9 +115,9 @@ def remove_comaintainers(pkgbase: PackageBase, usernames: list[str]) -> None: """ notifications = [] with db.begin(): - comaintainers = pkgbase.comaintainers.join(User).filter( - User.Username.in_(usernames) - ).all() + comaintainers = ( + pkgbase.comaintainers.join(User).filter(User.Username.in_(usernames)).all() + ) notifications = [ notify.ComaintainerRemoveNotification(co.User.ID, pkgbase.ID) for co in comaintainers @@ -133,23 +141,23 @@ def latest_priority(pkgbase: PackageBase) -> int: """ # Order comaintainers related to pkgbase by Priority DESC. - record = pkgbase.comaintainers.order_by( - PackageComaintainer.Priority.desc()).first() + record = pkgbase.comaintainers.order_by(PackageComaintainer.Priority.desc()).first() # Use Priority column if record exists, otherwise 0. return record.Priority if record else 0 class NoopComaintainerNotification: - """ A noop notification stub used as an error-state return value. """ + """A noop notification stub used as an error-state return value.""" def send(self) -> None: - """ noop """ + """noop""" return -def add_comaintainer(pkgbase: PackageBase, comaintainer: User) \ - -> notify.ComaintainerAddNotification: +def add_comaintainer( + pkgbase: PackageBase, comaintainer: User +) -> notify.ComaintainerAddNotification: """ Add a new comaintainer to `pkgbase`. @@ -165,14 +173,19 @@ def add_comaintainer(pkgbase: PackageBase, comaintainer: User) \ new_prio = latest_priority(pkgbase) + 1 with db.begin(): - db.create(PackageComaintainer, PackageBase=pkgbase, - User=comaintainer, Priority=new_prio) + db.create( + PackageComaintainer, + PackageBase=pkgbase, + User=comaintainer, + Priority=new_prio, + ) return notify.ComaintainerAddNotification(comaintainer.ID, pkgbase.ID) -def add_comaintainers(request: Request, pkgbase: PackageBase, - usernames: list[str]) -> None: +def add_comaintainers( + request: Request, pkgbase: PackageBase, usernames: list[str] +) -> None: """ Add comaintainers to `pkgbase`. 
@@ -216,7 +229,6 @@ def rotate_comaintainers(pkgbase: PackageBase) -> None: :param pkgbase: PackageBase instance """ - comaintainers = pkgbase.comaintainers.order_by( - PackageComaintainer.Priority.asc()) + comaintainers = pkgbase.comaintainers.order_by(PackageComaintainer.Priority.asc()) for i, comaint in enumerate(comaintainers): comaint.Priority = i + 1 diff --git a/aurweb/pkgbase/validate.py b/aurweb/pkgbase/validate.py index baefc415..3c50e578 100644 --- a/aurweb/pkgbase/validate.py +++ b/aurweb/pkgbase/validate.py @@ -5,9 +5,13 @@ from aurweb.exceptions import ValidationError from aurweb.models import PackageBase -def request(pkgbase: PackageBase, - type: str, comments: str, merge_into: str, - context: dict[str, Any]) -> None: +def request( + pkgbase: PackageBase, + type: str, + comments: str, + merge_into: str, + context: dict[str, Any], +) -> None: if not comments: raise ValidationError(["The comment field must not be empty."]) @@ -15,21 +19,16 @@ def request(pkgbase: PackageBase, # Perform merge-related checks. if not merge_into: # TODO: This error needs to be translated. - raise ValidationError( - ['The "Merge into" field must not be empty.']) + raise ValidationError(['The "Merge into" field must not be empty.']) - target = db.query(PackageBase).filter( - PackageBase.Name == merge_into - ).first() + target = db.query(PackageBase).filter(PackageBase.Name == merge_into).first() if not target: # TODO: This error needs to be translated. - raise ValidationError([ - "The package base you want to merge into does not exist." - ]) + raise ValidationError( + ["The package base you want to merge into does not exist."] + ) db.refresh(target) if target.ID == pkgbase.ID: # TODO: This error needs to be translated. - raise ValidationError([ - "You cannot merge a package base into itself." 
- ]) + raise ValidationError(["You cannot merge a package base into itself."]) diff --git a/aurweb/prometheus.py b/aurweb/prometheus.py index 227d46ed..0bbea4be 100644 --- a/aurweb/prometheus.py +++ b/aurweb/prometheus.py @@ -19,8 +19,9 @@ def instrumentator(): # Their license is included in LICENSES/starlette_exporter. # The code has been modified to remove child route checks # (since we don't have any) and to stay within an 80-width limit. -def get_matching_route_path(scope: dict[Any, Any], routes: list[Route], - route_name: Optional[str] = None) -> str: +def get_matching_route_path( + scope: dict[Any, Any], routes: list[Route], route_name: Optional[str] = None +) -> str: """ Find a matching route and return its original path string @@ -34,7 +35,7 @@ def get_matching_route_path(scope: dict[Any, Any], routes: list[Route], if match == Match.FULL: route_name = route.path - ''' + """ # This path exists in the original function's code, but we # don't need it (currently), so it's been removed to avoid # useless test coverage. 
@@ -47,7 +48,7 @@ def get_matching_route_path(scope: dict[Any, Any], routes: list[Route], route_name = None else: route_name += child_route_name - ''' + """ return route_name elif match == Match.PARTIAL and route_name is None: @@ -55,9 +56,11 @@ def get_matching_route_path(scope: dict[Any, Any], routes: list[Route], def http_requests_total() -> Callable[[Info], None]: - metric = Counter("http_requests_total", - "Number of HTTP requests.", - labelnames=("method", "path", "status")) + metric = Counter( + "http_requests_total", + "Number of HTTP requests.", + labelnames=("method", "path", "status"), + ) def instrumentation(info: Info) -> None: if info.request.method.lower() in ("head", "options"): # pragma: no cover @@ -79,13 +82,13 @@ def http_requests_total() -> Callable[[Info], None]: if hasattr(app, "root_path"): app_root_path = getattr(app, "root_path") if root_path.startswith(app_root_path): - root_path = root_path[len(app_root_path):] + root_path = root_path[len(app_root_path) :] base_scope = { "type": scope.get("type"), "path": root_path + scope.get("path"), "path_params": scope.get("path_params", {}), - "method": scope.get("method") + "method": scope.get("method"), } method = scope.get("method") @@ -102,7 +105,8 @@ def http_api_requests_total() -> Callable[[Info], None]: metric = Counter( "http_api_requests", "Number of times an RPC API type has been requested.", - labelnames=("type", "status")) + labelnames=("type", "status"), + ) def instrumentation(info: Info) -> None: if info.request.method.lower() in ("head", "options"): # pragma: no cover diff --git a/aurweb/ratelimit.py b/aurweb/ratelimit.py index 86063f5d..cb08cdf5 100644 --- a/aurweb/ratelimit.py +++ b/aurweb/ratelimit.py @@ -38,8 +38,7 @@ def _update_ratelimit_db(request: Request): now = time.utcnow() time_to_delete = now - window_length - records = db.query(ApiRateLimit).filter( - ApiRateLimit.WindowStart < time_to_delete) + records = db.query(ApiRateLimit).filter(ApiRateLimit.WindowStart < 
time_to_delete) with db.begin(): db.delete_all(records) @@ -47,9 +46,7 @@ def _update_ratelimit_db(request: Request): record = db.query(ApiRateLimit, ApiRateLimit.IP == host).first() with db.begin(): if not record: - record = db.create(ApiRateLimit, - WindowStart=now, - IP=host, Requests=1) + record = db.create(ApiRateLimit, WindowStart=now, IP=host, Requests=1) else: record.Requests += 1 @@ -58,7 +55,7 @@ def _update_ratelimit_db(request: Request): def update_ratelimit(request: Request, pipeline: Pipeline): - """ Update the ratelimit stored in Redis or the database depending + """Update the ratelimit stored in Redis or the database depending on AUR_CONFIG's [options] cache setting. This Redis-capable function is slightly different than most. If Redis @@ -75,7 +72,7 @@ def update_ratelimit(request: Request, pipeline: Pipeline): def check_ratelimit(request: Request): - """ Increment and check to see if request has exceeded their rate limit. + """Increment and check to see if request has exceeded their rate limit. :param request: FastAPI request :returns: True if the request host has exceeded the rate limit else False diff --git a/aurweb/redis.py b/aurweb/redis.py index e29b8e37..af179b9b 100644 --- a/aurweb/redis.py +++ b/aurweb/redis.py @@ -1,9 +1,7 @@ import fakeredis - from redis import ConnectionPool, Redis import aurweb.config - from aurweb import logging logger = logging.get_logger(__name__) @@ -11,7 +9,7 @@ pool = None class FakeConnectionPool: - """ A fake ConnectionPool class which holds an internal reference + """A fake ConnectionPool class which holds an internal reference to a fakeredis handle. We normally deal with Redis by keeping its ConnectionPool globally diff --git a/aurweb/routers/__init__.py b/aurweb/routers/__init__.py index da79e38f..f77bce4f 100644 --- a/aurweb/routers/__init__.py +++ b/aurweb/routers/__init__.py @@ -3,7 +3,18 @@ API routers for FastAPI. See https://fastapi.tiangolo.com/tutorial/bigger-applications/ """ -from . 
import accounts, auth, html, packages, pkgbase, requests, rpc, rss, sso, trusted_user +from . import ( + accounts, + auth, + html, + packages, + pkgbase, + requests, + rpc, + rss, + sso, + trusted_user, +) """ aurweb application routes. This constant can be any iterable diff --git a/aurweb/routers/accounts.py b/aurweb/routers/accounts.py index dcac72b0..db05955a 100644 --- a/aurweb/routers/accounts.py +++ b/aurweb/routers/accounts.py @@ -1,6 +1,5 @@ import copy import typing - from http import HTTPStatus from typing import Any @@ -9,7 +8,6 @@ from fastapi.responses import HTMLResponse, RedirectResponse from sqlalchemy import and_, or_ import aurweb.config - from aurweb import cookies, db, l10n, logging, models, util from aurweb.auth import account_type_required, requires_auth, requires_guest from aurweb.captcha import get_captcha_salts @@ -37,21 +35,23 @@ async def passreset(request: Request): @router.post("/passreset", response_class=HTMLResponse) @handle_form_exceptions @requires_guest -async def passreset_post(request: Request, - user: str = Form(...), - resetkey: str = Form(default=None), - password: str = Form(default=None), - confirm: str = Form(default=None)): +async def passreset_post( + request: Request, + user: str = Form(...), + resetkey: str = Form(default=None), + password: str = Form(default=None), + confirm: str = Form(default=None), +): context = await make_variable_context(request, "Password Reset") # The user parameter being required, we can match against criteria = or_(models.User.Username == user, models.User.Email == user) - db_user = db.query(models.User, - and_(criteria, models.User.Suspended == 0)).first() + db_user = db.query(models.User, and_(criteria, models.User.Suspended == 0)).first() if db_user is None: context["errors"] = ["Invalid e-mail."] - return render_template(request, "passreset.html", context, - status_code=HTTPStatus.NOT_FOUND) + return render_template( + request, "passreset.html", context, status_code=HTTPStatus.NOT_FOUND + 
) db.refresh(db_user) if resetkey: @@ -59,29 +59,34 @@ async def passreset_post(request: Request, if not db_user.ResetKey or resetkey != db_user.ResetKey: context["errors"] = ["Invalid e-mail."] - return render_template(request, "passreset.html", context, - status_code=HTTPStatus.NOT_FOUND) + return render_template( + request, "passreset.html", context, status_code=HTTPStatus.NOT_FOUND + ) if not user or not password: context["errors"] = ["Missing a required field."] - return render_template(request, "passreset.html", context, - status_code=HTTPStatus.BAD_REQUEST) + return render_template( + request, "passreset.html", context, status_code=HTTPStatus.BAD_REQUEST + ) if password != confirm: # If the provided password does not match the provided confirm. context["errors"] = ["Password fields do not match."] - return render_template(request, "passreset.html", context, - status_code=HTTPStatus.BAD_REQUEST) + return render_template( + request, "passreset.html", context, status_code=HTTPStatus.BAD_REQUEST + ) if len(password) < models.User.minimum_passwd_length(): # Translate the error here, which simplifies error output # in the jinja2 template. _ = get_translator_for_request(request) - context["errors"] = [_( - "Your password must be at least %s characters.") % ( - str(models.User.minimum_passwd_length()))] - return render_template(request, "passreset.html", context, - status_code=HTTPStatus.BAD_REQUEST) + context["errors"] = [ + _("Your password must be at least %s characters.") + % (str(models.User.minimum_passwd_length())) + ] + return render_template( + request, "passreset.html", context, status_code=HTTPStatus.BAD_REQUEST + ) # We got to this point; everything matched up. Update the password # and remove the ResetKey. @@ -92,8 +97,9 @@ async def passreset_post(request: Request, db_user.update_password(password) # Render ?step=complete. 
- return RedirectResponse(url="/passreset?step=complete", - status_code=HTTPStatus.SEE_OTHER) + return RedirectResponse( + url="/passreset?step=complete", status_code=HTTPStatus.SEE_OTHER + ) # If we got here, we continue with issuing a resetkey for the user. resetkey = generate_resetkey() @@ -103,13 +109,13 @@ async def passreset_post(request: Request, ResetKeyNotification(db_user.ID).send() # Render ?step=confirm. - return RedirectResponse(url="/passreset?step=confirm", - status_code=HTTPStatus.SEE_OTHER) + return RedirectResponse( + url="/passreset?step=confirm", status_code=HTTPStatus.SEE_OTHER + ) -def process_account_form(request: Request, user: models.User, - args: dict[str, Any]): - """ Process an account form. All fields are optional and only checks +def process_account_form(request: Request, user: models.User, args: dict[str, Any]): + """Process an account form. All fields are optional and only checks requirements in the case they are present. ``` @@ -146,7 +152,7 @@ def process_account_form(request: Request, user: models.User, validate.username_in_use, validate.email_in_use, validate.invalid_account_type, - validate.invalid_captcha + validate.invalid_captcha, ] try: @@ -158,11 +164,10 @@ def process_account_form(request: Request, user: models.User, return (True, []) -def make_account_form_context(context: dict, - request: Request, - user: models.User, - args: dict): - """ Modify a FastAPI context and add attributes for the account form. +def make_account_form_context( + context: dict, request: Request, user: models.User, args: dict +): + """Modify a FastAPI context and add attributes for the account form. :param context: FastAPI context :param request: FastAPI request @@ -173,15 +178,17 @@ def make_account_form_context(context: dict, # Do not modify the original context. 
context = copy.copy(context) - context["account_types"] = list(filter( - lambda e: request.user.AccountTypeID >= e[0], - [ - (at.USER_ID, f"Normal {at.USER}"), - (at.TRUSTED_USER_ID, at.TRUSTED_USER), - (at.DEVELOPER_ID, at.DEVELOPER), - (at.TRUSTED_USER_AND_DEV_ID, at.TRUSTED_USER_AND_DEV) - ] - )) + context["account_types"] = list( + filter( + lambda e: request.user.AccountTypeID >= e[0], + [ + (at.USER_ID, f"Normal {at.USER}"), + (at.TRUSTED_USER_ID, at.TRUSTED_USER), + (at.DEVELOPER_ID, at.DEVELOPER), + (at.TRUSTED_USER_AND_DEV_ID, at.TRUSTED_USER_AND_DEV), + ], + ) + ) if request.user.is_authenticated(): context["username"] = args.get("U", user.Username) @@ -229,24 +236,24 @@ def make_account_form_context(context: dict, @router.get("/register", response_class=HTMLResponse) @requires_guest -async def account_register(request: Request, - U: str = Form(default=str()), # Username - E: str = Form(default=str()), # Email - H: str = Form(default=False), # Hide Email - BE: str = Form(default=None), # Backup Email - R: str = Form(default=None), # Real Name - HP: str = Form(default=None), # Homepage - I: str = Form(default=None), # IRC Nick - K: str = Form(default=None), # PGP Key FP - L: str = Form(default=aurweb.config.get( - "options", "default_lang")), - TZ: str = Form(default=aurweb.config.get( - "options", "default_timezone")), - PK: str = Form(default=None), - CN: bool = Form(default=False), # Comment Notify - CU: bool = Form(default=False), # Update Notify - CO: bool = Form(default=False), # Owner Notify - captcha: str = Form(default=str())): +async def account_register( + request: Request, + U: str = Form(default=str()), # Username + E: str = Form(default=str()), # Email + H: str = Form(default=False), # Hide Email + BE: str = Form(default=None), # Backup Email + R: str = Form(default=None), # Real Name + HP: str = Form(default=None), # Homepage + I: str = Form(default=None), # IRC Nick + K: str = Form(default=None), # PGP Key FP + L: str = 
Form(default=aurweb.config.get("options", "default_lang")), + TZ: str = Form(default=aurweb.config.get("options", "default_timezone")), + PK: str = Form(default=None), + CN: bool = Form(default=False), # Comment Notify + CU: bool = Form(default=False), # Update Notify + CO: bool = Form(default=False), # Owner Notify + captcha: str = Form(default=str()), +): context = await make_variable_context(request, "Register") context["captcha_salt"] = get_captcha_salts()[0] context = make_account_form_context(context, request, None, dict()) @@ -256,32 +263,32 @@ async def account_register(request: Request, @router.post("/register", response_class=HTMLResponse) @handle_form_exceptions @requires_guest -async def account_register_post(request: Request, - U: str = Form(default=str()), # Username - E: str = Form(default=str()), # Email - H: str = Form(default=False), # Hide Email - BE: str = Form(default=None), # Backup Email - R: str = Form(default=''), # Real Name - HP: str = Form(default=None), # Homepage - I: str = Form(default=None), # IRC Nick - K: str = Form(default=None), # PGP Key - L: str = Form(default=aurweb.config.get( - "options", "default_lang")), - TZ: str = Form(default=aurweb.config.get( - "options", "default_timezone")), - PK: str = Form(default=str()), # SSH PubKey - CN: bool = Form(default=False), - UN: bool = Form(default=False), - ON: bool = Form(default=False), - captcha: str = Form(default=None), - captcha_salt: str = Form(...)): +async def account_register_post( + request: Request, + U: str = Form(default=str()), # Username + E: str = Form(default=str()), # Email + H: str = Form(default=False), # Hide Email + BE: str = Form(default=None), # Backup Email + R: str = Form(default=""), # Real Name + HP: str = Form(default=None), # Homepage + I: str = Form(default=None), # IRC Nick + K: str = Form(default=None), # PGP Key + L: str = Form(default=aurweb.config.get("options", "default_lang")), + TZ: str = Form(default=aurweb.config.get("options", 
"default_timezone")), + PK: str = Form(default=str()), # SSH PubKey + CN: bool = Form(default=False), + UN: bool = Form(default=False), + ON: bool = Form(default=False), + captcha: str = Form(default=None), + captcha_salt: str = Form(...), +): context = await make_variable_context(request, "Register") args = dict(await request.form()) args["K"] = args.get("K", str()).replace(" ", "") K = args.get("K") # Force "H" into a boolean. - args["H"] = H = (args.get("H", str()) == "on") + args["H"] = H = args.get("H", str()) == "on" context = make_account_form_context(context, request, None, args) ok, errors = process_account_form(request, request.user, args) @@ -289,30 +296,45 @@ async def account_register_post(request: Request, # If the field values given do not meet the requirements, # return HTTP 400 with an error. context["errors"] = errors - return render_template(request, "register.html", context, - status_code=HTTPStatus.BAD_REQUEST) + return render_template( + request, "register.html", context, status_code=HTTPStatus.BAD_REQUEST + ) if not captcha: context["errors"] = ["The CAPTCHA is missing."] - return render_template(request, "register.html", context, - status_code=HTTPStatus.BAD_REQUEST) + return render_template( + request, "register.html", context, status_code=HTTPStatus.BAD_REQUEST + ) # Create a user with no password with a resetkey, then send # an email off about it. resetkey = generate_resetkey() # By default, we grab the User account type to associate with. - atype = db.query(models.AccountType, - models.AccountType.AccountType == "User").first() + atype = db.query( + models.AccountType, models.AccountType.AccountType == "User" + ).first() # Create a user given all parameters available. 
with db.begin(): - user = db.create(models.User, Username=U, - Email=E, HideEmail=H, BackupEmail=BE, - RealName=R, Homepage=HP, IRCNick=I, PGPKey=K, - LangPreference=L, Timezone=TZ, CommentNotify=CN, - UpdateNotify=UN, OwnershipNotify=ON, - ResetKey=resetkey, AccountType=atype) + user = db.create( + models.User, + Username=U, + Email=E, + HideEmail=H, + BackupEmail=BE, + RealName=R, + Homepage=HP, + IRCNick=I, + PGPKey=K, + LangPreference=L, + Timezone=TZ, + CommentNotify=CN, + UpdateNotify=UN, + OwnershipNotify=ON, + ResetKey=resetkey, + AccountType=atype, + ) # If a PK was given and either one does not exist or the given # PK mismatches the existing user's SSHPubKey.PubKey. @@ -323,8 +345,9 @@ async def account_register_post(request: Request, pk = " ".join(k) fprint = get_fingerprint(pk) with db.begin(): - db.create(models.SSHPubKey, UserID=user.ID, - PubKey=pk, Fingerprint=fprint) + db.create( + models.SSHPubKey, UserID=user.ID, PubKey=pk, Fingerprint=fprint + ) # Send a reset key notification to the new user. WelcomeNotification(user.ID).send() @@ -334,8 +357,9 @@ async def account_register_post(request: Request, return render_template(request, "register.html", context) -def cannot_edit(request: Request, user: models.User) \ - -> typing.Optional[RedirectResponse]: +def cannot_edit( + request: Request, user: models.User +) -> typing.Optional[RedirectResponse]: """ Decide if `request.user` cannot edit `user`. 
@@ -373,31 +397,30 @@ async def account_edit(request: Request, username: str): @router.post("/account/{username}/edit", response_class=HTMLResponse) @handle_form_exceptions @requires_auth -async def account_edit_post(request: Request, - username: str, - U: str = Form(default=str()), # Username - J: bool = Form(default=False), - E: str = Form(default=str()), # Email - H: str = Form(default=False), # Hide Email - BE: str = Form(default=None), # Backup Email - R: str = Form(default=None), # Real Name - HP: str = Form(default=None), # Homepage - I: str = Form(default=None), # IRC Nick - K: str = Form(default=None), # PGP Key - L: str = Form(aurweb.config.get( - "options", "default_lang")), - TZ: str = Form(aurweb.config.get( - "options", "default_timezone")), - P: str = Form(default=str()), # New Password - C: str = Form(default=None), # Password Confirm - PK: str = Form(default=None), # PubKey - CN: bool = Form(default=False), # Comment Notify - UN: bool = Form(default=False), # Update Notify - ON: bool = Form(default=False), # Owner Notify - T: int = Form(default=None), - passwd: str = Form(default=str())): - user = db.query(models.User).filter( - models.User.Username == username).first() +async def account_edit_post( + request: Request, + username: str, + U: str = Form(default=str()), # Username + J: bool = Form(default=False), + E: str = Form(default=str()), # Email + H: str = Form(default=False), # Hide Email + BE: str = Form(default=None), # Backup Email + R: str = Form(default=None), # Real Name + HP: str = Form(default=None), # Homepage + I: str = Form(default=None), # IRC Nick + K: str = Form(default=None), # PGP Key + L: str = Form(aurweb.config.get("options", "default_lang")), + TZ: str = Form(aurweb.config.get("options", "default_timezone")), + P: str = Form(default=str()), # New Password + C: str = Form(default=None), # Password Confirm + PK: str = Form(default=None), # PubKey + CN: bool = Form(default=False), # Comment Notify + UN: bool = 
Form(default=False), # Update Notify + ON: bool = Form(default=False), # Owner Notify + T: int = Form(default=None), + passwd: str = Form(default=str()), +): + user = db.query(models.User).filter(models.User.Username == username).first() response = cannot_edit(request, user) if response: return response @@ -416,13 +439,15 @@ async def account_edit_post(request: Request, if not passwd: context["errors"] = ["Invalid password."] - return render_template(request, "account/edit.html", context, - status_code=HTTPStatus.BAD_REQUEST) + return render_template( + request, "account/edit.html", context, status_code=HTTPStatus.BAD_REQUEST + ) if not ok: context["errors"] = errors - return render_template(request, "account/edit.html", context, - status_code=HTTPStatus.BAD_REQUEST) + return render_template( + request, "account/edit.html", context, status_code=HTTPStatus.BAD_REQUEST + ) updates = [ update.simple, @@ -430,7 +455,7 @@ async def account_edit_post(request: Request, update.timezone, update.ssh_pubkey, update.account_type, - update.password + update.password, ] for f in updates: @@ -441,18 +466,17 @@ async def account_edit_post(request: Request, # Update cookies with requests, in case they were changed. 
response = render_template(request, "account/edit.html", context) - return cookies.update_response_cookies(request, response, - aurtz=TZ, aurlang=L) + return cookies.update_response_cookies(request, response, aurtz=TZ, aurlang=L) @router.get("/account/{username}") async def account(request: Request, username: str): _ = l10n.get_translator_for_request(request) - context = await make_variable_context( - request, _("Account") + " " + username) + context = await make_variable_context(request, _("Account") + " " + username) if not request.user.is_authenticated(): - return render_template(request, "account/show.html", context, - status_code=HTTPStatus.UNAUTHORIZED) + return render_template( + request, "account/show.html", context, status_code=HTTPStatus.UNAUTHORIZED + ) # Get related User record, if possible. user = get_user_by_name(username) @@ -460,11 +484,10 @@ async def account(request: Request, username: str): # Format PGPKey for display with a space between each 4 characters. k = user.PGPKey or str() - context["pgp_key"] = " ".join([k[i:i + 4] for i in range(0, len(k), 4)]) + context["pgp_key"] = " ".join([k[i : i + 4] for i in range(0, len(k), 4)]) login_ts = None - session = db.query(models.Session).filter( - models.Session.UsersID == user.ID).first() + session = db.query(models.Session).filter(models.Session.UsersID == user.ID).first() if session: login_ts = user.session.LastUpdateTS context["login_ts"] = login_ts @@ -480,15 +503,14 @@ async def account_comments(request: Request, username: str): context = make_context(request, "Accounts") context["username"] = username context["comments"] = user.package_comments.order_by( - models.PackageComment.CommentTS.desc()) + models.PackageComment.CommentTS.desc() + ) return render_template(request, "account/comments.html", context) @router.get("/accounts") @requires_auth -@account_type_required({at.TRUSTED_USER, - at.DEVELOPER, - at.TRUSTED_USER_AND_DEV}) +@account_type_required({at.TRUSTED_USER, at.DEVELOPER, 
at.TRUSTED_USER_AND_DEV}) async def accounts(request: Request): context = make_context(request, "Accounts") return render_template(request, "account/search.html", context) @@ -497,19 +519,19 @@ async def accounts(request: Request): @router.post("/accounts") @handle_form_exceptions @requires_auth -@account_type_required({at.TRUSTED_USER, - at.DEVELOPER, - at.TRUSTED_USER_AND_DEV}) -async def accounts_post(request: Request, - O: int = Form(default=0), # Offset - SB: str = Form(default=str()), # Sort By - U: str = Form(default=str()), # Username - T: str = Form(default=str()), # Account Type - S: bool = Form(default=False), # Suspended - E: str = Form(default=str()), # Email - R: str = Form(default=str()), # Real Name - I: str = Form(default=str()), # IRC Nick - K: str = Form(default=str())): # PGP Key +@account_type_required({at.TRUSTED_USER, at.DEVELOPER, at.TRUSTED_USER_AND_DEV}) +async def accounts_post( + request: Request, + O: int = Form(default=0), # Offset + SB: str = Form(default=str()), # Sort By + U: str = Form(default=str()), # Username + T: str = Form(default=str()), # Account Type + S: bool = Form(default=False), # Suspended + E: str = Form(default=str()), # Email + R: str = Form(default=str()), # Real Name + I: str = Form(default=str()), # IRC Nick + K: str = Form(default=str()), +): # PGP Key context = await make_variable_context(request, "Accounts") context["pp"] = pp = 50 # Hits per page. @@ -534,7 +556,7 @@ async def accounts_post(request: Request, "u": at.USER_ID, "t": at.TRUSTED_USER_ID, "d": at.DEVELOPER_ID, - "td": at.TRUSTED_USER_AND_DEV_ID + "td": at.TRUSTED_USER_AND_DEV_ID, } account_type_id = account_types.get(T, None) @@ -545,7 +567,8 @@ async def accounts_post(request: Request, # Populate this list with any additional statements to # be ANDed together. 
statements = [ - v for k, v in [ + v + for k, v in [ (account_type_id is not None, models.AccountType.ID == account_type_id), (bool(U), models.User.Username.like(f"%{U}%")), (bool(S), models.User.Suspended == S), @@ -553,7 +576,8 @@ async def accounts_post(request: Request, (bool(R), models.User.RealName.like(f"%{R}%")), (bool(I), models.User.IRCNick.like(f"%{I}%")), (bool(K), models.User.PGPKey.like(f"%{K}%")), - ] if k + ] + if k ] # Filter the query by coe-mbining all statements added above into @@ -571,9 +595,7 @@ async def accounts_post(request: Request, return render_template(request, "account/index.html", context) -def render_terms_of_service(request: Request, - context: dict, - terms: typing.Iterable): +def render_terms_of_service(request: Request, context: dict, terms: typing.Iterable): if not terms: return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER) context["unaccepted_terms"] = terms @@ -585,14 +607,21 @@ def render_terms_of_service(request: Request, async def terms_of_service(request: Request): # Query the database for terms that were previously accepted, # but now have a bumped Revision that needs to be accepted. - diffs = db.query(models.Term).join(models.AcceptedTerm).filter( - models.AcceptedTerm.Revision < models.Term.Revision).all() + diffs = ( + db.query(models.Term) + .join(models.AcceptedTerm) + .filter(models.AcceptedTerm.Revision < models.Term.Revision) + .all() + ) # Query the database for any terms that have not yet been accepted. - unaccepted = db.query(models.Term).filter( - ~models.Term.ID.in_(db.query(models.AcceptedTerm.TermsID))).all() + unaccepted = ( + db.query(models.Term) + .filter(~models.Term.ID.in_(db.query(models.AcceptedTerm.TermsID))) + .all() + ) - for record in (diffs + unaccepted): + for record in diffs + unaccepted: db.refresh(record) # Translate the 'Terms of Service' part of our page title. 
@@ -607,16 +636,22 @@ async def terms_of_service(request: Request): @router.post("/tos") @handle_form_exceptions @requires_auth -async def terms_of_service_post(request: Request, - accept: bool = Form(default=False)): +async def terms_of_service_post(request: Request, accept: bool = Form(default=False)): # Query the database for terms that were previously accepted, # but now have a bumped Revision that needs to be accepted. - diffs = db.query(models.Term).join(models.AcceptedTerm).filter( - models.AcceptedTerm.Revision < models.Term.Revision).all() + diffs = ( + db.query(models.Term) + .join(models.AcceptedTerm) + .filter(models.AcceptedTerm.Revision < models.Term.Revision) + .all() + ) # Query the database for any terms that have not yet been accepted. - unaccepted = db.query(models.Term).filter( - ~models.Term.ID.in_(db.query(models.AcceptedTerm.TermsID))).all() + unaccepted = ( + db.query(models.Term) + .filter(~models.Term.ID.in_(db.query(models.AcceptedTerm.TermsID))) + .all() + ) if not accept: # Translate the 'Terms of Service' part of our page title. @@ -628,7 +663,8 @@ async def terms_of_service_post(request: Request, # them instead of reiterating the process in terms_of_service. accept_needed = sorted(unaccepted + diffs) return render_terms_of_service( - request, context, util.apply_all(accept_needed, db.refresh)) + request, context, util.apply_all(accept_needed, db.refresh) + ) with db.begin(): # For each term we found, query for the matching accepted term @@ -636,13 +672,18 @@ async def terms_of_service_post(request: Request, for term in diffs: db.refresh(term) accepted_term = request.user.accepted_terms.filter( - models.AcceptedTerm.TermsID == term.ID).first() + models.AcceptedTerm.TermsID == term.ID + ).first() accepted_term.Revision = term.Revision # For each term that was never accepted, accept it! 
for term in unaccepted: db.refresh(term) - db.create(models.AcceptedTerm, User=request.user, - Term=term, Revision=term.Revision) + db.create( + models.AcceptedTerm, + User=request.user, + Term=term, + Revision=term.Revision, + ) return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER) diff --git a/aurweb/routers/auth.py b/aurweb/routers/auth.py index 50cec419..3f94952e 100644 --- a/aurweb/routers/auth.py +++ b/aurweb/routers/auth.py @@ -5,7 +5,6 @@ from fastapi.responses import HTMLResponse, RedirectResponse from sqlalchemy import or_ import aurweb.config - from aurweb import cookies, db from aurweb.auth import requires_auth, requires_guest from aurweb.exceptions import handle_form_exceptions @@ -17,7 +16,7 @@ router = APIRouter() async def login_template(request: Request, next: str, errors: list = None): - """ Provide login-specific template context to render_template. """ + """Provide login-specific template context to render_template.""" context = await make_variable_context(request, "Login", next) context["errors"] = errors context["url_base"] = f"{request.url.scheme}://{request.url.netloc}" @@ -32,55 +31,73 @@ async def login_get(request: Request, next: str = "/"): @router.post("/login", response_class=HTMLResponse) @handle_form_exceptions @requires_guest -async def login_post(request: Request, - next: str = Form(...), - user: str = Form(default=str()), - passwd: str = Form(default=str()), - remember_me: bool = Form(default=False)): +async def login_post( + request: Request, + next: str = Form(...), + user: str = Form(default=str()), + passwd: str = Form(default=str()), + remember_me: bool = Form(default=False), +): # TODO: Once the Origin header gets broader adoption, this code can be # slightly simplified to use it. 
login_path = aurweb.config.get("options", "aur_location") + "/login" referer = request.headers.get("Referer") if not referer or not referer.startswith(login_path): _ = get_translator_for_request(request) - raise HTTPException(status_code=HTTPStatus.BAD_REQUEST, - detail=_("Bad Referer header.")) + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, detail=_("Bad Referer header.") + ) with db.begin(): - user = db.query(User).filter( - or_(User.Username == user, User.Email == user) - ).first() + user = ( + db.query(User) + .filter(or_(User.Username == user, User.Email == user)) + .first() + ) if not user: - return await login_template(request, next, - errors=["Bad username or password."]) + return await login_template(request, next, errors=["Bad username or password."]) if user.Suspended: - return await login_template(request, next, - errors=["Account Suspended"]) + return await login_template(request, next, errors=["Account Suspended"]) cookie_timeout = cookies.timeout(remember_me) sid = user.login(request, passwd, cookie_timeout) if not sid: - return await login_template(request, next, - errors=["Bad username or password."]) + return await login_template(request, next, errors=["Bad username or password."]) - response = RedirectResponse(url=next, - status_code=HTTPStatus.SEE_OTHER) + response = RedirectResponse(url=next, status_code=HTTPStatus.SEE_OTHER) secure = aurweb.config.getboolean("options", "disable_http_login") - response.set_cookie("AURSID", sid, max_age=cookie_timeout, - secure=secure, httponly=secure, - samesite=cookies.samesite()) - response.set_cookie("AURTZ", user.Timezone, - secure=secure, httponly=secure, - samesite=cookies.samesite()) - response.set_cookie("AURLANG", user.LangPreference, - secure=secure, httponly=secure, - samesite=cookies.samesite()) - response.set_cookie("AURREMEMBER", remember_me, - secure=secure, httponly=secure, - samesite=cookies.samesite()) + response.set_cookie( + "AURSID", + sid, + max_age=cookie_timeout, + 
secure=secure, + httponly=secure, + samesite=cookies.samesite(), + ) + response.set_cookie( + "AURTZ", + user.Timezone, + secure=secure, + httponly=secure, + samesite=cookies.samesite(), + ) + response.set_cookie( + "AURLANG", + user.LangPreference, + secure=secure, + httponly=secure, + samesite=cookies.samesite(), + ) + response.set_cookie( + "AURREMEMBER", + remember_me, + secure=secure, + httponly=secure, + samesite=cookies.samesite(), + ) return response @@ -93,8 +110,7 @@ async def logout(request: Request, next: str = Form(default="/")): # Use 303 since we may be handling a post request, that'll get it # to redirect to a get request. - response = RedirectResponse(url=next, - status_code=HTTPStatus.SEE_OTHER) + response = RedirectResponse(url=next, status_code=HTTPStatus.SEE_OTHER) response.delete_cookie("AURSID") response.delete_cookie("AURTZ") return response diff --git a/aurweb/routers/html.py b/aurweb/routers/html.py index d31a32c7..2148d535 100644 --- a/aurweb/routers/html.py +++ b/aurweb/routers/html.py @@ -2,17 +2,20 @@ decorators in some way; more complex routes should be defined in their own modules and imported here. 
""" import os - from http import HTTPStatus from fastapi import APIRouter, Form, HTTPException, Request, Response from fastapi.responses import HTMLResponse, RedirectResponse -from prometheus_client import CONTENT_TYPE_LATEST, CollectorRegistry, generate_latest, multiprocess +from prometheus_client import ( + CONTENT_TYPE_LATEST, + CollectorRegistry, + generate_latest, + multiprocess, +) from sqlalchemy import and_, case, or_ import aurweb.config import aurweb.models.package_request - from aurweb import cookies, db, logging, models, time, util from aurweb.cache import db_count_cache from aurweb.exceptions import handle_form_exceptions @@ -27,17 +30,19 @@ router = APIRouter() @router.get("/favicon.ico") async def favicon(request: Request): - """ Some browsers attempt to find a website's favicon via root uri at - /favicon.ico, so provide a redirection here to our static icon. """ + """Some browsers attempt to find a website's favicon via root uri at + /favicon.ico, so provide a redirection here to our static icon.""" return RedirectResponse("/static/images/favicon.ico") @router.post("/language", response_class=RedirectResponse) @handle_form_exceptions -async def language(request: Request, - set_lang: str = Form(...), - next: str = Form(...), - q: str = Form(default=None)): +async def language( + request: Request, + set_lang: str = Form(...), + next: str = Form(...), + q: str = Form(default=None), +): """ A POST route used to set a session's language. @@ -45,7 +50,7 @@ async def language(request: Request, setting the language on any page, we want to preserve query parameters across the redirect. """ - if next[0] != '/': + if next[0] != "/": return HTMLResponse(b"Invalid 'next' parameter.", status_code=400) query_string = "?" + q if q else str() @@ -56,20 +61,21 @@ async def language(request: Request, request.user.LangPreference = set_lang # In any case, set the response's AURLANG cookie that never expires. 
- response = RedirectResponse(url=f"{next}{query_string}", - status_code=HTTPStatus.SEE_OTHER) + response = RedirectResponse( + url=f"{next}{query_string}", status_code=HTTPStatus.SEE_OTHER + ) secure = aurweb.config.getboolean("options", "disable_http_login") - response.set_cookie("AURLANG", set_lang, - secure=secure, httponly=secure, - samesite=cookies.samesite()) + response.set_cookie( + "AURLANG", set_lang, secure=secure, httponly=secure, samesite=cookies.samesite() + ) return response @router.get("/", response_class=HTMLResponse) async def index(request: Request): - """ Homepage route. """ + """Homepage route.""" context = make_context(request, "Home") - context['ssh_fingerprints'] = util.get_ssh_fingerprints() + context["ssh_fingerprints"] = util.get_ssh_fingerprints() bases = db.query(models.PackageBase) @@ -79,24 +85,33 @@ async def index(request: Request): # Package statistics. query = bases.filter(models.PackageBase.PackagerUID.isnot(None)) context["package_count"] = await db_count_cache( - redis, "package_count", query, expire=cache_expire) + redis, "package_count", query, expire=cache_expire + ) query = bases.filter( - and_(models.PackageBase.MaintainerUID.is_(None), - models.PackageBase.PackagerUID.isnot(None)) + and_( + models.PackageBase.MaintainerUID.is_(None), + models.PackageBase.PackagerUID.isnot(None), + ) ) context["orphan_count"] = await db_count_cache( - redis, "orphan_count", query, expire=cache_expire) + redis, "orphan_count", query, expire=cache_expire + ) query = db.query(models.User) context["user_count"] = await db_count_cache( - redis, "user_count", query, expire=cache_expire) + redis, "user_count", query, expire=cache_expire + ) query = query.filter( - or_(models.User.AccountTypeID == TRUSTED_USER_ID, - models.User.AccountTypeID == TRUSTED_USER_AND_DEV_ID)) + or_( + models.User.AccountTypeID == TRUSTED_USER_ID, + models.User.AccountTypeID == TRUSTED_USER_AND_DEV_ID, + ) + ) context["trusted_user_count"] = await db_count_cache( - 
redis, "trusted_user_count", query, expire=cache_expire) + redis, "trusted_user_count", query, expire=cache_expire + ) # Current timestamp. now = time.utcnow() @@ -106,31 +121,40 @@ async def index(request: Request): one_hour = 3600 updated = bases.filter( - and_(models.PackageBase.ModifiedTS - models.PackageBase.SubmittedTS >= one_hour, - models.PackageBase.PackagerUID.isnot(None)) + and_( + models.PackageBase.ModifiedTS - models.PackageBase.SubmittedTS >= one_hour, + models.PackageBase.PackagerUID.isnot(None), + ) ) query = bases.filter( - and_(models.PackageBase.SubmittedTS >= seven_days_ago, - models.PackageBase.PackagerUID.isnot(None)) + and_( + models.PackageBase.SubmittedTS >= seven_days_ago, + models.PackageBase.PackagerUID.isnot(None), + ) ) context["seven_days_old_added"] = await db_count_cache( - redis, "seven_days_old_added", query, expire=cache_expire) + redis, "seven_days_old_added", query, expire=cache_expire + ) query = updated.filter(models.PackageBase.ModifiedTS >= seven_days_ago) context["seven_days_old_updated"] = await db_count_cache( - redis, "seven_days_old_updated", query, expire=cache_expire) + redis, "seven_days_old_updated", query, expire=cache_expire + ) year = seven_days * 52 # Fifty two weeks worth: one year. year_ago = now - year query = updated.filter(models.PackageBase.ModifiedTS >= year_ago) context["year_old_updated"] = await db_count_cache( - redis, "year_old_updated", query, expire=cache_expire) + redis, "year_old_updated", query, expire=cache_expire + ) query = bases.filter( - models.PackageBase.ModifiedTS - models.PackageBase.SubmittedTS < 3600) + models.PackageBase.ModifiedTS - models.PackageBase.SubmittedTS < 3600 + ) context["never_updated"] = await db_count_cache( - redis, "never_updated", query, expire=cache_expire) + redis, "never_updated", query, expire=cache_expire + ) # Get the 15 most recently updated packages. 
context["package_updates"] = updated_packages(15, cache_expire) @@ -140,78 +164,92 @@ async def index(request: Request): # the dashboard display. packages = db.query(models.Package).join(models.PackageBase) - maintained = packages.join( - models.PackageComaintainer, - models.PackageComaintainer.PackageBaseID == models.PackageBase.ID, - isouter=True - ).join( - models.User, - or_(models.PackageBase.MaintainerUID == models.User.ID, - models.PackageComaintainer.UsersID == models.User.ID) - ).filter( - models.User.ID == request.user.ID + maintained = ( + packages.join( + models.PackageComaintainer, + models.PackageComaintainer.PackageBaseID == models.PackageBase.ID, + isouter=True, + ) + .join( + models.User, + or_( + models.PackageBase.MaintainerUID == models.User.ID, + models.PackageComaintainer.UsersID == models.User.ID, + ), + ) + .filter(models.User.ID == request.user.ID) ) # Packages maintained by the user that have been flagged. - context["flagged_packages"] = maintained.filter( - models.PackageBase.OutOfDateTS.isnot(None) - ).order_by( - models.PackageBase.ModifiedTS.desc(), models.Package.Name.asc() - ).limit(50).all() + context["flagged_packages"] = ( + maintained.filter(models.PackageBase.OutOfDateTS.isnot(None)) + .order_by(models.PackageBase.ModifiedTS.desc(), models.Package.Name.asc()) + .limit(50) + .all() + ) # Flagged packages that request.user has voted for. context["flagged_packages_voted"] = query_voted( - context.get("flagged_packages"), request.user) + context.get("flagged_packages"), request.user + ) # Flagged packages that request.user is being notified about. context["flagged_packages_notified"] = query_notified( - context.get("flagged_packages"), request.user) + context.get("flagged_packages"), request.user + ) - archive_time = aurweb.config.getint('options', 'request_archive_time') + archive_time = aurweb.config.getint("options", "request_archive_time") start = now - archive_time # Package requests created by request.user. 
- context["package_requests"] = request.user.package_requests.filter( - models.PackageRequest.RequestTS >= start - ).order_by( - # Order primarily by the Status column being PENDING_ID, - # and secondarily by RequestTS; both in descending order. - case([(models.PackageRequest.Status == PENDING_ID, 1)], - else_=0).desc(), - models.PackageRequest.RequestTS.desc() - ).limit(50).all() + context["package_requests"] = ( + request.user.package_requests.filter( + models.PackageRequest.RequestTS >= start + ) + .order_by( + # Order primarily by the Status column being PENDING_ID, + # and secondarily by RequestTS; both in descending order. + case([(models.PackageRequest.Status == PENDING_ID, 1)], else_=0).desc(), + models.PackageRequest.RequestTS.desc(), + ) + .limit(50) + .all() + ) # Packages that the request user maintains or comaintains. - context["packages"] = maintained.filter( - models.User.ID == models.PackageBase.MaintainerUID - ).order_by( - models.PackageBase.ModifiedTS.desc(), models.Package.Name.desc() - ).limit(50).all() + context["packages"] = ( + maintained.filter(models.User.ID == models.PackageBase.MaintainerUID) + .order_by(models.PackageBase.ModifiedTS.desc(), models.Package.Name.desc()) + .limit(50) + .all() + ) # Packages that request.user has voted for. - context["packages_voted"] = query_voted( - context.get("packages"), request.user) + context["packages_voted"] = query_voted(context.get("packages"), request.user) # Packages that request.user is being notified about. context["packages_notified"] = query_notified( - context.get("packages"), request.user) + context.get("packages"), request.user + ) # Any packages that the request user comaintains. 
- context["comaintained"] = packages.join( - models.PackageComaintainer - ).filter( - models.PackageComaintainer.UsersID == request.user.ID - ).order_by( - models.PackageBase.ModifiedTS.desc(), models.Package.Name.desc() - ).limit(50).all() + context["comaintained"] = ( + packages.join(models.PackageComaintainer) + .filter(models.PackageComaintainer.UsersID == request.user.ID) + .order_by(models.PackageBase.ModifiedTS.desc(), models.Package.Name.desc()) + .limit(50) + .all() + ) # Comaintained packages that request.user has voted for. context["comaintained_voted"] = query_voted( - context.get("comaintained"), request.user) + context.get("comaintained"), request.user + ) # Comaintained packages that request.user is being notified about. context["comaintained_notified"] = query_notified( - context.get("comaintained"), request.user) + context.get("comaintained"), request.user + ) return render_template(request, "index.html", context) @@ -232,16 +270,15 @@ async def archive_sha256(request: Request, archive: str): @router.get("/metrics") async def metrics(request: Request): if not os.environ.get("PROMETHEUS_MULTIPROC_DIR", None): - return Response("Prometheus metrics are not enabled.", - status_code=HTTPStatus.SERVICE_UNAVAILABLE) + return Response( + "Prometheus metrics are not enabled.", + status_code=HTTPStatus.SERVICE_UNAVAILABLE, + ) registry = CollectorRegistry() multiprocess.MultiProcessCollector(registry) data = generate_latest(registry) - headers = { - "Content-Type": CONTENT_TYPE_LATEST, - "Content-Length": str(len(data)) - } + headers = {"Content-Type": CONTENT_TYPE_LATEST, "Content-Length": str(len(data))} return Response(data, headers=headers) diff --git a/aurweb/routers/packages.py b/aurweb/routers/packages.py index 7bf4e3d4..55d2abf5 100644 --- a/aurweb/routers/packages.py +++ b/aurweb/routers/packages.py @@ -5,7 +5,6 @@ from typing import Any from fastapi import APIRouter, Form, Query, Request, Response import aurweb.filters # noqa: F401 - from aurweb 
import config, db, defaults, logging, models, util from aurweb.auth import creds, requires_auth from aurweb.exceptions import InvariantError, handle_form_exceptions @@ -13,23 +12,24 @@ from aurweb.models.relation_type import CONFLICTS_ID, PROVIDES_ID, REPLACES_ID from aurweb.packages import util as pkgutil from aurweb.packages.search import PackageSearch from aurweb.packages.util import get_pkg_or_base -from aurweb.pkgbase import actions as pkgbase_actions -from aurweb.pkgbase import util as pkgbaseutil +from aurweb.pkgbase import actions as pkgbase_actions, util as pkgbaseutil from aurweb.templates import make_context, make_variable_context, render_template logger = logging.get_logger(__name__) router = APIRouter() -async def packages_get(request: Request, context: dict[str, Any], - status_code: HTTPStatus = HTTPStatus.OK): +async def packages_get( + request: Request, context: dict[str, Any], status_code: HTTPStatus = HTTPStatus.OK +): # Query parameters used in this request. context["q"] = dict(request.query_params) # Per page and offset. offset, per_page = util.sanitize_params( request.query_params.get("O", defaults.O), - request.query_params.get("PP", defaults.PP)) + request.query_params.get("PP", defaults.PP), + ) context["O"] = offset # Limit PP to options.max_search_results @@ -82,8 +82,7 @@ async def packages_get(request: Request, context: dict[str, Any], if submit == "Orphans": # If the user clicked the "Orphans" button, we only want # orphaned packages. - search.query = search.query.filter( - models.PackageBase.MaintainerUID.is_(None)) + search.query = search.query.filter(models.PackageBase.MaintainerUID.is_(None)) # Collect search result count here; we've applied our keywords. # Including more query operations below, like ordering, will @@ -94,26 +93,31 @@ async def packages_get(request: Request, context: dict[str, Any], search.sort_by(sort_by, sort_order) # Insert search results into the context. 
- results = search.results().with_entities( - models.Package.ID, - models.Package.Name, - models.Package.PackageBaseID, - models.Package.Version, - models.Package.Description, - models.PackageBase.Popularity, - models.PackageBase.NumVotes, - models.PackageBase.OutOfDateTS, - models.User.Username.label("Maintainer"), - models.PackageVote.PackageBaseID.label("Voted"), - models.PackageNotification.PackageBaseID.label("Notify") - ).group_by(models.Package.Name) + results = ( + search.results() + .with_entities( + models.Package.ID, + models.Package.Name, + models.Package.PackageBaseID, + models.Package.Version, + models.Package.Description, + models.PackageBase.Popularity, + models.PackageBase.NumVotes, + models.PackageBase.OutOfDateTS, + models.User.Username.label("Maintainer"), + models.PackageVote.PackageBaseID.label("Voted"), + models.PackageNotification.PackageBaseID.label("Notify"), + ) + .group_by(models.Package.Name) + ) packages = results.limit(per_page).offset(offset) context["packages"] = packages context["packages_count"] = num_packages - return render_template(request, "packages/index.html", context, - status_code=status_code) + return render_template( + request, "packages/index.html", context, status_code=status_code + ) @router.get("/packages") @@ -123,9 +127,12 @@ async def packages(request: Request) -> Response: @router.get("/packages/{name}") -async def package(request: Request, name: str, - all_deps: bool = Query(default=False), - all_reqs: bool = Query(default=False)) -> Response: +async def package( + request: Request, + name: str, + all_deps: bool = Query(default=False), + all_reqs: bool = Query(default=False), +) -> Response: """ Get a package by name. @@ -156,26 +163,21 @@ async def package(request: Request, name: str, # Add our base information. 
context = await pkgbaseutil.make_variable_context(request, pkgbase) - context.update( - { - "all_deps": all_deps, - "all_reqs": all_reqs - } - ) + context.update({"all_deps": all_deps, "all_reqs": all_reqs}) context["package"] = pkg # Package sources. context["sources"] = pkg.package_sources.order_by( - models.PackageSource.Source.asc()).all() + models.PackageSource.Source.asc() + ).all() # Listing metadata. context["max_listing"] = max_listing = 20 # Package dependencies. deps = pkg.package_dependencies.order_by( - models.PackageDependency.DepTypeID.asc(), - models.PackageDependency.DepName.asc() + models.PackageDependency.DepTypeID.asc(), models.PackageDependency.DepName.asc() ) context["depends_count"] = deps.count() if not all_deps: @@ -183,8 +185,7 @@ async def package(request: Request, name: str, context["dependencies"] = deps.all() # Package requirements (other packages depend on this one). - reqs = pkgutil.pkg_required( - pkg.Name, [p.RelName for p in rels_data.get("p", [])]) + reqs = pkgutil.pkg_required(pkg.Name, [p.RelName for p in rels_data.get("p", [])]) context["reqs_count"] = reqs.count() if not all_reqs: reqs = reqs.limit(max_listing) @@ -210,8 +211,7 @@ async def package(request: Request, name: str, return render_template(request, "packages/show.html", context) -async def packages_unflag(request: Request, package_ids: list[int] = [], - **kwargs): +async def packages_unflag(request: Request, package_ids: list[int] = [], **kwargs): if not package_ids: return (False, ["You did not select any packages to unflag."]) @@ -220,11 +220,11 @@ async def packages_unflag(request: Request, package_ids: list[int] = [], bases = set() package_ids = set(package_ids) # Convert this to a set for O(1). 
- packages = db.query(models.Package).filter( - models.Package.ID.in_(package_ids)).all() + packages = db.query(models.Package).filter(models.Package.ID.in_(package_ids)).all() for pkg in packages: has_cred = request.user.has_credential( - creds.PKGBASE_UNFLAG, approved=[pkg.PackageBase.Flagger]) + creds.PKGBASE_UNFLAG, approved=[pkg.PackageBase.Flagger] + ) if not has_cred: return (False, ["You did not select any packages to unflag."]) @@ -236,20 +236,17 @@ async def packages_unflag(request: Request, package_ids: list[int] = [], return (True, ["The selected packages have been unflagged."]) -async def packages_notify(request: Request, package_ids: list[int] = [], - **kwargs): +async def packages_notify(request: Request, package_ids: list[int] = [], **kwargs): # In cases where we encounter errors with the request, we'll # use this error tuple as a return value. # TODO: This error does not yet have a translation. - error_tuple = (False, - ["You did not select any packages to be notified about."]) + error_tuple = (False, ["You did not select any packages to be notified about."]) if not package_ids: return error_tuple bases = set() package_ids = set(package_ids) - packages = db.query(models.Package).filter( - models.Package.ID.in_(package_ids)).all() + packages = db.query(models.Package).filter(models.Package.ID.in_(package_ids)).all() for pkg in packages: if pkg.PackageBase not in bases: @@ -257,9 +254,11 @@ async def packages_notify(request: Request, package_ids: list[int] = [], # Perform some checks on what the user selected for notify. 
for pkgbase in bases: - notif = db.query(pkgbase.notifications.filter( - models.PackageNotification.UserID == request.user.ID - ).exists()).scalar() + notif = db.query( + pkgbase.notifications.filter( + models.PackageNotification.UserID == request.user.ID + ).exists() + ).scalar() has_cred = request.user.has_credential(creds.PKGBASE_NOTIFY) # If the request user either does not have credentials @@ -275,23 +274,20 @@ async def packages_notify(request: Request, package_ids: list[int] = [], return (True, ["The selected packages' notifications have been enabled."]) -async def packages_unnotify(request: Request, package_ids: list[int] = [], - **kwargs): +async def packages_unnotify(request: Request, package_ids: list[int] = [], **kwargs): if not package_ids: # TODO: This error does not yet have a translation. - return (False, - ["You did not select any packages for notification removal."]) + return (False, ["You did not select any packages for notification removal."]) # TODO: This error does not yet have a translation. error_tuple = ( False, - ["A package you selected does not have notifications enabled."] + ["A package you selected does not have notifications enabled."], ) bases = set() package_ids = set(package_ids) - packages = db.query(models.Package).filter( - models.Package.ID.in_(package_ids)).all() + packages = db.query(models.Package).filter(models.Package.ID.in_(package_ids)).all() for pkg in packages: if pkg.PackageBase not in bases: @@ -299,9 +295,11 @@ async def packages_unnotify(request: Request, package_ids: list[int] = [], # Perform some checks on what the user selected for notify. 
for pkgbase in bases: - notif = db.query(pkgbase.notifications.filter( - models.PackageNotification.UserID == request.user.ID - ).exists()).scalar() + notif = db.query( + pkgbase.notifications.filter( + models.PackageNotification.UserID == request.user.ID + ).exists() + ).scalar() if not notif: return error_tuple @@ -312,19 +310,24 @@ async def packages_unnotify(request: Request, package_ids: list[int] = [], return (True, ["The selected packages' notifications have been removed."]) -async def packages_adopt(request: Request, package_ids: list[int] = [], - confirm: bool = False, **kwargs): +async def packages_adopt( + request: Request, package_ids: list[int] = [], confirm: bool = False, **kwargs +): if not package_ids: return (False, ["You did not select any packages to adopt."]) if not confirm: - return (False, ["The selected packages have not been adopted, " - "check the confirmation checkbox."]) + return ( + False, + [ + "The selected packages have not been adopted, " + "check the confirmation checkbox." + ], + ) bases = set() package_ids = set(package_ids) - packages = db.query(models.Package).filter( - models.Package.ID.in_(package_ids)).all() + packages = db.query(models.Package).filter(models.Package.ID.in_(package_ids)).all() for pkg in packages: if pkg.PackageBase not in bases: @@ -335,8 +338,10 @@ async def packages_adopt(request: Request, package_ids: list[int] = [], has_cred = request.user.has_credential(creds.PKGBASE_ADOPT) if not (has_cred or not pkgbase.Maintainer): # TODO: This error needs to be translated. - return (False, ["You are not allowed to adopt one of the " - "packages you selected."]) + return ( + False, + ["You are not allowed to adopt one of the " "packages you selected."], + ) # Now, really adopt the bases. 
for pkgbase in bases: @@ -345,8 +350,7 @@ async def packages_adopt(request: Request, package_ids: list[int] = [], return (True, ["The selected packages have been adopted."]) -def disown_all(request: Request, pkgbases: list[models.PackageBase]) \ - -> list[str]: +def disown_all(request: Request, pkgbases: list[models.PackageBase]) -> list[str]: errors = [] for pkgbase in pkgbases: try: @@ -356,19 +360,24 @@ def disown_all(request: Request, pkgbases: list[models.PackageBase]) \ return errors -async def packages_disown(request: Request, package_ids: list[int] = [], - confirm: bool = False, **kwargs): +async def packages_disown( + request: Request, package_ids: list[int] = [], confirm: bool = False, **kwargs +): if not package_ids: return (False, ["You did not select any packages to disown."]) if not confirm: - return (False, ["The selected packages have not been disowned, " - "check the confirmation checkbox."]) + return ( + False, + [ + "The selected packages have not been disowned, " + "check the confirmation checkbox." + ], + ) bases = set() package_ids = set(package_ids) - packages = db.query(models.Package).filter( - models.Package.ID.in_(package_ids)).all() + packages = db.query(models.Package).filter(models.Package.ID.in_(package_ids)).all() for pkg in packages: if pkg.PackageBase not in bases: @@ -376,12 +385,15 @@ async def packages_disown(request: Request, package_ids: list[int] = [], # Check that the user has credentials for every package they selected. for pkgbase in bases: - has_cred = request.user.has_credential(creds.PKGBASE_DISOWN, - approved=[pkgbase.Maintainer]) + has_cred = request.user.has_credential( + creds.PKGBASE_DISOWN, approved=[pkgbase.Maintainer] + ) if not has_cred: # TODO: This error needs to be translated. - return (False, ["You are not allowed to disown one " - "of the packages you selected."]) + return ( + False, + ["You are not allowed to disown one " "of the packages you selected."], + ) # Now, disown all the bases if we can. 
if errors := disown_all(request, bases): @@ -390,23 +402,31 @@ async def packages_disown(request: Request, package_ids: list[int] = [], return (True, ["The selected packages have been disowned."]) -async def packages_delete(request: Request, package_ids: list[int] = [], - confirm: bool = False, merge_into: str = str(), - **kwargs): +async def packages_delete( + request: Request, + package_ids: list[int] = [], + confirm: bool = False, + merge_into: str = str(), + **kwargs, +): if not package_ids: return (False, ["You did not select any packages to delete."]) if not confirm: - return (False, ["The selected packages have not been deleted, " - "check the confirmation checkbox."]) + return ( + False, + [ + "The selected packages have not been deleted, " + "check the confirmation checkbox." + ], + ) if not request.user.has_credential(creds.PKGBASE_DELETE): return (False, ["You do not have permission to delete packages."]) # set-ify package_ids and query the database for related records. package_ids = set(package_ids) - packages = db.query(models.Package).filter( - models.Package.ID.in_(package_ids)).all() + packages = db.query(models.Package).filter(models.Package.ID.in_(package_ids)).all() if len(packages) != len(package_ids): # Let the user know there was an issue with their input: they have @@ -422,12 +442,15 @@ async def packages_delete(request: Request, package_ids: list[int] = [], notifs += pkgbase_actions.pkgbase_delete_instance(request, pkgbase) # Log out the fact that this happened for accountability. - logger.info(f"Privileged user '{request.user.Username}' deleted the " - f"following package bases: {str(deleted_bases)}.") + logger.info( + f"Privileged user '{request.user.Username}' deleted the " + f"following package bases: {str(deleted_bases)}." + ) util.apply_all(notifs, lambda n: n.send()) return (True, ["The selected packages have been deleted."]) + # A mapping of action string -> callback functions used within the # `packages_post` route below. 
We expect any action callback to # return a tuple in the format: (succeeded: bool, message: list[str]). @@ -444,10 +467,12 @@ PACKAGE_ACTIONS = { @router.post("/packages") @handle_form_exceptions @requires_auth -async def packages_post(request: Request, - IDs: list[int] = Form(default=[]), - action: str = Form(default=str()), - confirm: bool = Form(default=False)): +async def packages_post( + request: Request, + IDs: list[int] = Form(default=[]), + action: str = Form(default=str()), + confirm: bool = Form(default=False), +): # If an invalid action is specified, just render GET /packages # with an BAD_REQUEST status_code. diff --git a/aurweb/routers/pkgbase.py b/aurweb/routers/pkgbase.py index 1f09cfc8..913e3955 100644 --- a/aurweb/routers/pkgbase.py +++ b/aurweb/routers/pkgbase.py @@ -16,9 +16,7 @@ from aurweb.models.package_vote import PackageVote from aurweb.models.request_type import DELETION_ID, MERGE_ID, ORPHAN_ID from aurweb.packages.requests import update_closure_comment from aurweb.packages.util import get_pkg_or_base, get_pkgbase_comment -from aurweb.pkgbase import actions -from aurweb.pkgbase import util as pkgbaseutil -from aurweb.pkgbase import validate +from aurweb.pkgbase import actions, util as pkgbaseutil, validate from aurweb.scripts import notify, popupdate from aurweb.scripts.rendercomment import update_comment_render_fastapi from aurweb.templates import make_variable_context, render_template @@ -44,8 +42,9 @@ async def pkgbase(request: Request, name: str) -> Response: packages = pkgbase.packages.all() pkg = packages[0] if len(packages) == 1 and pkg.Name == pkgbase.Name: - return RedirectResponse(f"/packages/{pkg.Name}", - status_code=int(HTTPStatus.SEE_OTHER)) + return RedirectResponse( + f"/packages/{pkg.Name}", status_code=int(HTTPStatus.SEE_OTHER) + ) # Add our base information. 
context = pkgbaseutil.make_context(request, pkgbase) @@ -69,8 +68,7 @@ async def pkgbase_voters(request: Request, name: str) -> Response: pkgbase = get_pkg_or_base(name, PackageBase) if not request.user.has_credential(creds.PKGBASE_LIST_VOTERS): - return RedirectResponse(f"/pkgbase/{name}", - status_code=HTTPStatus.SEE_OTHER) + return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) context = templates.make_context(request, "Voters") context["pkgbase"] = pkgbase @@ -82,8 +80,7 @@ async def pkgbase_flag_comment(request: Request, name: str): pkgbase = get_pkg_or_base(name, PackageBase) if pkgbase.OutOfDateTS is None: - return RedirectResponse(f"/pkgbase/{name}", - status_code=HTTPStatus.SEE_OTHER) + return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) context = templates.make_context(request, "Flag Comment") context["pkgbase"] = pkgbase @@ -92,13 +89,15 @@ async def pkgbase_flag_comment(request: Request, name: str): @router.post("/pkgbase/{name}/keywords") @handle_form_exceptions -async def pkgbase_keywords(request: Request, name: str, - keywords: str = Form(default=str())): +async def pkgbase_keywords( + request: Request, name: str, keywords: str = Form(default=str()) +): pkgbase = get_pkg_or_base(name, PackageBase) approved = [pkgbase.Maintainer] + [c.User for c in pkgbase.comaintainers] - has_cred = creds.has_credential(request.user, creds.PKGBASE_SET_KEYWORDS, - approved=approved) + has_cred = creds.has_credential( + request.user, creds.PKGBASE_SET_KEYWORDS, approved=approved + ) if not has_cred: return Response(status_code=HTTPStatus.UNAUTHORIZED) @@ -108,15 +107,14 @@ async def pkgbase_keywords(request: Request, name: str, # Delete all keywords which are not supplied by the user. 
with db.begin(): - other_keywords = pkgbase.keywords.filter( - ~PackageKeyword.Keyword.in_(keywords)) - other_keyword_strings = set( - kwd.Keyword.lower() for kwd in other_keywords) + other_keywords = pkgbase.keywords.filter(~PackageKeyword.Keyword.in_(keywords)) + other_keyword_strings = set(kwd.Keyword.lower() for kwd in other_keywords) existing_keywords = set( - kwd.Keyword.lower() for kwd in - pkgbase.keywords.filter( - ~PackageKeyword.Keyword.in_(other_keyword_strings)) + kwd.Keyword.lower() + for kwd in pkgbase.keywords.filter( + ~PackageKeyword.Keyword.in_(other_keyword_strings) + ) ) db.delete_all(other_keywords) @@ -124,8 +122,7 @@ async def pkgbase_keywords(request: Request, name: str, for keyword in new_keywords: db.create(PackageKeyword, PackageBase=pkgbase, Keyword=keyword) - return RedirectResponse(f"/pkgbase/{name}", - status_code=HTTPStatus.SEE_OTHER) + return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) @router.get("/pkgbase/{name}/flag") @@ -135,8 +132,7 @@ async def pkgbase_flag_get(request: Request, name: str): has_cred = request.user.has_credential(creds.PKGBASE_FLAG) if not has_cred or pkgbase.OutOfDateTS is not None: - return RedirectResponse(f"/pkgbase/{name}", - status_code=HTTPStatus.SEE_OTHER) + return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) context = templates.make_context(request, "Flag Package Out-Of-Date") context["pkgbase"] = pkgbase @@ -146,17 +142,20 @@ async def pkgbase_flag_get(request: Request, name: str): @router.post("/pkgbase/{name}/flag") @handle_form_exceptions @requires_auth -async def pkgbase_flag_post(request: Request, name: str, - comments: str = Form(default=str())): +async def pkgbase_flag_post( + request: Request, name: str, comments: str = Form(default=str()) +): pkgbase = get_pkg_or_base(name, PackageBase) if not comments: context = templates.make_context(request, "Flag Package Out-Of-Date") context["pkgbase"] = pkgbase - context["errors"] = ["The selected 
packages have not been flagged, " - "please enter a comment."] - return render_template(request, "pkgbase/flag.html", context, - status_code=HTTPStatus.BAD_REQUEST) + context["errors"] = [ + "The selected packages have not been flagged, " "please enter a comment." + ] + return render_template( + request, "pkgbase/flag.html", context, status_code=HTTPStatus.BAD_REQUEST + ) has_cred = request.user.has_credential(creds.PKGBASE_FLAG) if has_cred and not pkgbase.OutOfDateTS: @@ -168,18 +167,19 @@ async def pkgbase_flag_post(request: Request, name: str, notify.FlagNotification(request.user.ID, pkgbase.ID).send() - return RedirectResponse(f"/pkgbase/{name}", - status_code=HTTPStatus.SEE_OTHER) + return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) @router.post("/pkgbase/{name}/comments") @handle_form_exceptions @requires_auth async def pkgbase_comments_post( - request: Request, name: str, - comment: str = Form(default=str()), - enable_notifications: bool = Form(default=False)): - """ Add a new comment via POST request. """ + request: Request, + name: str, + comment: str = Form(default=str()), + enable_notifications: bool = Form(default=False), +): + """Add a new comment via POST request.""" pkgbase = get_pkg_or_base(name, PackageBase) if not comment: @@ -189,29 +189,34 @@ async def pkgbase_comments_post( # update the db record. 
now = time.utcnow() with db.begin(): - comment = db.create(PackageComment, User=request.user, - PackageBase=pkgbase, - Comments=comment, RenderedComment=str(), - CommentTS=now) + comment = db.create( + PackageComment, + User=request.user, + PackageBase=pkgbase, + Comments=comment, + RenderedComment=str(), + CommentTS=now, + ) if enable_notifications and not request.user.notified(pkgbase): - db.create(PackageNotification, - User=request.user, - PackageBase=pkgbase) + db.create(PackageNotification, User=request.user, PackageBase=pkgbase) update_comment_render_fastapi(comment) notif = notify.CommentNotification(request.user.ID, pkgbase.ID, comment.ID) notif.send() # Redirect to the pkgbase page. - return RedirectResponse(f"/pkgbase/{pkgbase.Name}#comment-{comment.ID}", - status_code=HTTPStatus.SEE_OTHER) + return RedirectResponse( + f"/pkgbase/{pkgbase.Name}#comment-{comment.ID}", + status_code=HTTPStatus.SEE_OTHER, + ) @router.get("/pkgbase/{name}/comments/{id}/form") @requires_auth -async def pkgbase_comment_form(request: Request, name: str, id: int, - next: str = Query(default=None)): +async def pkgbase_comment_form( + request: Request, name: str, id: int, next: str = Query(default=None) +): """ Produce a comment form for comment {id}. @@ -244,14 +249,16 @@ async def pkgbase_comment_form(request: Request, name: str, id: int, context["next"] = next form = templates.render_raw_template( - request, "partials/packages/comment_form.html", context) + request, "partials/packages/comment_form.html", context + ) return JSONResponse({"form": form}) @router.get("/pkgbase/{name}/comments/{id}/edit") @requires_auth -async def pkgbase_comment_edit(request: Request, name: str, id: int, - next: str = Form(default=None)): +async def pkgbase_comment_edit( + request: Request, name: str, id: int, next: str = Form(default=None) +): """ Render the non-javascript edit form. 
@@ -276,11 +283,14 @@ async def pkgbase_comment_edit(request: Request, name: str, id: int, @handle_form_exceptions @requires_auth async def pkgbase_comment_post( - request: Request, name: str, id: int, - comment: str = Form(default=str()), - enable_notifications: bool = Form(default=False), - next: str = Form(default=None)): - """ Edit an existing comment. """ + request: Request, + name: str, + id: int, + comment: str = Form(default=str()), + enable_notifications: bool = Form(default=False), + next: str = Form(default=None), +): + """Edit an existing comment.""" pkgbase = get_pkg_or_base(name, PackageBase) db_comment = get_pkgbase_comment(pkgbase, id) @@ -302,24 +312,24 @@ async def pkgbase_comment_post( PackageNotification.PackageBaseID == pkgbase.ID ).first() if enable_notifications and not db_notif: - db.create(PackageNotification, - User=request.user, - PackageBase=pkgbase) + db.create(PackageNotification, User=request.user, PackageBase=pkgbase) update_comment_render_fastapi(db_comment) if not next: next = f"/pkgbase/{pkgbase.Name}" # Redirect to the pkgbase page anchored to the updated comment. - return RedirectResponse(f"{next}#comment-{db_comment.ID}", - status_code=HTTPStatus.SEE_OTHER) + return RedirectResponse( + f"{next}#comment-{db_comment.ID}", status_code=HTTPStatus.SEE_OTHER + ) @router.post("/pkgbase/{name}/comments/{id}/pin") @handle_form_exceptions @requires_auth -async def pkgbase_comment_pin(request: Request, name: str, id: int, - next: str = Form(default=None)): +async def pkgbase_comment_pin( + request: Request, name: str, id: int, next: str = Form(default=None) +): """ Pin a comment. 
@@ -332,13 +342,15 @@ async def pkgbase_comment_pin(request: Request, name: str, id: int, pkgbase = get_pkg_or_base(name, PackageBase) comment = get_pkgbase_comment(pkgbase, id) - has_cred = request.user.has_credential(creds.COMMENT_PIN, - approved=comment.maintainers()) + has_cred = request.user.has_credential( + creds.COMMENT_PIN, approved=comment.maintainers() + ) if not has_cred: _ = l10n.get_translator_for_request(request) raise HTTPException( status_code=HTTPStatus.UNAUTHORIZED, - detail=_("You are not allowed to pin this comment.")) + detail=_("You are not allowed to pin this comment."), + ) now = time.utcnow() with db.begin(): @@ -353,8 +365,9 @@ async def pkgbase_comment_pin(request: Request, name: str, id: int, @router.post("/pkgbase/{name}/comments/{id}/unpin") @handle_form_exceptions @requires_auth -async def pkgbase_comment_unpin(request: Request, name: str, id: int, - next: str = Form(default=None)): +async def pkgbase_comment_unpin( + request: Request, name: str, id: int, next: str = Form(default=None) +): """ Unpin a comment. 
@@ -367,13 +380,15 @@ async def pkgbase_comment_unpin(request: Request, name: str, id: int, pkgbase = get_pkg_or_base(name, PackageBase) comment = get_pkgbase_comment(pkgbase, id) - has_cred = request.user.has_credential(creds.COMMENT_PIN, - approved=comment.maintainers()) + has_cred = request.user.has_credential( + creds.COMMENT_PIN, approved=comment.maintainers() + ) if not has_cred: _ = l10n.get_translator_for_request(request) raise HTTPException( status_code=HTTPStatus.UNAUTHORIZED, - detail=_("You are not allowed to unpin this comment.")) + detail=_("You are not allowed to unpin this comment."), + ) with db.begin(): comment.PinnedTS = 0 @@ -387,8 +402,9 @@ async def pkgbase_comment_unpin(request: Request, name: str, id: int, @router.post("/pkgbase/{name}/comments/{id}/delete") @handle_form_exceptions @requires_auth -async def pkgbase_comment_delete(request: Request, name: str, id: int, - next: str = Form(default=None)): +async def pkgbase_comment_delete( + request: Request, name: str, id: int, next: str = Form(default=None) +): """ Delete a comment. 
@@ -405,13 +421,13 @@ async def pkgbase_comment_delete(request: Request, name: str, id: int, pkgbase = get_pkg_or_base(name, PackageBase) comment = get_pkgbase_comment(pkgbase, id) - authorized = request.user.has_credential(creds.COMMENT_DELETE, - [comment.User]) + authorized = request.user.has_credential(creds.COMMENT_DELETE, [comment.User]) if not authorized: _ = l10n.get_translator_for_request(request) raise HTTPException( status_code=HTTPStatus.UNAUTHORIZED, - detail=_("You are not allowed to delete this comment.")) + detail=_("You are not allowed to delete this comment."), + ) now = time.utcnow() with db.begin(): @@ -427,8 +443,9 @@ async def pkgbase_comment_delete(request: Request, name: str, id: int, @router.post("/pkgbase/{name}/comments/{id}/undelete") @handle_form_exceptions @requires_auth -async def pkgbase_comment_undelete(request: Request, name: str, id: int, - next: str = Form(default=None)): +async def pkgbase_comment_undelete( + request: Request, name: str, id: int, next: str = Form(default=None) +): """ Undelete a comment. 
@@ -445,13 +462,15 @@ async def pkgbase_comment_undelete(request: Request, name: str, id: int, pkgbase = get_pkg_or_base(name, PackageBase) comment = get_pkgbase_comment(pkgbase, id) - has_cred = request.user.has_credential(creds.COMMENT_UNDELETE, - approved=[comment.User]) + has_cred = request.user.has_credential( + creds.COMMENT_UNDELETE, approved=[comment.User] + ) if not has_cred: _ = l10n.get_translator_for_request(request) raise HTTPException( status_code=HTTPStatus.UNAUTHORIZED, - detail=_("You are not allowed to undelete this comment.")) + detail=_("You are not allowed to undelete this comment."), + ) with db.begin(): comment.Deleter = None @@ -469,23 +488,17 @@ async def pkgbase_comment_undelete(request: Request, name: str, id: int, async def pkgbase_vote(request: Request, name: str): pkgbase = get_pkg_or_base(name, PackageBase) - vote = pkgbase.package_votes.filter( - PackageVote.UsersID == request.user.ID - ).first() + vote = pkgbase.package_votes.filter(PackageVote.UsersID == request.user.ID).first() has_cred = request.user.has_credential(creds.PKGBASE_VOTE) if has_cred and not vote: now = time.utcnow() with db.begin(): - db.create(PackageVote, - User=request.user, - PackageBase=pkgbase, - VoteTS=now) + db.create(PackageVote, User=request.user, PackageBase=pkgbase, VoteTS=now) # Update NumVotes/Popularity. 
popupdate.run_single(pkgbase) - return RedirectResponse(f"/pkgbase/{name}", - status_code=HTTPStatus.SEE_OTHER) + return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) @router.post("/pkgbase/{name}/unvote") @@ -494,9 +507,7 @@ async def pkgbase_vote(request: Request, name: str): async def pkgbase_unvote(request: Request, name: str): pkgbase = get_pkg_or_base(name, PackageBase) - vote = pkgbase.package_votes.filter( - PackageVote.UsersID == request.user.ID - ).first() + vote = pkgbase.package_votes.filter(PackageVote.UsersID == request.user.ID).first() has_cred = request.user.has_credential(creds.PKGBASE_VOTE) if has_cred and vote: with db.begin(): @@ -505,8 +516,7 @@ async def pkgbase_unvote(request: Request, name: str): # Update NumVotes/Popularity. popupdate.run_single(pkgbase) - return RedirectResponse(f"/pkgbase/{name}", - status_code=HTTPStatus.SEE_OTHER) + return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) @router.post("/pkgbase/{name}/notify") @@ -515,8 +525,7 @@ async def pkgbase_unvote(request: Request, name: str): async def pkgbase_notify(request: Request, name: str): pkgbase = get_pkg_or_base(name, PackageBase) actions.pkgbase_notify_instance(request, pkgbase) - return RedirectResponse(f"/pkgbase/{name}", - status_code=HTTPStatus.SEE_OTHER) + return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) @router.post("/pkgbase/{name}/unnotify") @@ -525,8 +534,7 @@ async def pkgbase_notify(request: Request, name: str): async def pkgbase_unnotify(request: Request, name: str): pkgbase = get_pkg_or_base(name, PackageBase) actions.pkgbase_unnotify_instance(request, pkgbase) - return RedirectResponse(f"/pkgbase/{name}", - status_code=HTTPStatus.SEE_OTHER) + return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) @router.post("/pkgbase/{name}/unflag") @@ -535,20 +543,19 @@ async def pkgbase_unnotify(request: Request, name: str): async def pkgbase_unflag(request: Request, 
name: str): pkgbase = get_pkg_or_base(name, PackageBase) actions.pkgbase_unflag_instance(request, pkgbase) - return RedirectResponse(f"/pkgbase/{name}", - status_code=HTTPStatus.SEE_OTHER) + return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) @router.get("/pkgbase/{name}/disown") @requires_auth -async def pkgbase_disown_get(request: Request, name: str, - next: str = Query(default=str())): +async def pkgbase_disown_get( + request: Request, name: str, next: str = Query(default=str()) +): pkgbase = get_pkg_or_base(name, PackageBase) comaints = {c.User for c in pkgbase.comaintainers} approved = [pkgbase.Maintainer] + list(comaints) - has_cred = request.user.has_credential(creds.PKGBASE_DISOWN, - approved=approved) + has_cred = request.user.has_credential(creds.PKGBASE_DISOWN, approved=approved) if not has_cred: return RedirectResponse(f"/pkgbase/{name}", HTTPStatus.SEE_OTHER) @@ -563,27 +570,33 @@ async def pkgbase_disown_get(request: Request, name: str, @router.post("/pkgbase/{name}/disown") @handle_form_exceptions @requires_auth -async def pkgbase_disown_post(request: Request, name: str, - comments: str = Form(default=str()), - confirm: bool = Form(default=False), - next: str = Form(default=str())): +async def pkgbase_disown_post( + request: Request, + name: str, + comments: str = Form(default=str()), + confirm: bool = Form(default=False), + next: str = Form(default=str()), +): pkgbase = get_pkg_or_base(name, PackageBase) comaints = {c.User for c in pkgbase.comaintainers} approved = [pkgbase.Maintainer] + list(comaints) - has_cred = request.user.has_credential(creds.PKGBASE_DISOWN, - approved=approved) + has_cred = request.user.has_credential(creds.PKGBASE_DISOWN, approved=approved) if not has_cred: - return RedirectResponse(f"/pkgbase/{name}", - HTTPStatus.SEE_OTHER) + return RedirectResponse(f"/pkgbase/{name}", HTTPStatus.SEE_OTHER) context = templates.make_context(request, "Disown Package") context["pkgbase"] = pkgbase if not confirm: - 
context["errors"] = [("The selected packages have not been disowned, " - "check the confirmation checkbox.")] - return render_template(request, "pkgbase/disown.html", context, - status_code=HTTPStatus.BAD_REQUEST) + context["errors"] = [ + ( + "The selected packages have not been disowned, " + "check the confirmation checkbox." + ) + ] + return render_template( + request, "pkgbase/disown.html", context, status_code=HTTPStatus.BAD_REQUEST + ) if request.user != pkgbase.Maintainer and request.user not in comaints: with db.begin(): @@ -593,8 +606,9 @@ async def pkgbase_disown_post(request: Request, name: str, actions.pkgbase_disown_instance(request, pkgbase) except InvariantError as exc: context["errors"] = [str(exc)] - return render_template(request, "pkgbase/disown.html", context, - status_code=HTTPStatus.BAD_REQUEST) + return render_template( + request, "pkgbase/disown.html", context, status_code=HTTPStatus.BAD_REQUEST + ) if not next: next = f"/pkgbase/{name}" @@ -615,8 +629,7 @@ async def pkgbase_adopt_post(request: Request, name: str): # if no maintainer currently exists. actions.pkgbase_adopt_instance(request, pkgbase) - return RedirectResponse(f"/pkgbase/{name}", - status_code=HTTPStatus.SEE_OTHER) + return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) @router.get("/pkgbase/{name}/comaintainers") @@ -627,20 +640,20 @@ async def pkgbase_comaintainers(request: Request, name: str) -> Response: # Unauthorized users (Non-TU/Dev and not the pkgbase maintainer) # get redirected to the package base's page. - has_creds = request.user.has_credential(creds.PKGBASE_EDIT_COMAINTAINERS, - approved=[pkgbase.Maintainer]) + has_creds = request.user.has_credential( + creds.PKGBASE_EDIT_COMAINTAINERS, approved=[pkgbase.Maintainer] + ) if not has_creds: - return RedirectResponse(f"/pkgbase/{name}", - status_code=HTTPStatus.SEE_OTHER) + return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) # Add our base information. 
context = templates.make_context(request, "Manage Co-maintainers") - context.update({ - "pkgbase": pkgbase, - "comaintainers": [ - c.User.Username for c in pkgbase.comaintainers - ] - }) + context.update( + { + "pkgbase": pkgbase, + "comaintainers": [c.User.Username for c in pkgbase.comaintainers], + } + ) return render_template(request, "pkgbase/comaintainers.html", context) @@ -648,50 +661,52 @@ async def pkgbase_comaintainers(request: Request, name: str) -> Response: @router.post("/pkgbase/{name}/comaintainers") @handle_form_exceptions @requires_auth -async def pkgbase_comaintainers_post(request: Request, name: str, - users: str = Form(default=str())) \ - -> Response: +async def pkgbase_comaintainers_post( + request: Request, name: str, users: str = Form(default=str()) +) -> Response: # Get the PackageBase. pkgbase = get_pkg_or_base(name, PackageBase) # Unauthorized users (Non-TU/Dev and not the pkgbase maintainer) # get redirected to the package base's page. - has_creds = request.user.has_credential(creds.PKGBASE_EDIT_COMAINTAINERS, - approved=[pkgbase.Maintainer]) + has_creds = request.user.has_credential( + creds.PKGBASE_EDIT_COMAINTAINERS, approved=[pkgbase.Maintainer] + ) if not has_creds: - return RedirectResponse(f"/pkgbase/{name}", - status_code=HTTPStatus.SEE_OTHER) + return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) users = {e.strip() for e in users.split("\n") if bool(e.strip())} records = {c.User.Username for c in pkgbase.comaintainers} users_to_rm = records.difference(users) pkgbaseutil.remove_comaintainers(pkgbase, users_to_rm) - logger.debug(f"{request.user} removed comaintainers from " - f"{pkgbase.Name}: {users_to_rm}") + logger.debug( + f"{request.user} removed comaintainers from " f"{pkgbase.Name}: {users_to_rm}" + ) users_to_add = users.difference(records) error = pkgbaseutil.add_comaintainers(request, pkgbase, users_to_add) if error: context = templates.make_context(request, "Manage Co-maintainers") 
context["pkgbase"] = pkgbase - context["comaintainers"] = [ - c.User.Username for c in pkgbase.comaintainers - ] + context["comaintainers"] = [c.User.Username for c in pkgbase.comaintainers] context["errors"] = [error] return render_template(request, "pkgbase/comaintainers.html", context) - logger.debug(f"{request.user} added comaintainers to " - f"{pkgbase.Name}: {users_to_add}") + logger.debug( + f"{request.user} added comaintainers to " f"{pkgbase.Name}: {users_to_add}" + ) - return RedirectResponse(f"/pkgbase/{pkgbase.Name}", - status_code=HTTPStatus.SEE_OTHER) + return RedirectResponse( + f"/pkgbase/{pkgbase.Name}", status_code=HTTPStatus.SEE_OTHER + ) @router.get("/pkgbase/{name}/request") @requires_auth -async def pkgbase_request(request: Request, name: str, - next: str = Query(default=str())): +async def pkgbase_request( + request: Request, name: str, next: str = Query(default=str()) +): pkgbase = get_pkg_or_base(name, PackageBase) context = await make_variable_context(request, "Submit Request") context["pkgbase"] = pkgbase @@ -702,28 +717,28 @@ async def pkgbase_request(request: Request, name: str, @router.post("/pkgbase/{name}/request") @handle_form_exceptions @requires_auth -async def pkgbase_request_post(request: Request, name: str, - type: str = Form(...), - merge_into: str = Form(default=None), - comments: str = Form(default=str()), - next: str = Form(default=str())): +async def pkgbase_request_post( + request: Request, + name: str, + type: str = Form(...), + merge_into: str = Form(default=None), + comments: str = Form(default=str()), + next: str = Form(default=str()), +): pkgbase = get_pkg_or_base(name, PackageBase) # Create our render context. 
context = await make_variable_context(request, "Submit Request") context["pkgbase"] = pkgbase - types = { - "deletion": DELETION_ID, - "merge": MERGE_ID, - "orphan": ORPHAN_ID - } + types = {"deletion": DELETION_ID, "merge": MERGE_ID, "orphan": ORPHAN_ID} if type not in types: # In the case that someone crafted a POST request with an invalid # type, just return them to the request form with BAD_REQUEST status. - return render_template(request, "pkgbase/request.html", context, - status_code=HTTPStatus.BAD_REQUEST) + return render_template( + request, "pkgbase/request.html", context, status_code=HTTPStatus.BAD_REQUEST + ) try: validate.request(pkgbase, type, comments, merge_into, context) @@ -735,20 +750,26 @@ async def pkgbase_request_post(request: Request, name: str, # All good. Create a new PackageRequest based on the given type. now = time.utcnow() with db.begin(): - pkgreq = db.create(PackageRequest, - ReqTypeID=types.get(type), - User=request.user, - RequestTS=now, - PackageBase=pkgbase, - PackageBaseName=pkgbase.Name, - MergeBaseName=merge_into, - Comments=comments, - ClosureComment=str()) + pkgreq = db.create( + PackageRequest, + ReqTypeID=types.get(type), + User=request.user, + RequestTS=now, + PackageBase=pkgbase, + PackageBaseName=pkgbase.Name, + MergeBaseName=merge_into, + Comments=comments, + ClosureComment=str(), + ) # Prepare notification object. notif = notify.RequestOpenNotification( - request.user.ID, pkgreq.ID, type, - pkgreq.PackageBase.ID, merge_into=merge_into or None) + request.user.ID, + pkgreq.ID, + type, + pkgreq.PackageBase.ID, + merge_into=merge_into or None, + ) # Send the notification now that we're out of the DB scope. 
notif.send() @@ -767,13 +788,13 @@ async def pkgbase_request_post(request: Request, name: str, pkgbase.Maintainer = None pkgreq.Status = ACCEPTED_ID notif = notify.RequestCloseNotification( - request.user.ID, pkgreq.ID, pkgreq.status_display()) + request.user.ID, pkgreq.ID, pkgreq.status_display() + ) notif.send() logger.debug(f"New request #{pkgreq.ID} is marked for auto-orphan.") elif type == "deletion" and is_maintainer and outdated: # This request should be auto-accepted. - notifs = actions.pkgbase_delete_instance( - request, pkgbase, comments=comments) + notifs = actions.pkgbase_delete_instance(request, pkgbase, comments=comments) util.apply_all(notifs, lambda n: n.send()) logger.debug(f"New request #{pkgreq.ID} is marked for auto-deletion.") @@ -783,11 +804,11 @@ async def pkgbase_request_post(request: Request, name: str, @router.get("/pkgbase/{name}/delete") @requires_auth -async def pkgbase_delete_get(request: Request, name: str, - next: str = Query(default=str())): +async def pkgbase_delete_get( + request: Request, name: str, next: str = Query(default=str()) +): if not request.user.has_credential(creds.PKGBASE_DELETE): - return RedirectResponse(f"/pkgbase/{name}", - status_code=HTTPStatus.SEE_OTHER) + return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) context = templates.make_context(request, "Package Deletion") context["pkgbase"] = get_pkg_or_base(name, PackageBase) @@ -798,53 +819,60 @@ async def pkgbase_delete_get(request: Request, name: str, @router.post("/pkgbase/{name}/delete") @handle_form_exceptions @requires_auth -async def pkgbase_delete_post(request: Request, name: str, - confirm: bool = Form(default=False), - comments: str = Form(default=str()), - next: str = Form(default="/packages")): +async def pkgbase_delete_post( + request: Request, + name: str, + confirm: bool = Form(default=False), + comments: str = Form(default=str()), + next: str = Form(default="/packages"), +): pkgbase = get_pkg_or_base(name, PackageBase) if 
not request.user.has_credential(creds.PKGBASE_DELETE): - return RedirectResponse(f"/pkgbase/{name}", - status_code=HTTPStatus.SEE_OTHER) + return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) if not confirm: context = templates.make_context(request, "Package Deletion") context["pkgbase"] = pkgbase - context["errors"] = [("The selected packages have not been deleted, " - "check the confirmation checkbox.")] - return render_template(request, "pkgbase/delete.html", context, - status_code=HTTPStatus.BAD_REQUEST) + context["errors"] = [ + ( + "The selected packages have not been deleted, " + "check the confirmation checkbox." + ) + ] + return render_template( + request, "pkgbase/delete.html", context, status_code=HTTPStatus.BAD_REQUEST + ) if comments: # Update any existing deletion requests' ClosureComment. with db.begin(): requests = pkgbase.requests.filter( - and_(PackageRequest.Status == PENDING_ID, - PackageRequest.ReqTypeID == DELETION_ID) + and_( + PackageRequest.Status == PENDING_ID, + PackageRequest.ReqTypeID == DELETION_ID, + ) ) for pkgreq in requests: pkgreq.ClosureComment = comments - notifs = actions.pkgbase_delete_instance( - request, pkgbase, comments=comments) + notifs = actions.pkgbase_delete_instance(request, pkgbase, comments=comments) util.apply_all(notifs, lambda n: n.send()) return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER) @router.get("/pkgbase/{name}/merge") @requires_auth -async def pkgbase_merge_get(request: Request, name: str, - into: str = Query(default=str()), - next: str = Query(default=str())): +async def pkgbase_merge_get( + request: Request, + name: str, + into: str = Query(default=str()), + next: str = Query(default=str()), +): pkgbase = get_pkg_or_base(name, PackageBase) context = templates.make_context(request, "Package Merging") - context.update({ - "pkgbase": pkgbase, - "into": into, - "next": next - }) + context.update({"pkgbase": pkgbase, "into": into, "next": next}) status_code = 
HTTPStatus.OK # TODO: Lookup errors from credential instead of hardcoding them. @@ -852,51 +880,58 @@ async def pkgbase_merge_get(request: Request, name: str, # Perhaps additionally: bad_credential_status_code(creds.PKGBASE_MERGE). # Don't take these examples verbatim. We should find good naming. if not request.user.has_credential(creds.PKGBASE_MERGE): - context["errors"] = [ - "Only Trusted Users and Developers can merge packages."] + context["errors"] = ["Only Trusted Users and Developers can merge packages."] status_code = HTTPStatus.UNAUTHORIZED - return render_template(request, "pkgbase/merge.html", context, - status_code=status_code) + return render_template( + request, "pkgbase/merge.html", context, status_code=status_code + ) @router.post("/pkgbase/{name}/merge") @handle_form_exceptions @requires_auth -async def pkgbase_merge_post(request: Request, name: str, - into: str = Form(default=str()), - comments: str = Form(default=str()), - confirm: bool = Form(default=False), - next: str = Form(default=str())): +async def pkgbase_merge_post( + request: Request, + name: str, + into: str = Form(default=str()), + comments: str = Form(default=str()), + confirm: bool = Form(default=False), + next: str = Form(default=str()), +): pkgbase = get_pkg_or_base(name, PackageBase) context = await make_variable_context(request, "Package Merging") context["pkgbase"] = pkgbase # TODO: Lookup errors from credential instead of hardcoding them. 
if not request.user.has_credential(creds.PKGBASE_MERGE): - context["errors"] = [ - "Only Trusted Users and Developers can merge packages."] - return render_template(request, "pkgbase/merge.html", context, - status_code=HTTPStatus.UNAUTHORIZED) + context["errors"] = ["Only Trusted Users and Developers can merge packages."] + return render_template( + request, "pkgbase/merge.html", context, status_code=HTTPStatus.UNAUTHORIZED + ) if not confirm: - context["errors"] = ["The selected packages have not been deleted, " - "check the confirmation checkbox."] - return render_template(request, "pkgbase/merge.html", context, - status_code=HTTPStatus.BAD_REQUEST) + context["errors"] = [ + "The selected packages have not been deleted, " + "check the confirmation checkbox." + ] + return render_template( + request, "pkgbase/merge.html", context, status_code=HTTPStatus.BAD_REQUEST + ) try: target = get_pkg_or_base(into, PackageBase) except HTTPException: - context["errors"] = [ - "Cannot find package to merge votes and comments into."] - return render_template(request, "pkgbase/merge.html", context, - status_code=HTTPStatus.BAD_REQUEST) + context["errors"] = ["Cannot find package to merge votes and comments into."] + return render_template( + request, "pkgbase/merge.html", context, status_code=HTTPStatus.BAD_REQUEST + ) if pkgbase == target: context["errors"] = ["Cannot merge a package base with itself."] - return render_template(request, "pkgbase/merge.html", context, - status_code=HTTPStatus.BAD_REQUEST) + return render_template( + request, "pkgbase/merge.html", context, status_code=HTTPStatus.BAD_REQUEST + ) with db.begin(): update_closure_comment(pkgbase, MERGE_ID, comments, target=target) diff --git a/aurweb/routers/requests.py b/aurweb/routers/requests.py index 086aa3bc..c7935575 100644 --- a/aurweb/routers/requests.py +++ b/aurweb/routers/requests.py @@ -18,9 +18,11 @@ router = APIRouter() @router.get("/requests") @requires_auth -async def requests(request: Request, - O: 
int = Query(default=defaults.O), - PP: int = Query(default=defaults.PP)): +async def requests( + request: Request, + O: int = Query(default=defaults.O), + PP: int = Query(default=defaults.PP), +): context = make_context(request, "Requests") context["q"] = dict(request.query_params) @@ -30,8 +32,7 @@ async def requests(request: Request, context["PP"] = PP # A PackageRequest query, with left inner joined User and RequestType. - query = db.query(PackageRequest).join( - User, User.ID == PackageRequest.UsersID) + query = db.query(PackageRequest).join(User, User.ID == PackageRequest.UsersID) # If the request user is not elevated (TU or Dev), then # filter PackageRequests which are owned by the request user. @@ -39,12 +40,17 @@ async def requests(request: Request, query = query.filter(PackageRequest.UsersID == request.user.ID) context["total"] = query.count() - context["results"] = query.order_by( - # Order primarily by the Status column being PENDING_ID, - # and secondarily by RequestTS; both in descending order. - case([(PackageRequest.Status == PENDING_ID, 1)], else_=0).desc(), - PackageRequest.RequestTS.desc() - ).limit(PP).offset(O).all() + context["results"] = ( + query.order_by( + # Order primarily by the Status column being PENDING_ID, + # and secondarily by RequestTS; both in descending order. + case([(PackageRequest.Status == PENDING_ID, 1)], else_=0).desc(), + PackageRequest.RequestTS.desc(), + ) + .limit(PP) + .offset(O) + .all() + ) return render_template(request, "requests.html", context) @@ -66,8 +72,9 @@ async def request_close(request: Request, id: int): @router.post("/requests/{id}/close") @handle_form_exceptions @requires_auth -async def request_close_post(request: Request, id: int, - comments: str = Form(default=str())): +async def request_close_post( + request: Request, id: int, comments: str = Form(default=str()) +): pkgreq = get_pkgreq_by_id(id) # `pkgreq`.User can close their own request. 
@@ -87,7 +94,8 @@ async def request_close_post(request: Request, id: int, pkgreq.Status = REJECTED_ID notify_ = notify.RequestCloseNotification( - request.user.ID, pkgreq.ID, pkgreq.status_display()) + request.user.ID, pkgreq.ID, pkgreq.status_display() + ) notify_.send() return RedirectResponse("/requests", status_code=HTTPStatus.SEE_OTHER) diff --git a/aurweb/routers/rpc.py b/aurweb/routers/rpc.py index ff58063f..a0cf5019 100644 --- a/aurweb/routers/rpc.py +++ b/aurweb/routers/rpc.py @@ -1,12 +1,10 @@ import hashlib import re - from http import HTTPStatus from typing import Optional from urllib.parse import unquote import orjson - from fastapi import APIRouter, Form, Query, Request, Response from fastapi.responses import JSONResponse @@ -19,7 +17,7 @@ router = APIRouter() def parse_args(request: Request): - """ Handle legacy logic of 'arg' and 'arg[]' query parameter handling. + """Handle legacy logic of 'arg' and 'arg[]' query parameter handling. When 'arg' appears as the last argument given to the query string, that argument is used by itself as one single argument, regardless @@ -39,9 +37,7 @@ def parse_args(request: Request): # Create a list of (key, value) pairs of the given 'arg' and 'arg[]' # query parameters from last to first. 
query = list(reversed(unquote(request.url.query).split("&"))) - parts = [ - e.split("=", 1) for e in query if e.startswith(("arg=", "arg[]=")) - ] + parts = [e.split("=", 1) for e in query if e.startswith(("arg=", "arg[]="))] args = [] if parts: @@ -63,24 +59,28 @@ def parse_args(request: Request): return args -JSONP_EXPR = re.compile(r'^[a-zA-Z0-9()_.]{1,128}$') +JSONP_EXPR = re.compile(r"^[a-zA-Z0-9()_.]{1,128}$") -async def rpc_request(request: Request, - v: Optional[int] = None, - type: Optional[str] = None, - by: Optional[str] = defaults.RPC_SEARCH_BY, - arg: Optional[str] = None, - args: Optional[list[str]] = [], - callback: Optional[str] = None): +async def rpc_request( + request: Request, + v: Optional[int] = None, + type: Optional[str] = None, + by: Optional[str] = defaults.RPC_SEARCH_BY, + arg: Optional[str] = None, + args: Optional[list[str]] = [], + callback: Optional[str] = None, +): # Create a handle to our RPC class. rpc = RPC(version=v, type=type) # If ratelimit was exceeded, return a 429 Too Many Requests. if check_ratelimit(request): - return JSONResponse(rpc.error("Rate limit reached"), - status_code=int(HTTPStatus.TOO_MANY_REQUESTS)) + return JSONResponse( + rpc.error("Rate limit reached"), + status_code=int(HTTPStatus.TOO_MANY_REQUESTS), + ) # If `callback` was provided, produce a text/javascript response # valid for the jsonp callback. Otherwise, by default, return @@ -115,15 +115,11 @@ async def rpc_request(request: Request, # The ETag header expects quotes to surround any identifier. 
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/ETag - headers = { - "Content-Type": content_type, - "ETag": f'"{etag}"' - } + headers = {"Content-Type": content_type, "ETag": f'"{etag}"'} if_none_match = request.headers.get("If-None-Match", str()) - if if_none_match and if_none_match.strip("\t\n\r\" ") == etag: - return Response(headers=headers, - status_code=int(HTTPStatus.NOT_MODIFIED)) + if if_none_match and if_none_match.strip('\t\n\r" ') == etag: + return Response(headers=headers, status_code=int(HTTPStatus.NOT_MODIFIED)) if callback: content = f"/**/{callback}({content.decode()})" @@ -135,13 +131,15 @@ async def rpc_request(request: Request, @router.get("/rpc.php") # Temporary! Remove on 03/04 @router.get("/rpc/") @router.get("/rpc") -async def rpc(request: Request, - v: Optional[int] = Query(default=None), - type: Optional[str] = Query(default=None), - by: Optional[str] = Query(default=defaults.RPC_SEARCH_BY), - arg: Optional[str] = Query(default=None), - args: Optional[list[str]] = Query(default=[], alias="arg[]"), - callback: Optional[str] = Query(default=None)): +async def rpc( + request: Request, + v: Optional[int] = Query(default=None), + type: Optional[str] = Query(default=None), + by: Optional[str] = Query(default=defaults.RPC_SEARCH_BY), + arg: Optional[str] = Query(default=None), + args: Optional[list[str]] = Query(default=[], alias="arg[]"), + callback: Optional[str] = Query(default=None), +): if not request.url.query: return documentation() return await rpc_request(request, v, type, by, arg, args, callback) @@ -152,11 +150,13 @@ async def rpc(request: Request, @router.post("/rpc/") @router.post("/rpc") @handle_form_exceptions -async def rpc_post(request: Request, - v: Optional[int] = Form(default=None), - type: Optional[str] = Form(default=None), - by: Optional[str] = Form(default=defaults.RPC_SEARCH_BY), - arg: Optional[str] = Form(default=None), - args: Optional[list[str]] = Form(default=[], alias="arg[]"), - callback: Optional[str] 
= Form(default=None)): +async def rpc_post( + request: Request, + v: Optional[int] = Form(default=None), + type: Optional[str] = Form(default=None), + by: Optional[str] = Form(default=defaults.RPC_SEARCH_BY), + arg: Optional[str] = Form(default=None), + args: Optional[list[str]] = Form(default=[], alias="arg[]"), + callback: Optional[str] = Form(default=None), +): return await rpc_request(request, v, type, by, arg, args, callback) diff --git a/aurweb/routers/rss.py b/aurweb/routers/rss.py index 0996f3cd..ee85b738 100644 --- a/aurweb/routers/rss.py +++ b/aurweb/routers/rss.py @@ -10,9 +10,8 @@ from aurweb.models import Package, PackageBase router = APIRouter() -def make_rss_feed(request: Request, packages: list, - date_attr: str): - """ Create an RSS Feed string for some packages. +def make_rss_feed(request: Request, packages: list, date_attr: str): + """Create an RSS Feed string for some packages. :param request: A FastAPI request :param packages: A list of packages to add to the RSS feed @@ -26,10 +25,12 @@ def make_rss_feed(request: Request, packages: list, base = f"{request.url.scheme}://{request.url.netloc}" feed.link(href=base, rel="alternate") feed.link(href=f"{base}/rss", rel="self") - feed.image(title="AUR Newest Packages", - url=f"{base}/static/css/archnavbar/aurlogo.png", - link=base, - description="AUR Newest Packages Feed") + feed.image( + title="AUR Newest Packages", + url=f"{base}/static/css/archnavbar/aurlogo.png", + link=base, + description="AUR Newest Packages Feed", + ) for pkg in packages: entry = feed.add_entry(order="append") @@ -53,8 +54,12 @@ def make_rss_feed(request: Request, packages: list, @router.get("/rss/") async def rss(request: Request): - packages = db.query(Package).join(PackageBase).order_by( - PackageBase.SubmittedTS.desc()).limit(100) + packages = ( + db.query(Package) + .join(PackageBase) + .order_by(PackageBase.SubmittedTS.desc()) + .limit(100) + ) feed = make_rss_feed(request, packages, "SubmittedTS") response = 
Response(feed, media_type="application/rss+xml") @@ -69,8 +74,12 @@ async def rss(request: Request): @router.get("/rss/modified") async def rss_modified(request: Request): - packages = db.query(Package).join(PackageBase).order_by( - PackageBase.ModifiedTS.desc()).limit(100) + packages = ( + db.query(Package) + .join(PackageBase) + .order_by(PackageBase.ModifiedTS.desc()) + .limit(100) + ) feed = make_rss_feed(request, packages, "ModifiedTS") response = Response(feed, media_type="application/rss+xml") diff --git a/aurweb/routers/sso.py b/aurweb/routers/sso.py index eff1c63f..e1356cfb 100644 --- a/aurweb/routers/sso.py +++ b/aurweb/routers/sso.py @@ -1,11 +1,9 @@ import time import uuid - from http import HTTPStatus from urllib.parse import urlencode import fastapi - from authlib.integrations.starlette_client import OAuth, OAuthError from fastapi import Depends, HTTPException from fastapi.responses import RedirectResponse @@ -14,7 +12,6 @@ from starlette.requests import Request import aurweb.config import aurweb.db - from aurweb import util from aurweb.l10n import get_translator_for_request from aurweb.schema import Bans, Sessions, Users @@ -43,14 +40,18 @@ async def login(request: Request, redirect: str = None): The `redirect` argument is a query parameter specifying the post-login redirect URL. """ - authenticate_url = aurweb.config.get("options", "aur_location") + "/sso/authenticate" + authenticate_url = ( + aurweb.config.get("options", "aur_location") + "/sso/authenticate" + ) if redirect: authenticate_url = authenticate_url + "?" 
+ urlencode([("redirect", redirect)]) return await oauth.sso.authorize_redirect(request, authenticate_url, prompt="login") def is_account_suspended(conn, user_id): - row = conn.execute(select([Users.c.Suspended]).where(Users.c.ID == user_id)).fetchone() + row = conn.execute( + select([Users.c.Suspended]).where(Users.c.ID == user_id) + ).fetchone() return row is not None and bool(row[0]) @@ -60,23 +61,27 @@ def open_session(request, conn, user_id): """ if is_account_suspended(conn, user_id): _ = get_translator_for_request(request) - raise HTTPException(status_code=HTTPStatus.FORBIDDEN, - detail=_('Account suspended')) + raise HTTPException( + status_code=HTTPStatus.FORBIDDEN, detail=_("Account suspended") + ) # TODO This is a terrible message because it could imply the attempt at # logging in just caused the suspension. sid = uuid.uuid4().hex - conn.execute(Sessions.insert().values( - UsersID=user_id, - SessionID=sid, - LastUpdateTS=time.time(), - )) + conn.execute( + Sessions.insert().values( + UsersID=user_id, + SessionID=sid, + LastUpdateTS=time.time(), + ) + ) # Update user’s last login information. - conn.execute(Users.update() - .where(Users.c.ID == user_id) - .values(LastLogin=int(time.time()), - LastLoginIPAddress=request.client.host)) + conn.execute( + Users.update() + .where(Users.c.ID == user_id) + .values(LastLogin=int(time.time()), LastLoginIPAddress=request.client.host) + ) return sid @@ -98,7 +103,9 @@ def is_aur_url(url): @router.get("/sso/authenticate") -async def authenticate(request: Request, redirect: str = None, conn=Depends(aurweb.db.connect)): +async def authenticate( + request: Request, redirect: str = None, conn=Depends(aurweb.db.connect) +): """ Receive an OpenID Connect ID token, validate it, then process it to create an new AUR session. 
@@ -107,9 +114,12 @@ async def authenticate(request: Request, redirect: str = None, conn=Depends(aurw _ = get_translator_for_request(request) raise HTTPException( status_code=HTTPStatus.FORBIDDEN, - detail=_('The login form is currently disabled for your IP address, ' - 'probably due to sustained spam attacks. Sorry for the ' - 'inconvenience.')) + detail=_( + "The login form is currently disabled for your IP address, " + "probably due to sustained spam attacks. Sorry for the " + "inconvenience." + ), + ) try: token = await oauth.sso.authorize_access_token(request) @@ -120,30 +130,41 @@ async def authenticate(request: Request, redirect: str = None, conn=Depends(aurw _ = get_translator_for_request(request) raise HTTPException( status_code=HTTPStatus.BAD_REQUEST, - detail=_('Bad OAuth token. Please retry logging in from the start.')) + detail=_("Bad OAuth token. Please retry logging in from the start."), + ) sub = user.get("sub") # this is the SSO account ID in JWT terminology if not sub: _ = get_translator_for_request(request) - raise HTTPException(status_code=HTTPStatus.BAD_REQUEST, - detail=_("JWT is missing its `sub` field.")) + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, + detail=_("JWT is missing its `sub` field."), + ) - aur_accounts = conn.execute(select([Users.c.ID]).where(Users.c.SSOAccountID == sub)) \ - .fetchall() + aur_accounts = conn.execute( + select([Users.c.ID]).where(Users.c.SSOAccountID == sub) + ).fetchall() if not aur_accounts: return "Sorry, we don’t seem to know you Sir " + sub elif len(aur_accounts) == 1: sid = open_session(request, conn, aur_accounts[0][Users.c.ID]) - response = RedirectResponse(redirect if redirect and is_aur_url(redirect) else "/") + response = RedirectResponse( + redirect if redirect and is_aur_url(redirect) else "/" + ) secure_cookies = aurweb.config.getboolean("options", "disable_http_login") - response.set_cookie(key="AURSID", value=sid, httponly=True, - secure=secure_cookies) + response.set_cookie( + 
key="AURSID", value=sid, httponly=True, secure=secure_cookies + ) if "id_token" in token: # We save the id_token for the SSO logout. It’s not too important # though, so if we can’t find it, we can live without it. - response.set_cookie(key="SSO_ID_TOKEN", value=token["id_token"], - path="/sso/", httponly=True, - secure=secure_cookies) + response.set_cookie( + key="SSO_ID_TOKEN", + value=token["id_token"], + path="/sso/", + httponly=True, + secure=secure_cookies, + ) return util.add_samesite_fields(response, "strict") else: # We’ve got a severe integrity violation. @@ -165,8 +186,12 @@ async def logout(request: Request): return RedirectResponse("/") metadata = await oauth.sso.load_server_metadata() - query = urlencode({'post_logout_redirect_uri': aurweb.config.get('options', 'aur_location'), - 'id_token_hint': id_token}) - response = RedirectResponse(metadata["end_session_endpoint"] + '?' + query) + query = urlencode( + { + "post_logout_redirect_uri": aurweb.config.get("options", "aur_location"), + "id_token_hint": id_token, + } + ) + response = RedirectResponse(metadata["end_session_endpoint"] + "?" 
+ query) response.delete_cookie("SSO_ID_TOKEN", path="/sso/") return response diff --git a/aurweb/routers/trusted_user.py b/aurweb/routers/trusted_user.py index e1267409..a84bb6bd 100644 --- a/aurweb/routers/trusted_user.py +++ b/aurweb/routers/trusted_user.py @@ -1,6 +1,5 @@ import html import typing - from http import HTTPStatus from typing import Any @@ -30,33 +29,36 @@ ADDVOTE_SPECIFICS = { "add_tu": (7 * 24 * 60 * 60, 0.66), "remove_tu": (7 * 24 * 60 * 60, 0.75), "remove_inactive_tu": (5 * 24 * 60 * 60, 0.66), - "bylaws": (7 * 24 * 60 * 60, 0.75) + "bylaws": (7 * 24 * 60 * 60, 0.75), } def populate_trusted_user_counts(context: dict[str, Any]) -> None: tu_query = db.query(User).filter( - or_(User.AccountTypeID == TRUSTED_USER_ID, - User.AccountTypeID == TRUSTED_USER_AND_DEV_ID) + or_( + User.AccountTypeID == TRUSTED_USER_ID, + User.AccountTypeID == TRUSTED_USER_AND_DEV_ID, + ) ) context["trusted_user_count"] = tu_query.count() # In case any records have a None InactivityTS. active_tu_query = tu_query.filter( - or_(User.InactivityTS.is_(None), - User.InactivityTS == 0) + or_(User.InactivityTS.is_(None), User.InactivityTS == 0) ) context["active_trusted_user_count"] = active_tu_query.count() @router.get("/tu") @requires_auth -async def trusted_user(request: Request, - coff: int = 0, # current offset - cby: str = "desc", # current by - poff: int = 0, # past offset - pby: str = "desc"): # past by - """ Proposal listings. 
""" +async def trusted_user( + request: Request, + coff: int = 0, # current offset + cby: str = "desc", # current by + poff: int = 0, # past offset + pby: str = "desc", +): # past by + """Proposal listings.""" if not request.user.has_credential(creds.TU_LIST_VOTES): return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER) @@ -81,40 +83,47 @@ async def trusted_user(request: Request, past_by = "desc" context["past_by"] = past_by - current_votes = db.query(models.TUVoteInfo).filter( - models.TUVoteInfo.End > ts).order_by( - models.TUVoteInfo.Submitted.desc()) + current_votes = ( + db.query(models.TUVoteInfo) + .filter(models.TUVoteInfo.End > ts) + .order_by(models.TUVoteInfo.Submitted.desc()) + ) context["current_votes_count"] = current_votes.count() current_votes = current_votes.limit(pp).offset(current_off) - context["current_votes"] = reversed(current_votes.all()) \ - if current_by == "asc" else current_votes.all() + context["current_votes"] = ( + reversed(current_votes.all()) if current_by == "asc" else current_votes.all() + ) context["current_off"] = current_off - past_votes = db.query(models.TUVoteInfo).filter( - models.TUVoteInfo.End <= ts).order_by( - models.TUVoteInfo.Submitted.desc()) + past_votes = ( + db.query(models.TUVoteInfo) + .filter(models.TUVoteInfo.End <= ts) + .order_by(models.TUVoteInfo.Submitted.desc()) + ) context["past_votes_count"] = past_votes.count() past_votes = past_votes.limit(pp).offset(past_off) - context["past_votes"] = reversed(past_votes.all()) \ - if past_by == "asc" else past_votes.all() + context["past_votes"] = ( + reversed(past_votes.all()) if past_by == "asc" else past_votes.all() + ) context["past_off"] = past_off last_vote = func.max(models.TUVote.VoteID).label("LastVote") - last_votes_by_tu = db.query(models.TUVote).join(models.User).join( - models.TUVoteInfo, - models.TUVoteInfo.ID == models.TUVote.VoteID - ).filter( - and_(models.TUVote.VoteID == models.TUVoteInfo.ID, - models.User.ID == models.TUVote.UserID, - 
models.TUVoteInfo.End < ts, - or_(models.User.AccountTypeID == 2, - models.User.AccountTypeID == 4)) - ).with_entities( - models.TUVote.UserID, - last_vote, - models.User.Username - ).group_by(models.TUVote.UserID).order_by( - last_vote.desc(), models.User.Username.asc()) + last_votes_by_tu = ( + db.query(models.TUVote) + .join(models.User) + .join(models.TUVoteInfo, models.TUVoteInfo.ID == models.TUVote.VoteID) + .filter( + and_( + models.TUVote.VoteID == models.TUVoteInfo.ID, + models.User.ID == models.TUVote.UserID, + models.TUVoteInfo.End < ts, + or_(models.User.AccountTypeID == 2, models.User.AccountTypeID == 4), + ) + ) + .with_entities(models.TUVote.UserID, last_vote, models.User.Username) + .group_by(models.TUVote.UserID) + .order_by(last_vote.desc(), models.User.Username.asc()) + ) context["last_votes_by_tu"] = last_votes_by_tu.all() context["current_by_next"] = "asc" if current_by == "desc" else "desc" @@ -126,18 +135,22 @@ async def trusted_user(request: Request, "coff": current_off, "cby": current_by, "poff": past_off, - "pby": past_by + "pby": past_by, } return render_template(request, "tu/index.html", context) -def render_proposal(request: Request, context: dict, proposal: int, - voteinfo: models.TUVoteInfo, - voters: typing.Iterable[models.User], - vote: models.TUVote, - status_code: HTTPStatus = HTTPStatus.OK): - """ Render a single TU proposal. 
""" +def render_proposal( + request: Request, + context: dict, + proposal: int, + voteinfo: models.TUVoteInfo, + voters: typing.Iterable[models.User], + vote: models.TUVote, + status_code: HTTPStatus = HTTPStatus.OK, +): + """Render a single TU proposal.""" context["proposal"] = proposal context["voteinfo"] = voteinfo context["voters"] = voters.all() @@ -146,8 +159,9 @@ def render_proposal(request: Request, context: dict, proposal: int, participation = (total / voteinfo.ActiveTUs) if voteinfo.ActiveTUs else 0 context["participation"] = participation - accepted = (voteinfo.Yes > voteinfo.ActiveTUs / 2) or \ - (participation > voteinfo.Quorum and voteinfo.Yes > voteinfo.No) + accepted = (voteinfo.Yes > voteinfo.ActiveTUs / 2) or ( + participation > voteinfo.Quorum and voteinfo.Yes > voteinfo.No + ) context["accepted"] = accepted can_vote = voters.filter(models.TUVote.User == request.user).first() is None @@ -159,8 +173,7 @@ def render_proposal(request: Request, context: dict, proposal: int, context["vote"] = vote context["has_voted"] = vote is not None - return render_template(request, "tu/show.html", context, - status_code=status_code) + return render_template(request, "tu/show.html", context, status_code=status_code) @router.get("/tu/{proposal}") @@ -172,16 +185,27 @@ async def trusted_user_proposal(request: Request, proposal: int): context = await make_variable_context(request, "Trusted User") proposal = int(proposal) - voteinfo = db.query(models.TUVoteInfo).filter( - models.TUVoteInfo.ID == proposal).first() + voteinfo = ( + db.query(models.TUVoteInfo).filter(models.TUVoteInfo.ID == proposal).first() + ) if not voteinfo: raise HTTPException(status_code=HTTPStatus.NOT_FOUND) - voters = db.query(models.User).join(models.TUVote).filter( - models.TUVote.VoteID == voteinfo.ID) - vote = db.query(models.TUVote).filter( - and_(models.TUVote.UserID == request.user.ID, - models.TUVote.VoteID == voteinfo.ID)).first() + voters = ( + db.query(models.User) + 
.join(models.TUVote) + .filter(models.TUVote.VoteID == voteinfo.ID) + ) + vote = ( + db.query(models.TUVote) + .filter( + and_( + models.TUVote.UserID == request.user.ID, + models.TUVote.VoteID == voteinfo.ID, + ) + ) + .first() + ) if not request.user.has_credential(creds.TU_VOTE): context["error"] = "Only Trusted Users are allowed to vote." if voteinfo.User == request.user.Username: @@ -196,24 +220,36 @@ async def trusted_user_proposal(request: Request, proposal: int): @router.post("/tu/{proposal}") @handle_form_exceptions @requires_auth -async def trusted_user_proposal_post(request: Request, proposal: int, - decision: str = Form(...)): +async def trusted_user_proposal_post( + request: Request, proposal: int, decision: str = Form(...) +): if not request.user.has_credential(creds.TU_LIST_VOTES): return RedirectResponse("/tu", status_code=HTTPStatus.SEE_OTHER) context = await make_variable_context(request, "Trusted User") proposal = int(proposal) # Make sure it's an int. - voteinfo = db.query(models.TUVoteInfo).filter( - models.TUVoteInfo.ID == proposal).first() + voteinfo = ( + db.query(models.TUVoteInfo).filter(models.TUVoteInfo.ID == proposal).first() + ) if not voteinfo: raise HTTPException(status_code=HTTPStatus.NOT_FOUND) - voters = db.query(models.User).join(models.TUVote).filter( - models.TUVote.VoteID == voteinfo.ID) - vote = db.query(models.TUVote).filter( - and_(models.TUVote.UserID == request.user.ID, - models.TUVote.VoteID == voteinfo.ID)).first() + voters = ( + db.query(models.User) + .join(models.TUVote) + .filter(models.TUVote.VoteID == voteinfo.ID) + ) + vote = ( + db.query(models.TUVote) + .filter( + and_( + models.TUVote.UserID == request.user.ID, + models.TUVote.VoteID == voteinfo.ID, + ) + ) + .first() + ) status_code = HTTPStatus.OK if not request.user.has_credential(creds.TU_VOTE): @@ -227,16 +263,15 @@ async def trusted_user_proposal_post(request: Request, proposal: int, status_code = HTTPStatus.BAD_REQUEST if status_code != HTTPStatus.OK: - 
return render_proposal(request, context, proposal, - voteinfo, voters, vote, - status_code=status_code) + return render_proposal( + request, context, proposal, voteinfo, voters, vote, status_code=status_code + ) if decision in {"Yes", "No", "Abstain"}: # Increment whichever decision was given to us. setattr(voteinfo, decision, getattr(voteinfo, decision) + 1) else: - return Response("Invalid 'decision' value.", - status_code=HTTPStatus.BAD_REQUEST) + return Response("Invalid 'decision' value.", status_code=HTTPStatus.BAD_REQUEST) with db.begin(): vote = db.create(models.TUVote, User=request.user, VoteInfo=voteinfo) @@ -247,8 +282,9 @@ async def trusted_user_proposal_post(request: Request, proposal: int, @router.get("/addvote") @requires_auth -async def trusted_user_addvote(request: Request, user: str = str(), - type: str = "add_tu", agenda: str = str()): +async def trusted_user_addvote( + request: Request, user: str = str(), type: str = "add_tu", agenda: str = str() +): if not request.user.has_credential(creds.TU_ADD_VOTE): return RedirectResponse("/tu", status_code=HTTPStatus.SEE_OTHER) @@ -268,10 +304,12 @@ async def trusted_user_addvote(request: Request, user: str = str(), @router.post("/addvote") @handle_form_exceptions @requires_auth -async def trusted_user_addvote_post(request: Request, - user: str = Form(default=str()), - type: str = Form(default=str()), - agenda: str = Form(default=str())): +async def trusted_user_addvote_post( + request: Request, + user: str = Form(default=str()), + type: str = Form(default=str()), + agenda: str = Form(default=str()), +): if not request.user.has_credential(creds.TU_ADD_VOTE): return RedirectResponse("/tu", status_code=HTTPStatus.SEE_OTHER) @@ -283,26 +321,29 @@ async def trusted_user_addvote_post(request: Request, context["agenda"] = agenda def render_addvote(context, status_code): - """ Simplify render_template a bit for this test. 
""" + """Simplify render_template a bit for this test.""" return render_template(request, "addvote.html", context, status_code) # Alright, get some database records, if we can. if type != "bylaws": - user_record = db.query(models.User).filter( - models.User.Username == user).first() + user_record = db.query(models.User).filter(models.User.Username == user).first() if user_record is None: context["error"] = "Username does not exist." return render_addvote(context, HTTPStatus.NOT_FOUND) utcnow = time.utcnow() - voteinfo = db.query(models.TUVoteInfo).filter( - and_(models.TUVoteInfo.User == user, - models.TUVoteInfo.End > utcnow)).count() + voteinfo = ( + db.query(models.TUVoteInfo) + .filter( + and_(models.TUVoteInfo.User == user, models.TUVoteInfo.End > utcnow) + ) + .count() + ) if voteinfo: _ = l10n.get_translator_for_request(request) - context["error"] = _( - "%s already has proposal running for them.") % ( - html.escape(user),) + context["error"] = _("%s already has proposal running for them.") % ( + html.escape(user), + ) return render_addvote(context, HTTPStatus.BAD_REQUEST) if type not in ADDVOTE_SPECIFICS: @@ -323,16 +364,27 @@ async def trusted_user_addvote_post(request: Request, # Create a new TUVoteInfo (proposal)! 
with db.begin(): - active_tus = db.query(User).filter( - and_(User.Suspended == 0, - User.InactivityTS.isnot(None), - User.AccountTypeID.in_(types)) - ).count() - voteinfo = db.create(models.TUVoteInfo, User=user, - Agenda=html.escape(agenda), - Submitted=timestamp, End=(timestamp + duration), - Quorum=quorum, ActiveTUs=active_tus, - Submitter=request.user) + active_tus = ( + db.query(User) + .filter( + and_( + User.Suspended == 0, + User.InactivityTS.isnot(None), + User.AccountTypeID.in_(types), + ) + ) + .count() + ) + voteinfo = db.create( + models.TUVoteInfo, + User=user, + Agenda=html.escape(agenda), + Submitted=timestamp, + End=(timestamp + duration), + Quorum=quorum, + ActiveTUs=active_tus, + Submitter=request.user, + ) # Redirect to the new proposal. endpoint = f"/tu/{voteinfo.ID}" diff --git a/aurweb/rpc.py b/aurweb/rpc.py index 3ea7e070..26677f80 100644 --- a/aurweb/rpc.py +++ b/aurweb/rpc.py @@ -1,5 +1,4 @@ import os - from collections import defaultdict from typing import Any, Callable, NewType, Union @@ -7,7 +6,6 @@ from fastapi.responses import HTMLResponse from sqlalchemy import and_, literal, orm import aurweb.config as config - from aurweb import db, defaults, models from aurweb.exceptions import RPCError from aurweb.filters import number_format @@ -23,8 +21,7 @@ TYPE_MAPPING = { "replaces": "Replaces", } -DataGenerator = NewType("DataGenerator", - Callable[[models.Package], dict[str, Any]]) +DataGenerator = NewType("DataGenerator", Callable[[models.Package], dict[str, Any]]) def documentation(): @@ -40,7 +37,7 @@ def documentation(): class RPC: - """ RPC API handler class. + """RPC API handler class. There are various pieces to RPC's process, and encapsulating them inside of a class means that external users do not abuse the @@ -66,17 +63,25 @@ class RPC: # A set of RPC types supported by this API. 
EXPOSED_TYPES = { - "info", "multiinfo", - "search", "msearch", - "suggest", "suggest-pkgbase" + "info", + "multiinfo", + "search", + "msearch", + "suggest", + "suggest-pkgbase", } # A mapping of type aliases. TYPE_ALIASES = {"info": "multiinfo"} EXPOSED_BYS = { - "name-desc", "name", "maintainer", - "depends", "makedepends", "optdepends", "checkdepends" + "name-desc", + "name", + "maintainer", + "depends", + "makedepends", + "optdepends", + "checkdepends", } # A mapping of by aliases. @@ -92,7 +97,7 @@ class RPC: "results": [], "resultcount": 0, "type": "error", - "error": message + "error": message, } def _verify_inputs(self, by: str = [], args: list[str] = []) -> None: @@ -116,7 +121,7 @@ class RPC: raise RPCError("No request type/data specified.") def _get_json_data(self, package: models.Package) -> dict[str, Any]: - """ Produce dictionary data of one Package that can be JSON-serialized. + """Produce dictionary data of one Package that can be JSON-serialized. :param package: Package instance :returns: JSON-serializable dictionary @@ -143,7 +148,7 @@ class RPC: "Popularity": pop, "OutOfDate": package.OutOfDateTS, "FirstSubmitted": package.SubmittedTS, - "LastModified": package.ModifiedTS + "LastModified": package.ModifiedTS, } def _get_info_json_data(self, package: models.Package) -> dict[str, Any]: @@ -151,10 +156,7 @@ class RPC: # All info results have _at least_ an empty list of # License and Keywords. - data.update({ - "License": [], - "Keywords": [] - }) + data.update({"License": [], "Keywords": []}) # If we actually got extra_info records, update data with # them for this particular package. @@ -163,9 +165,9 @@ class RPC: return data - def _assemble_json_data(self, packages: list[models.Package], - data_generator: DataGenerator) \ - -> list[dict[str, Any]]: + def _assemble_json_data( + self, packages: list[models.Package], data_generator: DataGenerator + ) -> list[dict[str, Any]]: """ Assemble JSON data out of a list of packages. 
@@ -175,7 +177,7 @@ class RPC: return [data_generator(pkg) for pkg in packages] def _entities(self, query: orm.Query) -> orm.Query: - """ Select specific RPC columns on `query`. """ + """Select specific RPC columns on `query`.""" return query.with_entities( models.Package.ID, models.Package.Name, @@ -192,16 +194,22 @@ class RPC: models.User.Username.label("Maintainer"), ).group_by(models.Package.ID) - def _handle_multiinfo_type(self, args: list[str] = [], **kwargs) \ - -> list[dict[str, Any]]: + def _handle_multiinfo_type( + self, args: list[str] = [], **kwargs + ) -> list[dict[str, Any]]: self._enforce_args(args) args = set(args) - packages = db.query(models.Package).join(models.PackageBase).join( - models.User, - models.User.ID == models.PackageBase.MaintainerUID, - isouter=True - ).filter(models.Package.Name.in_(args)) + packages = ( + db.query(models.Package) + .join(models.PackageBase) + .join( + models.User, + models.User.ID == models.PackageBase.MaintainerUID, + isouter=True, + ) + .filter(models.Package.Name.in_(args)) + ) max_results = config.getint("options", "max_rpc_results") packages = self._entities(packages).limit(max_results + 1) @@ -217,65 +225,75 @@ class RPC: subqueries = [ # PackageDependency - db.query( - models.PackageDependency - ).join(models.DependencyType).filter( - models.PackageDependency.PackageID.in_(ids) - ).with_entities( + db.query(models.PackageDependency) + .join(models.DependencyType) + .filter(models.PackageDependency.PackageID.in_(ids)) + .with_entities( models.PackageDependency.PackageID.label("ID"), models.DependencyType.Name.label("Type"), models.PackageDependency.DepName.label("Name"), - models.PackageDependency.DepCondition.label("Cond") - ).distinct().order_by("Name"), - + models.PackageDependency.DepCondition.label("Cond"), + ) + .distinct() + .order_by("Name"), # PackageRelation - db.query( - models.PackageRelation - ).join(models.RelationType).filter( - models.PackageRelation.PackageID.in_(ids) - ).with_entities( + 
db.query(models.PackageRelation) + .join(models.RelationType) + .filter(models.PackageRelation.PackageID.in_(ids)) + .with_entities( models.PackageRelation.PackageID.label("ID"), models.RelationType.Name.label("Type"), models.PackageRelation.RelName.label("Name"), - models.PackageRelation.RelCondition.label("Cond") - ).distinct().order_by("Name"), - + models.PackageRelation.RelCondition.label("Cond"), + ) + .distinct() + .order_by("Name"), # Groups - db.query(models.PackageGroup).join( + db.query(models.PackageGroup) + .join( models.Group, - and_(models.PackageGroup.GroupID == models.Group.ID, - models.PackageGroup.PackageID.in_(ids)) - ).with_entities( + and_( + models.PackageGroup.GroupID == models.Group.ID, + models.PackageGroup.PackageID.in_(ids), + ), + ) + .with_entities( models.PackageGroup.PackageID.label("ID"), literal("Groups").label("Type"), models.Group.Name.label("Name"), - literal(str()).label("Cond") - ).distinct().order_by("Name"), - + literal(str()).label("Cond"), + ) + .distinct() + .order_by("Name"), # Licenses - db.query(models.PackageLicense).join( - models.License, - models.PackageLicense.LicenseID == models.License.ID - ).filter( - models.PackageLicense.PackageID.in_(ids) - ).with_entities( + db.query(models.PackageLicense) + .join(models.License, models.PackageLicense.LicenseID == models.License.ID) + .filter(models.PackageLicense.PackageID.in_(ids)) + .with_entities( models.PackageLicense.PackageID.label("ID"), literal("License").label("Type"), models.License.Name.label("Name"), - literal(str()).label("Cond") - ).distinct().order_by("Name"), - + literal(str()).label("Cond"), + ) + .distinct() + .order_by("Name"), # Keywords - db.query(models.PackageKeyword).join( + db.query(models.PackageKeyword) + .join( models.Package, - and_(Package.PackageBaseID == PackageKeyword.PackageBaseID, - Package.ID.in_(ids)) - ).with_entities( + and_( + Package.PackageBaseID == PackageKeyword.PackageBaseID, + Package.ID.in_(ids), + ), + ) + .with_entities( 
models.Package.ID.label("ID"), literal("Keywords").label("Type"), models.PackageKeyword.Keyword.label("Name"), - literal(str()).label("Cond") - ).distinct().order_by("Name") + literal(str()).label("Cond"), + ) + .distinct() + .order_by("Name"), ] # Union all subqueries together. @@ -295,8 +313,9 @@ class RPC: return self._assemble_json_data(packages, self._get_info_json_data) - def _handle_search_type(self, by: str = defaults.RPC_SEARCH_BY, - args: list[str] = []) -> list[dict[str, Any]]: + def _handle_search_type( + self, by: str = defaults.RPC_SEARCH_BY, args: list[str] = [] + ) -> list[dict[str, Any]]: # If `by` isn't maintainer and we don't have any args, raise an error. # In maintainer's case, return all orphans if there are no args, # so we need args to pass through to the handler without errors. @@ -318,50 +337,64 @@ class RPC: return self._assemble_json_data(results, self._get_json_data) - def _handle_msearch_type(self, args: list[str] = [], **kwargs)\ - -> list[dict[str, Any]]: + def _handle_msearch_type( + self, args: list[str] = [], **kwargs + ) -> list[dict[str, Any]]: return self._handle_search_type(by="m", args=args) - def _handle_suggest_type(self, args: list[str] = [], **kwargs)\ - -> list[str]: + def _handle_suggest_type(self, args: list[str] = [], **kwargs) -> list[str]: if not args: return [] arg = args[0] - packages = db.query(models.Package.Name).join( - models.PackageBase - ).filter( - and_(models.PackageBase.PackagerUID.isnot(None), - models.Package.Name.like(f"{arg}%")) - ).order_by(models.Package.Name.asc()).limit(20) + packages = ( + db.query(models.Package.Name) + .join(models.PackageBase) + .filter( + and_( + models.PackageBase.PackagerUID.isnot(None), + models.Package.Name.like(f"{arg}%"), + ) + ) + .order_by(models.Package.Name.asc()) + .limit(20) + ) return [pkg.Name for pkg in packages] - def _handle_suggest_pkgbase_type(self, args: list[str] = [], **kwargs)\ - -> list[str]: + def _handle_suggest_pkgbase_type(self, args: list[str] = 
[], **kwargs) -> list[str]: if not args: return [] arg = args[0] - packages = db.query(models.PackageBase.Name).filter( - and_(models.PackageBase.PackagerUID.isnot(None), - models.PackageBase.Name.like(f"{arg}%")) - ).order_by(models.PackageBase.Name.asc()).limit(20) + packages = ( + db.query(models.PackageBase.Name) + .filter( + and_( + models.PackageBase.PackagerUID.isnot(None), + models.PackageBase.Name.like(f"{arg}%"), + ) + ) + .order_by(models.PackageBase.Name.asc()) + .limit(20) + ) return [pkg.Name for pkg in packages] def _is_suggestion(self) -> bool: return self.type.startswith("suggest") - def _handle_callback(self, by: str, args: list[str])\ - -> Union[list[dict[str, Any]], list[str]]: + def _handle_callback( + self, by: str, args: list[str] + ) -> Union[list[dict[str, Any]], list[str]]: # Get a handle to our callback and trap an RPCError with # an empty list of results based on callback's execution. callback = getattr(self, f"_handle_{self.type.replace('-', '_')}_type") results = callback(by=by, args=args) return results - def handle(self, by: str = defaults.RPC_SEARCH_BY, args: list[str] = [])\ - -> Union[list[dict[str, Any]], dict[str, Any]]: - """ Request entrypoint. A router should pass v, type and args + def handle( + self, by: str = defaults.RPC_SEARCH_BY, args: list[str] = [] + ) -> Union[list[dict[str, Any]], dict[str, Any]]: + """Request entrypoint. A router should pass v, type and args to this function and expect an output dictionary to be returned. :param v: RPC version argument @@ -392,8 +425,5 @@ class RPC: return results # Return JSON output. - data.update({ - "resultcount": len(results), - "results": results - }) + data.update({"resultcount": len(results), "results": results}) return data diff --git a/aurweb/schema.py b/aurweb/schema.py index d2644541..b3b36195 100644 --- a/aurweb/schema.py +++ b/aurweb/schema.py @@ -6,7 +6,18 @@ usually be automatically generated. See `migrations/README` for details. 
""" -from sqlalchemy import CHAR, TIMESTAMP, Column, ForeignKey, Index, MetaData, String, Table, Text, text +from sqlalchemy import ( + CHAR, + TIMESTAMP, + Column, + ForeignKey, + Index, + MetaData, + String, + Table, + Text, + text, +) from sqlalchemy.dialects.mysql import BIGINT, DECIMAL, INTEGER, TINYINT from sqlalchemy.ext.compiler import compiles @@ -15,13 +26,13 @@ import aurweb.config db_backend = aurweb.config.get("database", "backend") -@compiles(TINYINT, 'sqlite') +@compiles(TINYINT, "sqlite") def compile_tinyint_sqlite(type_, compiler, **kw): # pragma: no cover """TINYINT is not supported on SQLite. Substitute it with INTEGER.""" - return 'INTEGER' + return "INTEGER" -@compiles(BIGINT, 'sqlite') +@compiles(BIGINT, "sqlite") def compile_bigint_sqlite(type_, compiler, **kw): # pragma: no cover """ For SQLite's AUTOINCREMENT to work on BIGINT columns, we need to map BIGINT @@ -29,429 +40,567 @@ def compile_bigint_sqlite(type_, compiler, **kw): # pragma: no cover See https://docs.sqlalchemy.org/en/13/dialects/sqlite.html#allowing-autoincrement-behavior-sqlalchemy-types-other-than-integer-integer """ # noqa: E501 - return 'INTEGER' + return "INTEGER" metadata = MetaData() # Define the Account Types for the AUR. AccountTypes = Table( - 'AccountTypes', metadata, - Column('ID', TINYINT(unsigned=True), primary_key=True), - Column('AccountType', String(32), nullable=False, server_default=text("''")), - mysql_engine='InnoDB', - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_general_ci' + "AccountTypes", + metadata, + Column("ID", TINYINT(unsigned=True), primary_key=True), + Column("AccountType", String(32), nullable=False, server_default=text("''")), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_general_ci", ) # User information for each user regardless of type. 
Users = Table( - 'Users', metadata, - Column('ID', INTEGER(unsigned=True), primary_key=True), - Column('AccountTypeID', ForeignKey('AccountTypes.ID', ondelete="NO ACTION"), nullable=False, server_default=text("1")), - Column('Suspended', TINYINT(unsigned=True), nullable=False, server_default=text("0")), - Column('Username', String(32), nullable=False, unique=True), - Column('Email', String(254), nullable=False, unique=True), - Column('BackupEmail', String(254)), - Column('HideEmail', TINYINT(unsigned=True), nullable=False, server_default=text("0")), - Column('Passwd', String(255), nullable=False), - Column('Salt', CHAR(32), nullable=False, server_default=text("''")), - Column('ResetKey', CHAR(32), nullable=False, server_default=text("''")), - Column('RealName', String(64), nullable=False, server_default=text("''")), - Column('LangPreference', String(6), nullable=False, server_default=text("'en'")), - Column('Timezone', String(32), nullable=False, server_default=text("'UTC'")), - Column('Homepage', Text), - Column('IRCNick', String(32), nullable=False, server_default=text("''")), - Column('PGPKey', String(40)), - Column('LastLogin', BIGINT(unsigned=True), nullable=False, server_default=text("0")), - Column('LastLoginIPAddress', String(45)), - Column('LastSSHLogin', BIGINT(unsigned=True), nullable=False, server_default=text("0")), - Column('LastSSHLoginIPAddress', String(45)), - Column('InactivityTS', BIGINT(unsigned=True), nullable=False, server_default=text("0")), - Column('RegistrationTS', TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP")), - Column('CommentNotify', TINYINT(1), nullable=False, server_default=text("1")), - Column('UpdateNotify', TINYINT(1), nullable=False, server_default=text("0")), - Column('OwnershipNotify', TINYINT(1), nullable=False, server_default=text("1")), - Column('SSOAccountID', String(255), nullable=True, unique=True), - Index('UsersAccountTypeID', 'AccountTypeID'), - mysql_engine='InnoDB', - mysql_charset='utf8mb4', - 
mysql_collate='utf8mb4_general_ci', + "Users", + metadata, + Column("ID", INTEGER(unsigned=True), primary_key=True), + Column( + "AccountTypeID", + ForeignKey("AccountTypes.ID", ondelete="NO ACTION"), + nullable=False, + server_default=text("1"), + ), + Column( + "Suspended", TINYINT(unsigned=True), nullable=False, server_default=text("0") + ), + Column("Username", String(32), nullable=False, unique=True), + Column("Email", String(254), nullable=False, unique=True), + Column("BackupEmail", String(254)), + Column( + "HideEmail", TINYINT(unsigned=True), nullable=False, server_default=text("0") + ), + Column("Passwd", String(255), nullable=False), + Column("Salt", CHAR(32), nullable=False, server_default=text("''")), + Column("ResetKey", CHAR(32), nullable=False, server_default=text("''")), + Column("RealName", String(64), nullable=False, server_default=text("''")), + Column("LangPreference", String(6), nullable=False, server_default=text("'en'")), + Column("Timezone", String(32), nullable=False, server_default=text("'UTC'")), + Column("Homepage", Text), + Column("IRCNick", String(32), nullable=False, server_default=text("''")), + Column("PGPKey", String(40)), + Column( + "LastLogin", BIGINT(unsigned=True), nullable=False, server_default=text("0") + ), + Column("LastLoginIPAddress", String(45)), + Column( + "LastSSHLogin", BIGINT(unsigned=True), nullable=False, server_default=text("0") + ), + Column("LastSSHLoginIPAddress", String(45)), + Column( + "InactivityTS", BIGINT(unsigned=True), nullable=False, server_default=text("0") + ), + Column( + "RegistrationTS", + TIMESTAMP, + nullable=False, + server_default=text("CURRENT_TIMESTAMP"), + ), + Column("CommentNotify", TINYINT(1), nullable=False, server_default=text("1")), + Column("UpdateNotify", TINYINT(1), nullable=False, server_default=text("0")), + Column("OwnershipNotify", TINYINT(1), nullable=False, server_default=text("1")), + Column("SSOAccountID", String(255), nullable=True, unique=True), + 
Index("UsersAccountTypeID", "AccountTypeID"), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_general_ci", ) # SSH public keys used for the aurweb SSH/Git interface. SSHPubKeys = Table( - 'SSHPubKeys', metadata, - Column('UserID', ForeignKey('Users.ID', ondelete='CASCADE'), nullable=False), - Column('Fingerprint', String(44), primary_key=True), - Column('PubKey', String(4096), nullable=False), - mysql_engine='InnoDB', mysql_charset='utf8mb4', mysql_collate='utf8mb4_bin', + "SSHPubKeys", + metadata, + Column("UserID", ForeignKey("Users.ID", ondelete="CASCADE"), nullable=False), + Column("Fingerprint", String(44), primary_key=True), + Column("PubKey", String(4096), nullable=False), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_bin", ) # Track Users logging in/out of AUR web site. Sessions = Table( - 'Sessions', metadata, - Column('UsersID', ForeignKey('Users.ID', ondelete='CASCADE'), nullable=False), - Column('SessionID', CHAR(32), nullable=False, unique=True), - Column('LastUpdateTS', BIGINT(unsigned=True), nullable=False), - mysql_engine='InnoDB', mysql_charset='utf8mb4', mysql_collate='utf8mb4_bin', + "Sessions", + metadata, + Column("UsersID", ForeignKey("Users.ID", ondelete="CASCADE"), nullable=False), + Column("SessionID", CHAR(32), nullable=False, unique=True), + Column("LastUpdateTS", BIGINT(unsigned=True), nullable=False), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_bin", ) # Information on package bases PackageBases = Table( - 'PackageBases', metadata, - Column('ID', INTEGER(unsigned=True), primary_key=True), - Column('Name', String(255), nullable=False, unique=True), - Column('NumVotes', INTEGER(unsigned=True), nullable=False, server_default=text("0")), - Column('Popularity', - DECIMAL(10, 6, unsigned=True) - if db_backend == "mysql" else String(17), - nullable=False, server_default=text("0")), - Column('OutOfDateTS', BIGINT(unsigned=True)), - 
Column('FlaggerComment', Text, nullable=False), - Column('SubmittedTS', BIGINT(unsigned=True), nullable=False), - Column('ModifiedTS', BIGINT(unsigned=True), nullable=False), - Column('FlaggerUID', ForeignKey('Users.ID', ondelete='SET NULL')), # who flagged the package out-of-date? + "PackageBases", + metadata, + Column("ID", INTEGER(unsigned=True), primary_key=True), + Column("Name", String(255), nullable=False, unique=True), + Column( + "NumVotes", INTEGER(unsigned=True), nullable=False, server_default=text("0") + ), + Column( + "Popularity", + DECIMAL(10, 6, unsigned=True) if db_backend == "mysql" else String(17), + nullable=False, + server_default=text("0"), + ), + Column("OutOfDateTS", BIGINT(unsigned=True)), + Column("FlaggerComment", Text, nullable=False), + Column("SubmittedTS", BIGINT(unsigned=True), nullable=False), + Column("ModifiedTS", BIGINT(unsigned=True), nullable=False), + Column( + "FlaggerUID", ForeignKey("Users.ID", ondelete="SET NULL") + ), # who flagged the package out-of-date? # deleting a user will cause packages to be orphaned, not deleted - Column('SubmitterUID', ForeignKey('Users.ID', ondelete='SET NULL')), # who submitted it? - Column('MaintainerUID', ForeignKey('Users.ID', ondelete='SET NULL')), # User - Column('PackagerUID', ForeignKey('Users.ID', ondelete='SET NULL')), # Last packager - Index('BasesMaintainerUID', 'MaintainerUID'), - Index('BasesNumVotes', 'NumVotes'), - Index('BasesPackagerUID', 'PackagerUID'), - Index('BasesSubmitterUID', 'SubmitterUID'), - mysql_engine='InnoDB', - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_general_ci', + Column( + "SubmitterUID", ForeignKey("Users.ID", ondelete="SET NULL") + ), # who submitted it? 
+ Column("MaintainerUID", ForeignKey("Users.ID", ondelete="SET NULL")), # User + Column("PackagerUID", ForeignKey("Users.ID", ondelete="SET NULL")), # Last packager + Index("BasesMaintainerUID", "MaintainerUID"), + Index("BasesNumVotes", "NumVotes"), + Index("BasesPackagerUID", "PackagerUID"), + Index("BasesSubmitterUID", "SubmitterUID"), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_general_ci", ) # Keywords of package bases PackageKeywords = Table( - 'PackageKeywords', metadata, - Column('PackageBaseID', ForeignKey('PackageBases.ID', ondelete='CASCADE'), primary_key=True, nullable=True), - Column('Keyword', String(255), primary_key=True, nullable=False, server_default=text("''")), - mysql_engine='InnoDB', - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_general_ci', + "PackageKeywords", + metadata, + Column( + "PackageBaseID", + ForeignKey("PackageBases.ID", ondelete="CASCADE"), + primary_key=True, + nullable=True, + ), + Column( + "Keyword", + String(255), + primary_key=True, + nullable=False, + server_default=text("''"), + ), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_general_ci", ) # Information about the actual packages Packages = Table( - 'Packages', metadata, - Column('ID', INTEGER(unsigned=True), primary_key=True), - Column('PackageBaseID', ForeignKey('PackageBases.ID', ondelete='CASCADE'), nullable=False), - Column('Name', String(255), nullable=False, unique=True), - Column('Version', String(255), nullable=False, server_default=text("''")), - Column('Description', String(255)), - Column('URL', String(8000)), - mysql_engine='InnoDB', - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_general_ci', + "Packages", + metadata, + Column("ID", INTEGER(unsigned=True), primary_key=True), + Column( + "PackageBaseID", + ForeignKey("PackageBases.ID", ondelete="CASCADE"), + nullable=False, + ), + Column("Name", String(255), nullable=False, unique=True), + Column("Version", String(255), nullable=False, 
server_default=text("''")), + Column("Description", String(255)), + Column("URL", String(8000)), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_general_ci", ) # Information about licenses Licenses = Table( - 'Licenses', metadata, - Column('ID', INTEGER(unsigned=True), primary_key=True), - Column('Name', String(255), nullable=False, unique=True), - mysql_engine='InnoDB', - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_general_ci', + "Licenses", + metadata, + Column("ID", INTEGER(unsigned=True), primary_key=True), + Column("Name", String(255), nullable=False, unique=True), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_general_ci", ) # Information about package-license-relations PackageLicenses = Table( - 'PackageLicenses', metadata, - Column('PackageID', ForeignKey('Packages.ID', ondelete='CASCADE'), primary_key=True, nullable=True), - Column('LicenseID', ForeignKey('Licenses.ID', ondelete='CASCADE'), primary_key=True, nullable=True), - mysql_engine='InnoDB', + "PackageLicenses", + metadata, + Column( + "PackageID", + ForeignKey("Packages.ID", ondelete="CASCADE"), + primary_key=True, + nullable=True, + ), + Column( + "LicenseID", + ForeignKey("Licenses.ID", ondelete="CASCADE"), + primary_key=True, + nullable=True, + ), + mysql_engine="InnoDB", ) # Information about groups Groups = Table( - 'Groups', metadata, - Column('ID', INTEGER(unsigned=True), primary_key=True), - Column('Name', String(255), nullable=False, unique=True), - mysql_engine='InnoDB', - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_general_ci', + "Groups", + metadata, + Column("ID", INTEGER(unsigned=True), primary_key=True), + Column("Name", String(255), nullable=False, unique=True), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_general_ci", ) # Information about package-group-relations PackageGroups = Table( - 'PackageGroups', metadata, - Column('PackageID', ForeignKey('Packages.ID', 
ondelete='CASCADE'), primary_key=True, nullable=True), - Column('GroupID', ForeignKey('Groups.ID', ondelete='CASCADE'), primary_key=True, nullable=True), - mysql_engine='InnoDB', + "PackageGroups", + metadata, + Column( + "PackageID", + ForeignKey("Packages.ID", ondelete="CASCADE"), + primary_key=True, + nullable=True, + ), + Column( + "GroupID", + ForeignKey("Groups.ID", ondelete="CASCADE"), + primary_key=True, + nullable=True, + ), + mysql_engine="InnoDB", ) # Define the package dependency types DependencyTypes = Table( - 'DependencyTypes', metadata, - Column('ID', TINYINT(unsigned=True), primary_key=True), - Column('Name', String(32), nullable=False, server_default=text("''")), - mysql_engine='InnoDB', - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_general_ci', + "DependencyTypes", + metadata, + Column("ID", TINYINT(unsigned=True), primary_key=True), + Column("Name", String(32), nullable=False, server_default=text("''")), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_general_ci", ) # Track which dependencies a package has PackageDepends = Table( - 'PackageDepends', metadata, - Column('PackageID', ForeignKey('Packages.ID', ondelete='CASCADE'), nullable=False), - Column('DepTypeID', ForeignKey('DependencyTypes.ID', ondelete="NO ACTION"), nullable=False), - Column('DepName', String(255), nullable=False), - Column('DepDesc', String(255)), - Column('DepCondition', String(255)), - Column('DepArch', String(255)), - Index('DependsDepName', 'DepName'), - Index('DependsPackageID', 'PackageID'), - mysql_engine='InnoDB', - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_general_ci', + "PackageDepends", + metadata, + Column("PackageID", ForeignKey("Packages.ID", ondelete="CASCADE"), nullable=False), + Column( + "DepTypeID", + ForeignKey("DependencyTypes.ID", ondelete="NO ACTION"), + nullable=False, + ), + Column("DepName", String(255), nullable=False), + Column("DepDesc", String(255)), + Column("DepCondition", String(255)), + 
Column("DepArch", String(255)), + Index("DependsDepName", "DepName"), + Index("DependsPackageID", "PackageID"), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_general_ci", ) # Define the package relation types RelationTypes = Table( - 'RelationTypes', metadata, - Column('ID', TINYINT(unsigned=True), primary_key=True), - Column('Name', String(32), nullable=False, server_default=text("''")), - mysql_engine='InnoDB', - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_general_ci', + "RelationTypes", + metadata, + Column("ID", TINYINT(unsigned=True), primary_key=True), + Column("Name", String(32), nullable=False, server_default=text("''")), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_general_ci", ) # Track which conflicts, provides and replaces a package has PackageRelations = Table( - 'PackageRelations', metadata, - Column('PackageID', ForeignKey('Packages.ID', ondelete='CASCADE'), nullable=False), - Column('RelTypeID', ForeignKey('RelationTypes.ID', ondelete="NO ACTION"), nullable=False), - Column('RelName', String(255), nullable=False), - Column('RelCondition', String(255)), - Column('RelArch', String(255)), - Index('RelationsPackageID', 'PackageID'), - Index('RelationsRelName', 'RelName'), - mysql_engine='InnoDB', - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_general_ci', + "PackageRelations", + metadata, + Column("PackageID", ForeignKey("Packages.ID", ondelete="CASCADE"), nullable=False), + Column( + "RelTypeID", + ForeignKey("RelationTypes.ID", ondelete="NO ACTION"), + nullable=False, + ), + Column("RelName", String(255), nullable=False), + Column("RelCondition", String(255)), + Column("RelArch", String(255)), + Index("RelationsPackageID", "PackageID"), + Index("RelationsRelName", "RelName"), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_general_ci", ) # Track which sources a package has PackageSources = Table( - 'PackageSources', metadata, - Column('PackageID', 
ForeignKey('Packages.ID', ondelete='CASCADE'), nullable=False), - Column('Source', String(8000), nullable=False, server_default=text("'/dev/null'")), - Column('SourceArch', String(255)), - Index('SourcesPackageID', 'PackageID'), - mysql_engine='InnoDB', - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_general_ci', + "PackageSources", + metadata, + Column("PackageID", ForeignKey("Packages.ID", ondelete="CASCADE"), nullable=False), + Column("Source", String(8000), nullable=False, server_default=text("'/dev/null'")), + Column("SourceArch", String(255)), + Index("SourcesPackageID", "PackageID"), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_general_ci", ) # Track votes for packages PackageVotes = Table( - 'PackageVotes', metadata, - Column('UsersID', ForeignKey('Users.ID', ondelete='CASCADE'), nullable=False), - Column('PackageBaseID', ForeignKey('PackageBases.ID', ondelete='CASCADE'), nullable=False), - Column('VoteTS', BIGINT(unsigned=True), nullable=False), - Index('VoteUsersIDPackageID', 'UsersID', 'PackageBaseID', unique=True), - Index('VotesPackageBaseID', 'PackageBaseID'), - Index('VotesUsersID', 'UsersID'), - mysql_engine='InnoDB', + "PackageVotes", + metadata, + Column("UsersID", ForeignKey("Users.ID", ondelete="CASCADE"), nullable=False), + Column( + "PackageBaseID", + ForeignKey("PackageBases.ID", ondelete="CASCADE"), + nullable=False, + ), + Column("VoteTS", BIGINT(unsigned=True), nullable=False), + Index("VoteUsersIDPackageID", "UsersID", "PackageBaseID", unique=True), + Index("VotesPackageBaseID", "PackageBaseID"), + Index("VotesUsersID", "UsersID"), + mysql_engine="InnoDB", ) # Record comments for packages PackageComments = Table( - 'PackageComments', metadata, - Column('ID', BIGINT(unsigned=True), primary_key=True), - Column('PackageBaseID', ForeignKey('PackageBases.ID', ondelete='CASCADE'), nullable=False), - Column('UsersID', ForeignKey('Users.ID', ondelete='SET NULL')), - Column('Comments', Text, nullable=False), - 
Column('RenderedComment', Text, nullable=False), - Column('CommentTS', BIGINT(unsigned=True), nullable=False, server_default=text("0")), - Column('EditedTS', BIGINT(unsigned=True)), - Column('EditedUsersID', ForeignKey('Users.ID', ondelete='SET NULL')), - Column('DelTS', BIGINT(unsigned=True)), - Column('DelUsersID', ForeignKey('Users.ID', ondelete='CASCADE')), - Column('PinnedTS', BIGINT(unsigned=True), nullable=False, server_default=text("0")), - Index('CommentsPackageBaseID', 'PackageBaseID'), - Index('CommentsUsersID', 'UsersID'), - mysql_engine='InnoDB', - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_general_ci', + "PackageComments", + metadata, + Column("ID", BIGINT(unsigned=True), primary_key=True), + Column( + "PackageBaseID", + ForeignKey("PackageBases.ID", ondelete="CASCADE"), + nullable=False, + ), + Column("UsersID", ForeignKey("Users.ID", ondelete="SET NULL")), + Column("Comments", Text, nullable=False), + Column("RenderedComment", Text, nullable=False), + Column( + "CommentTS", BIGINT(unsigned=True), nullable=False, server_default=text("0") + ), + Column("EditedTS", BIGINT(unsigned=True)), + Column("EditedUsersID", ForeignKey("Users.ID", ondelete="SET NULL")), + Column("DelTS", BIGINT(unsigned=True)), + Column("DelUsersID", ForeignKey("Users.ID", ondelete="CASCADE")), + Column("PinnedTS", BIGINT(unsigned=True), nullable=False, server_default=text("0")), + Index("CommentsPackageBaseID", "PackageBaseID"), + Index("CommentsUsersID", "UsersID"), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_general_ci", ) # Package base co-maintainers PackageComaintainers = Table( - 'PackageComaintainers', metadata, - Column('UsersID', ForeignKey('Users.ID', ondelete='CASCADE'), nullable=False), - Column('PackageBaseID', ForeignKey('PackageBases.ID', ondelete='CASCADE'), nullable=False), - Column('Priority', INTEGER(unsigned=True), nullable=False), - Index('ComaintainersPackageBaseID', 'PackageBaseID'), - Index('ComaintainersUsersID', 
'UsersID'), - mysql_engine='InnoDB', + "PackageComaintainers", + metadata, + Column("UsersID", ForeignKey("Users.ID", ondelete="CASCADE"), nullable=False), + Column( + "PackageBaseID", + ForeignKey("PackageBases.ID", ondelete="CASCADE"), + nullable=False, + ), + Column("Priority", INTEGER(unsigned=True), nullable=False), + Index("ComaintainersPackageBaseID", "PackageBaseID"), + Index("ComaintainersUsersID", "UsersID"), + mysql_engine="InnoDB", ) # Package base notifications PackageNotifications = Table( - 'PackageNotifications', metadata, - Column('PackageBaseID', ForeignKey('PackageBases.ID', ondelete='CASCADE'), nullable=False), - Column('UserID', ForeignKey('Users.ID', ondelete='CASCADE'), nullable=False), - Index('NotifyUserIDPkgID', 'UserID', 'PackageBaseID', unique=True), - mysql_engine='InnoDB', + "PackageNotifications", + metadata, + Column( + "PackageBaseID", + ForeignKey("PackageBases.ID", ondelete="CASCADE"), + nullable=False, + ), + Column("UserID", ForeignKey("Users.ID", ondelete="CASCADE"), nullable=False), + Index("NotifyUserIDPkgID", "UserID", "PackageBaseID", unique=True), + mysql_engine="InnoDB", ) # Package name blacklist PackageBlacklist = Table( - 'PackageBlacklist', metadata, - Column('ID', INTEGER(unsigned=True), primary_key=True), - Column('Name', String(64), nullable=False, unique=True), - mysql_engine='InnoDB', - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_general_ci', + "PackageBlacklist", + metadata, + Column("ID", INTEGER(unsigned=True), primary_key=True), + Column("Name", String(64), nullable=False, unique=True), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_general_ci", ) # Providers in the official repositories OfficialProviders = Table( - 'OfficialProviders', metadata, - Column('ID', INTEGER(unsigned=True), primary_key=True), - Column('Name', String(64), nullable=False), - Column('Repo', String(64), nullable=False), - Column('Provides', String(64), nullable=False), - Index('ProviderNameProvides', 
'Name', 'Provides', unique=True), - mysql_engine='InnoDB', mysql_charset='utf8mb4', mysql_collate='utf8mb4_bin', + "OfficialProviders", + metadata, + Column("ID", INTEGER(unsigned=True), primary_key=True), + Column("Name", String(64), nullable=False), + Column("Repo", String(64), nullable=False), + Column("Provides", String(64), nullable=False), + Index("ProviderNameProvides", "Name", "Provides", unique=True), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_bin", ) # Define package request types RequestTypes = Table( - 'RequestTypes', metadata, - Column('ID', TINYINT(unsigned=True), primary_key=True), - Column('Name', String(32), nullable=False, server_default=text("''")), - mysql_engine='InnoDB', - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_general_ci', + "RequestTypes", + metadata, + Column("ID", TINYINT(unsigned=True), primary_key=True), + Column("Name", String(32), nullable=False, server_default=text("''")), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_general_ci", ) # Package requests PackageRequests = Table( - 'PackageRequests', metadata, - Column('ID', BIGINT(unsigned=True), primary_key=True), - Column('ReqTypeID', ForeignKey('RequestTypes.ID', ondelete="NO ACTION"), nullable=False), - Column('PackageBaseID', ForeignKey('PackageBases.ID', ondelete='SET NULL')), - Column('PackageBaseName', String(255), nullable=False), - Column('MergeBaseName', String(255)), - Column('UsersID', ForeignKey('Users.ID', ondelete='SET NULL')), - Column('Comments', Text, nullable=False), - Column('ClosureComment', Text, nullable=False), - Column('RequestTS', BIGINT(unsigned=True), nullable=False, server_default=text("0")), - Column('ClosedTS', BIGINT(unsigned=True)), - Column('ClosedUID', ForeignKey('Users.ID', ondelete='SET NULL')), - Column('Status', TINYINT(unsigned=True), nullable=False, server_default=text("0")), - Index('RequestsPackageBaseID', 'PackageBaseID'), - Index('RequestsUsersID', 'UsersID'), - 
mysql_engine='InnoDB', - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_general_ci', + "PackageRequests", + metadata, + Column("ID", BIGINT(unsigned=True), primary_key=True), + Column( + "ReqTypeID", ForeignKey("RequestTypes.ID", ondelete="NO ACTION"), nullable=False + ), + Column("PackageBaseID", ForeignKey("PackageBases.ID", ondelete="SET NULL")), + Column("PackageBaseName", String(255), nullable=False), + Column("MergeBaseName", String(255)), + Column("UsersID", ForeignKey("Users.ID", ondelete="SET NULL")), + Column("Comments", Text, nullable=False), + Column("ClosureComment", Text, nullable=False), + Column( + "RequestTS", BIGINT(unsigned=True), nullable=False, server_default=text("0") + ), + Column("ClosedTS", BIGINT(unsigned=True)), + Column("ClosedUID", ForeignKey("Users.ID", ondelete="SET NULL")), + Column("Status", TINYINT(unsigned=True), nullable=False, server_default=text("0")), + Index("RequestsPackageBaseID", "PackageBaseID"), + Index("RequestsUsersID", "UsersID"), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_general_ci", ) # Vote information TU_VoteInfo = Table( - 'TU_VoteInfo', metadata, - Column('ID', INTEGER(unsigned=True), primary_key=True), - Column('Agenda', Text, nullable=False), - Column('User', String(32), nullable=False), - Column('Submitted', BIGINT(unsigned=True), nullable=False), - Column('End', BIGINT(unsigned=True), nullable=False), - Column('Quorum', - DECIMAL(2, 2, unsigned=True) - if db_backend == "mysql" else String(5), - nullable=False), - Column('SubmitterID', ForeignKey('Users.ID', ondelete='CASCADE'), nullable=False), - Column('Yes', INTEGER(unsigned=True), nullable=False, server_default=text("'0'")), - Column('No', INTEGER(unsigned=True), nullable=False, server_default=text("'0'")), - Column('Abstain', INTEGER(unsigned=True), nullable=False, server_default=text("'0'")), - Column('ActiveTUs', INTEGER(unsigned=True), nullable=False, server_default=text("'0'")), - mysql_engine='InnoDB', - 
mysql_charset='utf8mb4', - mysql_collate='utf8mb4_general_ci', + "TU_VoteInfo", + metadata, + Column("ID", INTEGER(unsigned=True), primary_key=True), + Column("Agenda", Text, nullable=False), + Column("User", String(32), nullable=False), + Column("Submitted", BIGINT(unsigned=True), nullable=False), + Column("End", BIGINT(unsigned=True), nullable=False), + Column( + "Quorum", + DECIMAL(2, 2, unsigned=True) if db_backend == "mysql" else String(5), + nullable=False, + ), + Column("SubmitterID", ForeignKey("Users.ID", ondelete="CASCADE"), nullable=False), + Column("Yes", INTEGER(unsigned=True), nullable=False, server_default=text("'0'")), + Column("No", INTEGER(unsigned=True), nullable=False, server_default=text("'0'")), + Column( + "Abstain", INTEGER(unsigned=True), nullable=False, server_default=text("'0'") + ), + Column( + "ActiveTUs", INTEGER(unsigned=True), nullable=False, server_default=text("'0'") + ), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_general_ci", ) # Individual vote records TU_Votes = Table( - 'TU_Votes', metadata, - Column('VoteID', ForeignKey('TU_VoteInfo.ID', ondelete='CASCADE'), nullable=False), - Column('UserID', ForeignKey('Users.ID', ondelete='CASCADE'), nullable=False), - mysql_engine='InnoDB', + "TU_Votes", + metadata, + Column("VoteID", ForeignKey("TU_VoteInfo.ID", ondelete="CASCADE"), nullable=False), + Column("UserID", ForeignKey("Users.ID", ondelete="CASCADE"), nullable=False), + mysql_engine="InnoDB", ) # Malicious user banning Bans = Table( - 'Bans', metadata, - Column('IPAddress', String(45), primary_key=True), - Column('BanTS', TIMESTAMP, nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_general_ci', + "Bans", + metadata, + Column("IPAddress", String(45), primary_key=True), + Column("BanTS", TIMESTAMP, nullable=False), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_general_ci", ) # Terms and Conditions Terms = Table( - 'Terms', 
metadata, - Column('ID', INTEGER(unsigned=True), primary_key=True), - Column('Description', String(255), nullable=False), - Column('URL', String(8000), nullable=False), - Column('Revision', INTEGER(unsigned=True), nullable=False, server_default=text("1")), - mysql_engine='InnoDB', - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_general_ci', + "Terms", + metadata, + Column("ID", INTEGER(unsigned=True), primary_key=True), + Column("Description", String(255), nullable=False), + Column("URL", String(8000), nullable=False), + Column( + "Revision", INTEGER(unsigned=True), nullable=False, server_default=text("1") + ), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_general_ci", ) # Terms and Conditions accepted by users AcceptedTerms = Table( - 'AcceptedTerms', metadata, - Column('UsersID', ForeignKey('Users.ID', ondelete='CASCADE'), nullable=False), - Column('TermsID', ForeignKey('Terms.ID', ondelete='CASCADE'), nullable=False), - Column('Revision', INTEGER(unsigned=True), nullable=False, server_default=text("0")), - mysql_engine='InnoDB', + "AcceptedTerms", + metadata, + Column("UsersID", ForeignKey("Users.ID", ondelete="CASCADE"), nullable=False), + Column("TermsID", ForeignKey("Terms.ID", ondelete="CASCADE"), nullable=False), + Column( + "Revision", INTEGER(unsigned=True), nullable=False, server_default=text("0") + ), + mysql_engine="InnoDB", ) # Rate limits for API ApiRateLimit = Table( - 'ApiRateLimit', metadata, - Column('IP', String(45), primary_key=True, unique=True, default=str()), - Column('Requests', INTEGER(11), nullable=False), - Column('WindowStart', BIGINT(20), nullable=False), - Index('ApiRateLimitWindowStart', 'WindowStart'), - mysql_engine='InnoDB', - mysql_charset='utf8mb4', - mysql_collate='utf8mb4_general_ci', + "ApiRateLimit", + metadata, + Column("IP", String(45), primary_key=True, unique=True, default=str()), + Column("Requests", INTEGER(11), nullable=False), + Column("WindowStart", BIGINT(20), nullable=False), + 
Index("ApiRateLimitWindowStart", "WindowStart"), + mysql_engine="InnoDB", + mysql_charset="utf8mb4", + mysql_collate="utf8mb4_general_ci", ) diff --git a/aurweb/scripts/adduser.py b/aurweb/scripts/adduser.py index 4cc059d1..cf933c71 100644 --- a/aurweb/scripts/adduser.py +++ b/aurweb/scripts/adduser.py @@ -11,7 +11,6 @@ import sys import traceback import aurweb.models.account_type as at - from aurweb import db from aurweb.models.account_type import AccountType from aurweb.models.ssh_pub_key import SSHPubKey, get_fingerprint @@ -30,8 +29,9 @@ def parse_args(): parser.add_argument("--ssh-pubkey", help="SSH PubKey") choices = at.ACCOUNT_TYPE_NAME.values() - parser.add_argument("-t", "--type", help="Account Type", - choices=choices, default=at.USER) + parser.add_argument( + "-t", "--type", help="Account Type", choices=choices, default=at.USER + ) return parser.parse_args() @@ -40,25 +40,29 @@ def main(): args = parse_args() db.get_engine() - type = db.query(AccountType, - AccountType.AccountType == args.type).first() + type = db.query(AccountType, AccountType.AccountType == args.type).first() with db.begin(): - user = db.create(User, Username=args.username, - Email=args.email, Passwd=args.password, - RealName=args.realname, IRCNick=args.ircnick, - PGPKey=args.pgp_key, AccountType=type) + user = db.create( + User, + Username=args.username, + Email=args.email, + Passwd=args.password, + RealName=args.realname, + IRCNick=args.ircnick, + PGPKey=args.pgp_key, + AccountType=type, + ) if args.ssh_pubkey: pubkey = args.ssh_pubkey.strip() # Remove host from the pubkey if it's there. 
- pubkey = ' '.join(pubkey.split(' ')[:2]) + pubkey = " ".join(pubkey.split(" ")[:2]) with db.begin(): - db.create(SSHPubKey, - User=user, - PubKey=pubkey, - Fingerprint=get_fingerprint(pubkey)) + db.create( + SSHPubKey, User=user, PubKey=pubkey, Fingerprint=get_fingerprint(pubkey) + ) print(user.json()) return 0 diff --git a/aurweb/scripts/aurblup.py b/aurweb/scripts/aurblup.py index 9c9059ec..340d1ccd 100755 --- a/aurweb/scripts/aurblup.py +++ b/aurweb/scripts/aurblup.py @@ -3,11 +3,9 @@ import re import pyalpm - from sqlalchemy import and_ import aurweb.config - from aurweb import db, util from aurweb.models import OfficialProvider @@ -18,8 +16,8 @@ def _main(force: bool = False): repomap = dict() db_path = aurweb.config.get("aurblup", "db-path") - sync_dbs = aurweb.config.get('aurblup', 'sync-dbs').split(' ') - server = aurweb.config.get('aurblup', 'server') + sync_dbs = aurweb.config.get("aurblup", "sync-dbs").split(" ") + server = aurweb.config.get("aurblup", "server") h = pyalpm.Handle("/", db_path) for sync_db in sync_dbs: @@ -35,28 +33,35 @@ def _main(force: bool = False): providers.add((pkg.name, pkg.name)) repomap[(pkg.name, pkg.name)] = repo.name for provision in pkg.provides: - provisionname = re.sub(r'(<|=|>).*', '', provision) + provisionname = re.sub(r"(<|=|>).*", "", provision) providers.add((pkg.name, provisionname)) repomap[(pkg.name, provisionname)] = repo.name with db.begin(): old_providers = set( - db.query(OfficialProvider).with_entities( + db.query(OfficialProvider) + .with_entities( OfficialProvider.Name.label("Name"), - OfficialProvider.Provides.label("Provides") - ).distinct().order_by("Name").all() + OfficialProvider.Provides.label("Provides"), + ) + .distinct() + .order_by("Name") + .all() ) for name, provides in old_providers.difference(providers): - db.delete_all(db.query(OfficialProvider).filter( - and_(OfficialProvider.Name == name, - OfficialProvider.Provides == provides) - )) + db.delete_all( + db.query(OfficialProvider).filter( + 
and_( + OfficialProvider.Name == name, + OfficialProvider.Provides == provides, + ) + ) + ) for name, provides in providers.difference(old_providers): repo = repomap.get((name, provides)) - db.create(OfficialProvider, Name=name, - Repo=repo, Provides=provides) + db.create(OfficialProvider, Name=name, Repo=repo, Provides=provides) def main(force: bool = False): @@ -64,5 +69,5 @@ def main(force: bool = False): _main(force) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/aurweb/scripts/config.py b/aurweb/scripts/config.py index e7c91dd1..1d90f525 100644 --- a/aurweb/scripts/config.py +++ b/aurweb/scripts/config.py @@ -50,12 +50,12 @@ def parse_args(): actions = ["get", "set", "unset"] parser = argparse.ArgumentParser( description="aurweb configuration tool", - formatter_class=lambda prog: fmt_cls(prog=prog, max_help_position=80)) + formatter_class=lambda prog: fmt_cls(prog=prog, max_help_position=80), + ) parser.add_argument("action", choices=actions, help="script action") parser.add_argument("section", help="config section") parser.add_argument("option", help="config option") - parser.add_argument("value", nargs="?", default=0, - help="config option value") + parser.add_argument("value", nargs="?", default=0, help="config option value") return parser.parse_args() diff --git a/aurweb/scripts/mkpkglists.py b/aurweb/scripts/mkpkglists.py index 888e346c..7ca171ab 100755 --- a/aurweb/scripts/mkpkglists.py +++ b/aurweb/scripts/mkpkglists.py @@ -25,16 +25,13 @@ import os import shutil import sys import tempfile - from collections import defaultdict from typing import Any import orjson - from sqlalchemy import literal, orm import aurweb.config - from aurweb import db, filters, logging, models, util from aurweb.benchmark import Benchmark from aurweb.models import Package, PackageBase, User @@ -90,65 +87,68 @@ def get_extended_dict(query: orm.Query): def get_extended_fields(): subqueries = [ # PackageDependency - db.query( - models.PackageDependency 
- ).join(models.DependencyType).with_entities( + db.query(models.PackageDependency) + .join(models.DependencyType) + .with_entities( models.PackageDependency.PackageID.label("ID"), models.DependencyType.Name.label("Type"), models.PackageDependency.DepName.label("Name"), - models.PackageDependency.DepCondition.label("Cond") - ).distinct().order_by("Name"), - + models.PackageDependency.DepCondition.label("Cond"), + ) + .distinct() + .order_by("Name"), # PackageRelation - db.query( - models.PackageRelation - ).join(models.RelationType).with_entities( + db.query(models.PackageRelation) + .join(models.RelationType) + .with_entities( models.PackageRelation.PackageID.label("ID"), models.RelationType.Name.label("Type"), models.PackageRelation.RelName.label("Name"), - models.PackageRelation.RelCondition.label("Cond") - ).distinct().order_by("Name"), - + models.PackageRelation.RelCondition.label("Cond"), + ) + .distinct() + .order_by("Name"), # Groups - db.query(models.PackageGroup).join( - models.Group, - models.PackageGroup.GroupID == models.Group.ID - ).with_entities( + db.query(models.PackageGroup) + .join(models.Group, models.PackageGroup.GroupID == models.Group.ID) + .with_entities( models.PackageGroup.PackageID.label("ID"), literal("Groups").label("Type"), models.Group.Name.label("Name"), - literal(str()).label("Cond") - ).distinct().order_by("Name"), - + literal(str()).label("Cond"), + ) + .distinct() + .order_by("Name"), # Licenses - db.query(models.PackageLicense).join( - models.License, - models.PackageLicense.LicenseID == models.License.ID - ).with_entities( + db.query(models.PackageLicense) + .join(models.License, models.PackageLicense.LicenseID == models.License.ID) + .with_entities( models.PackageLicense.PackageID.label("ID"), literal("License").label("Type"), models.License.Name.label("Name"), - literal(str()).label("Cond") - ).distinct().order_by("Name"), - + literal(str()).label("Cond"), + ) + .distinct() + .order_by("Name"), # Keywords - 
db.query(models.PackageKeyword).join( - models.Package, - Package.PackageBaseID == models.PackageKeyword.PackageBaseID - ).with_entities( + db.query(models.PackageKeyword) + .join( + models.Package, Package.PackageBaseID == models.PackageKeyword.PackageBaseID + ) + .with_entities( models.Package.ID.label("ID"), literal("Keywords").label("Type"), models.PackageKeyword.Keyword.label("Name"), - literal(str()).label("Cond") - ).distinct().order_by("Name") + literal(str()).label("Cond"), + ) + .distinct() + .order_by("Name"), ] query = subqueries[0].union_all(*subqueries[1:]) return get_extended_dict(query) -EXTENDED_FIELD_HANDLERS = { - "--extended": get_extended_fields -} +EXTENDED_FIELD_HANDLERS = {"--extended": get_extended_fields} def as_dict(package: Package) -> dict[str, Any]: @@ -181,37 +181,38 @@ def _main(): archivedir = aurweb.config.get("mkpkglists", "archivedir") os.makedirs(archivedir, exist_ok=True) - PACKAGES = aurweb.config.get('mkpkglists', 'packagesfile') - META = aurweb.config.get('mkpkglists', 'packagesmetafile') - META_EXT = aurweb.config.get('mkpkglists', 'packagesmetaextfile') - PKGBASE = aurweb.config.get('mkpkglists', 'pkgbasefile') - USERS = aurweb.config.get('mkpkglists', 'userfile') + PACKAGES = aurweb.config.get("mkpkglists", "packagesfile") + META = aurweb.config.get("mkpkglists", "packagesmetafile") + META_EXT = aurweb.config.get("mkpkglists", "packagesmetaextfile") + PKGBASE = aurweb.config.get("mkpkglists", "pkgbasefile") + USERS = aurweb.config.get("mkpkglists", "userfile") bench = Benchmark() logger.info("Started re-creating archives, wait a while...") - query = db.query(Package).join( - PackageBase, - PackageBase.ID == Package.PackageBaseID - ).join( - User, - PackageBase.MaintainerUID == User.ID, - isouter=True - ).filter(PackageBase.PackagerUID.isnot(None)).with_entities( - Package.ID, - Package.Name, - PackageBase.ID.label("PackageBaseID"), - PackageBase.Name.label("PackageBase"), - Package.Version, - Package.Description, - 
Package.URL, - PackageBase.NumVotes, - PackageBase.Popularity, - PackageBase.OutOfDateTS.label("OutOfDate"), - User.Username.label("Maintainer"), - PackageBase.SubmittedTS.label("FirstSubmitted"), - PackageBase.ModifiedTS.label("LastModified") - ).distinct().order_by("Name") + query = ( + db.query(Package) + .join(PackageBase, PackageBase.ID == Package.PackageBaseID) + .join(User, PackageBase.MaintainerUID == User.ID, isouter=True) + .filter(PackageBase.PackagerUID.isnot(None)) + .with_entities( + Package.ID, + Package.Name, + PackageBase.ID.label("PackageBaseID"), + PackageBase.Name.label("PackageBase"), + Package.Version, + Package.Description, + Package.URL, + PackageBase.NumVotes, + PackageBase.Popularity, + PackageBase.OutOfDateTS.label("OutOfDate"), + User.Username.label("Maintainer"), + PackageBase.SubmittedTS.label("FirstSubmitted"), + PackageBase.ModifiedTS.label("LastModified"), + ) + .distinct() + .order_by("Name") + ) # Produce packages-meta-v1.json.gz output = list() @@ -252,7 +253,7 @@ def _main(): # We stream out package json objects line per line, so # we also need to include the ',' character at the end # of package lines (excluding the last package). 
- suffix = b",\n" if i < n else b'\n' + suffix = b",\n" if i < n else b"\n" # Write out to packagesmetafile output.append(item) @@ -273,8 +274,7 @@ def _main(): util.apply_all(gzips.values(), lambda gz: gz.close()) # Produce pkgbase.gz - query = db.query(PackageBase.Name).filter( - PackageBase.PackagerUID.isnot(None)).all() + query = db.query(PackageBase.Name).filter(PackageBase.PackagerUID.isnot(None)).all() tmp_pkgbase = os.path.join(tmpdir, os.path.basename(PKGBASE)) with gzip.open(tmp_pkgbase, "wt") as f: f.writelines([f"{base.Name}\n" for i, base in enumerate(query)]) @@ -317,5 +317,5 @@ def main(): _main() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/aurweb/scripts/notify.py b/aurweb/scripts/notify.py index 6afa65ae..f19438bb 100755 --- a/aurweb/scripts/notify.py +++ b/aurweb/scripts/notify.py @@ -13,7 +13,6 @@ import aurweb.config import aurweb.db import aurweb.filters import aurweb.l10n - from aurweb import db, logging from aurweb.models import PackageBase, User from aurweb.models.package_comaintainer import PackageComaintainer @@ -25,15 +24,15 @@ from aurweb.models.tu_vote import TUVote logger = logging.get_logger(__name__) -aur_location = aurweb.config.get('options', 'aur_location') +aur_location = aurweb.config.get("options", "aur_location") def headers_msgid(thread_id): - return {'Message-ID': thread_id} + return {"Message-ID": thread_id} def headers_reply(thread_id): - return {'In-Reply-To': thread_id, 'References': thread_id} + return {"In-Reply-To": thread_id, "References": thread_id} class Notification: @@ -47,67 +46,64 @@ class Notification: return [] def get_body_fmt(self, lang): - body = '' + body = "" for line in self.get_body(lang).splitlines(): - if line == '--': - body += '--\n' + if line == "--": + body += "--\n" continue - body += textwrap.fill(line, break_long_words=False) + '\n' + body += textwrap.fill(line, break_long_words=False) + "\n" for i, ref in enumerate(self.get_refs()): - body += '\n' + '[%d] %s' % 
(i + 1, ref) + body += "\n" + "[%d] %s" % (i + 1, ref) return body.rstrip() def _send(self) -> None: - sendmail = aurweb.config.get('notifications', 'sendmail') - sender = aurweb.config.get('notifications', 'sender') - reply_to = aurweb.config.get('notifications', 'reply-to') + sendmail = aurweb.config.get("notifications", "sendmail") + sender = aurweb.config.get("notifications", "sender") + reply_to = aurweb.config.get("notifications", "reply-to") reason = self.__class__.__name__ - if reason.endswith('Notification'): - reason = reason[:-len('Notification')] + if reason.endswith("Notification"): + reason = reason[: -len("Notification")] for recipient in self.get_recipients(): to, lang = recipient - msg = email.mime.text.MIMEText(self.get_body_fmt(lang), - 'plain', 'utf-8') - msg['Subject'] = self.get_subject(lang) - msg['From'] = sender - msg['Reply-to'] = reply_to - msg['To'] = to + msg = email.mime.text.MIMEText(self.get_body_fmt(lang), "plain", "utf-8") + msg["Subject"] = self.get_subject(lang) + msg["From"] = sender + msg["Reply-to"] = reply_to + msg["To"] = to if self.get_cc(): - msg['Cc'] = str.join(', ', self.get_cc()) - msg['X-AUR-Reason'] = reason - msg['Date'] = email.utils.formatdate(localtime=True) + msg["Cc"] = str.join(", ", self.get_cc()) + msg["X-AUR-Reason"] = reason + msg["Date"] = email.utils.formatdate(localtime=True) for key, value in self.get_headers().items(): msg[key] = value - sendmail = aurweb.config.get('notifications', 'sendmail') + sendmail = aurweb.config.get("notifications", "sendmail") if sendmail: # send email using the sendmail binary specified in the # configuration file - p = subprocess.Popen([sendmail, '-t', '-oi'], - stdin=subprocess.PIPE) + p = subprocess.Popen([sendmail, "-t", "-oi"], stdin=subprocess.PIPE) p.communicate(msg.as_bytes()) else: # send email using smtplib; no local MTA required - server_addr = aurweb.config.get('notifications', 'smtp-server') - server_port = aurweb.config.getint('notifications', - 'smtp-port') - 
use_ssl = aurweb.config.getboolean('notifications', - 'smtp-use-ssl') - use_starttls = aurweb.config.getboolean('notifications', - 'smtp-use-starttls') - user = aurweb.config.get('notifications', 'smtp-user') - passwd = aurweb.config.get('notifications', 'smtp-password') + server_addr = aurweb.config.get("notifications", "smtp-server") + server_port = aurweb.config.getint("notifications", "smtp-port") + use_ssl = aurweb.config.getboolean("notifications", "smtp-use-ssl") + use_starttls = aurweb.config.getboolean( + "notifications", "smtp-use-starttls" + ) + user = aurweb.config.get("notifications", "smtp-user") + passwd = aurweb.config.get("notifications", "smtp-password") classes = { False: smtplib.SMTP, True: smtplib.SMTP_SSL, } - smtp_timeout = aurweb.config.getint("notifications", - "smtp-timeout") - server = classes[use_ssl](server_addr, server_port, - timeout=smtp_timeout) + smtp_timeout = aurweb.config.getint("notifications", "smtp-timeout") + server = classes[use_ssl]( + server_addr, server_port, timeout=smtp_timeout + ) if use_starttls: server.ehlo() @@ -126,23 +122,29 @@ class Notification: try: self._send() except OSError as exc: - logger.error("Unable to emit notification due to an " - "OSError (precise exception following).") + logger.error( + "Unable to emit notification due to an " + "OSError (precise exception following)." 
+ ) logger.error(str(exc)) class ResetKeyNotification(Notification): def __init__(self, uid): - user = db.query(User).filter( - and_(User.ID == uid, User.Suspended == 0) - ).with_entities( - User.Username, - User.Email, - User.BackupEmail, - User.LangPreference, - User.ResetKey - ).order_by(User.Username.asc()).first() + user = ( + db.query(User) + .filter(and_(User.ID == uid, User.Suspended == 0)) + .with_entities( + User.Username, + User.Email, + User.BackupEmail, + User.LangPreference, + User.ResetKey, + ) + .order_by(User.Username.asc()) + .first() + ) self._username = user.Username self._to = user.Email @@ -159,55 +161,66 @@ class ResetKeyNotification(Notification): return [(self._to, self._lang)] def get_subject(self, lang): - return aurweb.l10n.translator.translate('AUR Password Reset', lang) + return aurweb.l10n.translator.translate("AUR Password Reset", lang) def get_body(self, lang): return aurweb.l10n.translator.translate( - 'A password reset request was submitted for the account ' - '{user} associated with your email address. If you wish to ' - 'reset your password follow the link [1] below, otherwise ' - 'ignore this message and nothing will happen.', - lang).format(user=self._username) + "A password reset request was submitted for the account " + "{user} associated with your email address. If you wish to " + "reset your password follow the link [1] below, otherwise " + "ignore this message and nothing will happen.", + lang, + ).format(user=self._username) def get_refs(self): - return (aur_location + '/passreset/?resetkey=' + self._resetkey,) + return (aur_location + "/passreset/?resetkey=" + self._resetkey,) class WelcomeNotification(ResetKeyNotification): def get_subject(self, lang): return aurweb.l10n.translator.translate( - 'Welcome to the Arch User Repository', - lang) + "Welcome to the Arch User Repository", lang + ) def get_body(self, lang): return aurweb.l10n.translator.translate( - 'Welcome to the Arch User Repository! 
In order to set an ' - 'initial password for your new account, please click the ' - 'link [1] below. If the link does not work, try copying and ' - 'pasting it into your browser.', lang) + "Welcome to the Arch User Repository! In order to set an " + "initial password for your new account, please click the " + "link [1] below. If the link does not work, try copying and " + "pasting it into your browser.", + lang, + ) class CommentNotification(Notification): def __init__(self, uid, pkgbase_id, comment_id): - self._user = db.query(User.Username).filter( - User.ID == uid).first().Username - self._pkgbase = db.query(PackageBase.Name).filter( - PackageBase.ID == pkgbase_id).first().Name + self._user = db.query(User.Username).filter(User.ID == uid).first().Username + self._pkgbase = ( + db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name + ) - query = db.query(User).join(PackageNotification).filter( - and_(User.CommentNotify == 1, - PackageNotification.UserID != uid, - PackageNotification.PackageBaseID == pkgbase_id, - User.Suspended == 0) - ).with_entities( - User.Email, - User.LangPreference - ).distinct() + query = ( + db.query(User) + .join(PackageNotification) + .filter( + and_( + User.CommentNotify == 1, + PackageNotification.UserID != uid, + PackageNotification.PackageBaseID == pkgbase_id, + User.Suspended == 0, + ) + ) + .with_entities(User.Email, User.LangPreference) + .distinct() + ) self._recipients = [(u.Email, u.LangPreference) for u in query] - pkgcomment = db.query(PackageComment.Comments).filter( - PackageComment.ID == comment_id).first() + pkgcomment = ( + db.query(PackageComment.Comments) + .filter(PackageComment.ID == comment_id) + .first() + ) self._text = pkgcomment.Comments super().__init__() @@ -217,49 +230,56 @@ class CommentNotification(Notification): def get_subject(self, lang): return aurweb.l10n.translator.translate( - 'AUR Comment for {pkgbase}', - lang).format(pkgbase=self._pkgbase) + "AUR Comment for {pkgbase}", lang 
+ ).format(pkgbase=self._pkgbase) def get_body(self, lang): body = aurweb.l10n.translator.translate( - '{user} [1] added the following comment to {pkgbase} [2]:', - lang).format(user=self._user, pkgbase=self._pkgbase) - body += '\n\n' + self._text + '\n\n--\n' - dnlabel = aurweb.l10n.translator.translate( - 'Disable notifications', lang) + "{user} [1] added the following comment to {pkgbase} [2]:", lang + ).format(user=self._user, pkgbase=self._pkgbase) + body += "\n\n" + self._text + "\n\n--\n" + dnlabel = aurweb.l10n.translator.translate("Disable notifications", lang) body += aurweb.l10n.translator.translate( - 'If you no longer wish to receive notifications about this ' - 'package, please go to the package page [2] and select ' - '"{label}".', lang).format(label=dnlabel) + "If you no longer wish to receive notifications about this " + "package, please go to the package page [2] and select " + '"{label}".', + lang, + ).format(label=dnlabel) return body def get_refs(self): - return (aur_location + '/account/' + self._user + '/', - aur_location + '/pkgbase/' + self._pkgbase + '/') + return ( + aur_location + "/account/" + self._user + "/", + aur_location + "/pkgbase/" + self._pkgbase + "/", + ) def get_headers(self): - thread_id = '' + thread_id = "" return headers_reply(thread_id) class UpdateNotification(Notification): def __init__(self, uid, pkgbase_id): - self._user = db.query(User.Username).filter( - User.ID == uid).first().Username - self._pkgbase = db.query(PackageBase.Name).filter( - PackageBase.ID == pkgbase_id).first().Name + self._user = db.query(User.Username).filter(User.ID == uid).first().Username + self._pkgbase = ( + db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name + ) - query = db.query(User).join(PackageNotification).filter( - and_(User.UpdateNotify == 1, - PackageNotification.UserID != uid, - PackageNotification.PackageBaseID == pkgbase_id, - User.Suspended == 0) - ).with_entities( - User.Email, - User.LangPreference - 
).distinct() + query = ( + db.query(User) + .join(PackageNotification) + .filter( + and_( + User.UpdateNotify == 1, + PackageNotification.UserID != uid, + PackageNotification.PackageBaseID == pkgbase_id, + User.Suspended == 0, + ) + ) + .with_entities(User.Email, User.LangPreference) + .distinct() + ) self._recipients = [(u.Email, u.LangPreference) for u in query] super().__init__() @@ -269,55 +289,63 @@ class UpdateNotification(Notification): def get_subject(self, lang): return aurweb.l10n.translator.translate( - 'AUR Package Update: {pkgbase}', - lang).format(pkgbase=self._pkgbase) + "AUR Package Update: {pkgbase}", lang + ).format(pkgbase=self._pkgbase) def get_body(self, lang): body = aurweb.l10n.translator.translate( - '{user} [1] pushed a new commit to {pkgbase} [2].', - lang).format(user=self._user, pkgbase=self._pkgbase) - body += '\n\n--\n' - dnlabel = aurweb.l10n.translator.translate( - 'Disable notifications', lang) + "{user} [1] pushed a new commit to {pkgbase} [2].", lang + ).format(user=self._user, pkgbase=self._pkgbase) + body += "\n\n--\n" + dnlabel = aurweb.l10n.translator.translate("Disable notifications", lang) body += aurweb.l10n.translator.translate( - 'If you no longer wish to receive notifications about this ' - 'package, please go to the package page [2] and select ' - '"{label}".', lang).format(label=dnlabel) + "If you no longer wish to receive notifications about this " + "package, please go to the package page [2] and select " + '"{label}".', + lang, + ).format(label=dnlabel) return body def get_refs(self): - return (aur_location + '/account/' + self._user + '/', - aur_location + '/pkgbase/' + self._pkgbase + '/') + return ( + aur_location + "/account/" + self._user + "/", + aur_location + "/pkgbase/" + self._pkgbase + "/", + ) def get_headers(self): - thread_id = '' + thread_id = "" return headers_reply(thread_id) class FlagNotification(Notification): def __init__(self, uid, pkgbase_id): - self._user = db.query(User.Username).filter( - 
User.ID == uid).first().Username - self._pkgbase = db.query(PackageBase.Name).filter( - PackageBase.ID == pkgbase_id).first().Name + self._user = db.query(User.Username).filter(User.ID == uid).first().Username + self._pkgbase = ( + db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name + ) - query = db.query(User).join(PackageComaintainer, isouter=True).join( - PackageBase, - or_(PackageBase.MaintainerUID == User.ID, - PackageBase.ID == PackageComaintainer.PackageBaseID) - ).filter( - and_(PackageBase.ID == pkgbase_id, - User.Suspended == 0) - ).with_entities( - User.Email, - User.LangPreference - ).distinct() + query = ( + db.query(User) + .join(PackageComaintainer, isouter=True) + .join( + PackageBase, + or_( + PackageBase.MaintainerUID == User.ID, + PackageBase.ID == PackageComaintainer.PackageBaseID, + ), + ) + .filter(and_(PackageBase.ID == pkgbase_id, User.Suspended == 0)) + .with_entities(User.Email, User.LangPreference) + .distinct() + ) self._recipients = [(u.Email, u.LangPreference) for u in query] - pkgbase = db.query(PackageBase.FlaggerComment).filter( - PackageBase.ID == pkgbase_id).first() + pkgbase = ( + db.query(PackageBase.FlaggerComment) + .filter(PackageBase.ID == pkgbase_id) + .first() + ) self._text = pkgbase.FlaggerComment super().__init__() @@ -327,43 +355,53 @@ class FlagNotification(Notification): def get_subject(self, lang): return aurweb.l10n.translator.translate( - 'AUR Out-of-date Notification for {pkgbase}', - lang).format(pkgbase=self._pkgbase) + "AUR Out-of-date Notification for {pkgbase}", lang + ).format(pkgbase=self._pkgbase) def get_body(self, lang): body = aurweb.l10n.translator.translate( - 'Your package {pkgbase} [1] has been flagged out-of-date by ' - '{user} [2]:', lang).format(pkgbase=self._pkgbase, - user=self._user) - body += '\n\n' + self._text + "Your package {pkgbase} [1] has been flagged out-of-date by " "{user} [2]:", + lang, + ).format(pkgbase=self._pkgbase, user=self._user) + body += "\n\n" + 
self._text return body def get_refs(self): - return (aur_location + '/pkgbase/' + self._pkgbase + '/', - aur_location + '/account/' + self._user + '/') + return ( + aur_location + "/pkgbase/" + self._pkgbase + "/", + aur_location + "/account/" + self._user + "/", + ) class OwnershipEventNotification(Notification): def __init__(self, uid, pkgbase_id): - self._user = db.query(User.Username).filter( - User.ID == uid).first().Username - self._pkgbase = db.query(PackageBase.Name).filter( - PackageBase.ID == pkgbase_id).first().Name + self._user = db.query(User.Username).filter(User.ID == uid).first().Username + self._pkgbase = ( + db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name + ) - query = db.query(User).join(PackageNotification).filter( - and_(User.OwnershipNotify == 1, - PackageNotification.UserID != uid, - PackageNotification.PackageBaseID == pkgbase_id, - User.Suspended == 0) - ).with_entities( - User.Email, - User.LangPreference - ).distinct() + query = ( + db.query(User) + .join(PackageNotification) + .filter( + and_( + User.OwnershipNotify == 1, + PackageNotification.UserID != uid, + PackageNotification.PackageBaseID == pkgbase_id, + User.Suspended == 0, + ) + ) + .with_entities(User.Email, User.LangPreference) + .distinct() + ) self._recipients = [(u.Email, u.LangPreference) for u in query] - pkgbase = db.query(PackageBase.FlaggerComment).filter( - PackageBase.ID == pkgbase_id).first() + pkgbase = ( + db.query(PackageBase.FlaggerComment) + .filter(PackageBase.ID == pkgbase_id) + .first() + ) self._text = pkgbase.FlaggerComment super().__init__() @@ -373,39 +411,43 @@ class OwnershipEventNotification(Notification): def get_subject(self, lang): return aurweb.l10n.translator.translate( - 'AUR Ownership Notification for {pkgbase}', - lang).format(pkgbase=self._pkgbase) + "AUR Ownership Notification for {pkgbase}", lang + ).format(pkgbase=self._pkgbase) def get_refs(self): - return (aur_location + '/pkgbase/' + self._pkgbase + '/', - 
aur_location + '/account/' + self._user + '/') + return ( + aur_location + "/pkgbase/" + self._pkgbase + "/", + aur_location + "/account/" + self._user + "/", + ) class AdoptNotification(OwnershipEventNotification): def get_body(self, lang): return aurweb.l10n.translator.translate( - 'The package {pkgbase} [1] was adopted by {user} [2].', - lang).format(pkgbase=self._pkgbase, user=self._user) + "The package {pkgbase} [1] was adopted by {user} [2].", lang + ).format(pkgbase=self._pkgbase, user=self._user) class DisownNotification(OwnershipEventNotification): def get_body(self, lang): return aurweb.l10n.translator.translate( - 'The package {pkgbase} [1] was disowned by {user} ' - '[2].', lang).format(pkgbase=self._pkgbase, - user=self._user) + "The package {pkgbase} [1] was disowned by {user} " "[2].", lang + ).format(pkgbase=self._pkgbase, user=self._user) class ComaintainershipEventNotification(Notification): def __init__(self, uid, pkgbase_id): - self._pkgbase = db.query(PackageBase.Name).filter( - PackageBase.ID == pkgbase_id).first().Name + self._pkgbase = ( + db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name + ) - user = db.query(User).filter(User.ID == uid).with_entities( - User.Email, - User.LangPreference - ).first() + user = ( + db.query(User) + .filter(User.ID == uid) + .with_entities(User.Email, User.LangPreference) + .first() + ) self._to = user.Email self._lang = user.LangPreference @@ -417,247 +459,59 @@ class ComaintainershipEventNotification(Notification): def get_subject(self, lang): return aurweb.l10n.translator.translate( - 'AUR Co-Maintainer Notification for {pkgbase}', - lang).format(pkgbase=self._pkgbase) + "AUR Co-Maintainer Notification for {pkgbase}", lang + ).format(pkgbase=self._pkgbase) def get_refs(self): - return (aur_location + '/pkgbase/' + self._pkgbase + '/',) + return (aur_location + "/pkgbase/" + self._pkgbase + "/",) class ComaintainerAddNotification(ComaintainershipEventNotification): def 
get_body(self, lang): return aurweb.l10n.translator.translate( - 'You were added to the co-maintainer list of {pkgbase} [1].', - lang).format(pkgbase=self._pkgbase) + "You were added to the co-maintainer list of {pkgbase} [1].", lang + ).format(pkgbase=self._pkgbase) class ComaintainerRemoveNotification(ComaintainershipEventNotification): def get_body(self, lang): return aurweb.l10n.translator.translate( - 'You were removed from the co-maintainer list of {pkgbase} ' - '[1].', lang).format(pkgbase=self._pkgbase) + "You were removed from the co-maintainer list of {pkgbase} " "[1].", lang + ).format(pkgbase=self._pkgbase) class DeleteNotification(Notification): def __init__(self, uid, old_pkgbase_id, new_pkgbase_id=None): - self._user = db.query(User.Username).filter( - User.ID == uid).first().Username - self._old_pkgbase = db.query(PackageBase.Name).filter( - PackageBase.ID == old_pkgbase_id).first().Name + self._user = db.query(User.Username).filter(User.ID == uid).first().Username + self._old_pkgbase = ( + db.query(PackageBase.Name) + .filter(PackageBase.ID == old_pkgbase_id) + .first() + .Name + ) self._new_pkgbase = None if new_pkgbase_id: - self._new_pkgbase = db.query(PackageBase.Name).filter( - PackageBase.ID == new_pkgbase_id).first().Name + self._new_pkgbase = ( + db.query(PackageBase.Name) + .filter(PackageBase.ID == new_pkgbase_id) + .first() + .Name + ) - query = db.query(User).join(PackageNotification).filter( - and_(PackageNotification.UserID != uid, - PackageNotification.PackageBaseID == old_pkgbase_id, - User.Suspended == 0) - ).with_entities( - User.Email, - User.LangPreference - ).distinct() - self._recipients = [(u.Email, u.LangPreference) for u in query] - - super().__init__() - - def get_recipients(self): - return self._recipients - - def get_subject(self, lang): - return aurweb.l10n.translator.translate( - 'AUR Package deleted: {pkgbase}', - lang).format(pkgbase=self._old_pkgbase) - - def get_body(self, lang): - if self._new_pkgbase: - dnlabel = 
aurweb.l10n.translator.translate( - 'Disable notifications', lang) - return aurweb.l10n.translator.translate( - '{user} [1] merged {old} [2] into {new} [3].\n\n' - '--\n' - 'If you no longer wish receive notifications about the ' - 'new package, please go to [3] and click "{label}".', - lang).format(user=self._user, old=self._old_pkgbase, - new=self._new_pkgbase, label=dnlabel) - else: - return aurweb.l10n.translator.translate( - '{user} [1] deleted {pkgbase} [2].\n\n' - 'You will no longer receive notifications about this ' - 'package.', lang).format(user=self._user, - pkgbase=self._old_pkgbase) - - def get_refs(self): - refs = (aur_location + '/account/' + self._user + '/', - aur_location + '/pkgbase/' + self._old_pkgbase + '/') - if self._new_pkgbase: - refs += (aur_location + '/pkgbase/' + self._new_pkgbase + '/',) - return refs - - -class RequestOpenNotification(Notification): - def __init__(self, uid, reqid, reqtype, pkgbase_id, merge_into=None): - - self._user = db.query(User.Username).filter( - User.ID == uid).first().Username - self._pkgbase = db.query(PackageBase.Name).filter( - PackageBase.ID == pkgbase_id).first().Name - - self._to = aurweb.config.get('options', 'aur_request_ml') - - query = db.query(PackageRequest).join(PackageBase).join( - PackageComaintainer, - PackageComaintainer.PackageBaseID == PackageRequest.PackageBaseID, - isouter=True - ).join( - User, - or_(User.ID == PackageRequest.UsersID, - User.ID == PackageBase.MaintainerUID, - User.ID == PackageComaintainer.UsersID) - ).filter( - and_(PackageRequest.ID == reqid, - User.Suspended == 0) - ).with_entities( - User.Email - ).distinct() - self._cc = [u.Email for u in query] - - pkgreq = db.query(PackageRequest.Comments).filter( - PackageRequest.ID == reqid).first() - - self._text = pkgreq.Comments - self._reqid = int(reqid) - self._reqtype = reqtype - self._merge_into = merge_into - - def get_recipients(self): - return [(self._to, 'en')] - - def get_cc(self): - return self._cc - - def 
get_subject(self, lang): - return '[PRQ#%d] %s Request for %s' % \ - (self._reqid, self._reqtype.title(), self._pkgbase) - - def get_body(self, lang): - if self._merge_into: - body = '%s [1] filed a request to merge %s [2] into %s [3]:' % \ - (self._user, self._pkgbase, self._merge_into) - body += '\n\n' + self._text - else: - an = 'an' if self._reqtype[0] in 'aeiou' else 'a' - body = '%s [1] filed %s %s request for %s [2]:' % \ - (self._user, an, self._reqtype, self._pkgbase) - body += '\n\n' + self._text - return body - - def get_refs(self): - refs = (aur_location + '/account/' + self._user + '/', - aur_location + '/pkgbase/' + self._pkgbase + '/') - if self._merge_into: - refs += (aur_location + '/pkgbase/' + self._merge_into + '/',) - return refs - - def get_headers(self): - thread_id = '' - # Use a deterministic Message-ID for the first email referencing a - # request. - headers = headers_msgid(thread_id) - return headers - - -class RequestCloseNotification(Notification): - - def __init__(self, uid, reqid, reason): - user = db.query(User.Username).filter(User.ID == uid).first() - self._user = user.Username if user else None - - self._to = aurweb.config.get('options', 'aur_request_ml') - - query = db.query(PackageRequest).join(PackageBase).join( - PackageComaintainer, - PackageComaintainer.PackageBaseID == PackageRequest.PackageBaseID, - isouter=True - ).join( - User, - or_(User.ID == PackageRequest.UsersID, - User.ID == PackageBase.MaintainerUID, - User.ID == PackageComaintainer.UsersID) - ).filter( - and_(PackageRequest.ID == reqid, - User.Suspended == 0) - ).with_entities( - User.Email - ).distinct() - self._cc = [u.Email for u in query] - - pkgreq = db.query(PackageRequest).join(RequestType).filter( - PackageRequest.ID == reqid - ).with_entities( - PackageRequest.ClosureComment, - RequestType.Name, - PackageRequest.PackageBaseName - ).first() - - self._text = pkgreq.ClosureComment - self._reqtype = pkgreq.Name - self._pkgbase = pkgreq.PackageBaseName - - 
self._reqid = int(reqid) - self._reason = reason - - def get_recipients(self): - return [(self._to, 'en')] - - def get_cc(self): - return self._cc - - def get_subject(self, lang): - return '[PRQ#%d] %s Request for %s %s' % (self._reqid, - self._reqtype.title(), - self._pkgbase, - self._reason.title()) - - def get_body(self, lang): - if self._user: - body = 'Request #%d has been %s by %s [1]' % \ - (self._reqid, self._reason, self._user) - else: - body = 'Request #%d has been %s automatically by the Arch User ' \ - 'Repository package request system' % \ - (self._reqid, self._reason) - if self._text.strip() == '': - body += '.' - else: - body += ':\n\n' + self._text - return body - - def get_refs(self): - if self._user: - return (aur_location + '/account/' + self._user + '/',) - else: - return () - - def get_headers(self): - thread_id = '' - headers = headers_reply(thread_id) - return headers - - -class TUVoteReminderNotification(Notification): - def __init__(self, vote_id): - self._vote_id = int(vote_id) - - subquery = db.query(TUVote.UserID).filter(TUVote.VoteID == vote_id) - query = db.query(User).filter( - and_(User.AccountTypeID.in_((2, 4)), - ~User.ID.in_(subquery), - User.Suspended == 0) - ).with_entities( - User.Email, User.LangPreference + query = ( + db.query(User) + .join(PackageNotification) + .filter( + and_( + PackageNotification.UserID != uid, + PackageNotification.PackageBaseID == old_pkgbase_id, + User.Suspended == 0, + ) + ) + .with_entities(User.Email, User.LangPreference) + .distinct() ) self._recipients = [(u.Email, u.LangPreference) for u in query] @@ -668,36 +522,280 @@ class TUVoteReminderNotification(Notification): def get_subject(self, lang): return aurweb.l10n.translator.translate( - 'TU Vote Reminder: Proposal {id}', - lang).format(id=self._vote_id) + "AUR Package deleted: {pkgbase}", lang + ).format(pkgbase=self._old_pkgbase) + + def get_body(self, lang): + if self._new_pkgbase: + dnlabel = aurweb.l10n.translator.translate("Disable 
notifications", lang) + return aurweb.l10n.translator.translate( + "{user} [1] merged {old} [2] into {new} [3].\n\n" + "--\n" + "If you no longer wish receive notifications about the " + 'new package, please go to [3] and click "{label}".', + lang, + ).format( + user=self._user, + old=self._old_pkgbase, + new=self._new_pkgbase, + label=dnlabel, + ) + else: + return aurweb.l10n.translator.translate( + "{user} [1] deleted {pkgbase} [2].\n\n" + "You will no longer receive notifications about this " + "package.", + lang, + ).format(user=self._user, pkgbase=self._old_pkgbase) + + def get_refs(self): + refs = ( + aur_location + "/account/" + self._user + "/", + aur_location + "/pkgbase/" + self._old_pkgbase + "/", + ) + if self._new_pkgbase: + refs += (aur_location + "/pkgbase/" + self._new_pkgbase + "/",) + return refs + + +class RequestOpenNotification(Notification): + def __init__(self, uid, reqid, reqtype, pkgbase_id, merge_into=None): + + self._user = db.query(User.Username).filter(User.ID == uid).first().Username + self._pkgbase = ( + db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name + ) + + self._to = aurweb.config.get("options", "aur_request_ml") + + query = ( + db.query(PackageRequest) + .join(PackageBase) + .join( + PackageComaintainer, + PackageComaintainer.PackageBaseID == PackageRequest.PackageBaseID, + isouter=True, + ) + .join( + User, + or_( + User.ID == PackageRequest.UsersID, + User.ID == PackageBase.MaintainerUID, + User.ID == PackageComaintainer.UsersID, + ), + ) + .filter(and_(PackageRequest.ID == reqid, User.Suspended == 0)) + .with_entities(User.Email) + .distinct() + ) + self._cc = [u.Email for u in query] + + pkgreq = ( + db.query(PackageRequest.Comments).filter(PackageRequest.ID == reqid).first() + ) + + self._text = pkgreq.Comments + self._reqid = int(reqid) + self._reqtype = reqtype + self._merge_into = merge_into + + def get_recipients(self): + return [(self._to, "en")] + + def get_cc(self): + return self._cc + + 
def get_subject(self, lang): + return "[PRQ#%d] %s Request for %s" % ( + self._reqid, + self._reqtype.title(), + self._pkgbase, + ) + + def get_body(self, lang): + if self._merge_into: + body = "%s [1] filed a request to merge %s [2] into %s [3]:" % ( + self._user, + self._pkgbase, + self._merge_into, + ) + body += "\n\n" + self._text + else: + an = "an" if self._reqtype[0] in "aeiou" else "a" + body = "%s [1] filed %s %s request for %s [2]:" % ( + self._user, + an, + self._reqtype, + self._pkgbase, + ) + body += "\n\n" + self._text + return body + + def get_refs(self): + refs = ( + aur_location + "/account/" + self._user + "/", + aur_location + "/pkgbase/" + self._pkgbase + "/", + ) + if self._merge_into: + refs += (aur_location + "/pkgbase/" + self._merge_into + "/",) + return refs + + def get_headers(self): + thread_id = "" + # Use a deterministic Message-ID for the first email referencing a + # request. + headers = headers_msgid(thread_id) + return headers + + +class RequestCloseNotification(Notification): + def __init__(self, uid, reqid, reason): + user = db.query(User.Username).filter(User.ID == uid).first() + self._user = user.Username if user else None + + self._to = aurweb.config.get("options", "aur_request_ml") + + query = ( + db.query(PackageRequest) + .join(PackageBase) + .join( + PackageComaintainer, + PackageComaintainer.PackageBaseID == PackageRequest.PackageBaseID, + isouter=True, + ) + .join( + User, + or_( + User.ID == PackageRequest.UsersID, + User.ID == PackageBase.MaintainerUID, + User.ID == PackageComaintainer.UsersID, + ), + ) + .filter(and_(PackageRequest.ID == reqid, User.Suspended == 0)) + .with_entities(User.Email) + .distinct() + ) + self._cc = [u.Email for u in query] + + pkgreq = ( + db.query(PackageRequest) + .join(RequestType) + .filter(PackageRequest.ID == reqid) + .with_entities( + PackageRequest.ClosureComment, + RequestType.Name, + PackageRequest.PackageBaseName, + ) + .first() + ) + + self._text = pkgreq.ClosureComment + 
self._reqtype = pkgreq.Name + self._pkgbase = pkgreq.PackageBaseName + + self._reqid = int(reqid) + self._reason = reason + + def get_recipients(self): + return [(self._to, "en")] + + def get_cc(self): + return self._cc + + def get_subject(self, lang): + return "[PRQ#%d] %s Request for %s %s" % ( + self._reqid, + self._reqtype.title(), + self._pkgbase, + self._reason.title(), + ) + + def get_body(self, lang): + if self._user: + body = "Request #%d has been %s by %s [1]" % ( + self._reqid, + self._reason, + self._user, + ) + else: + body = ( + "Request #%d has been %s automatically by the Arch User " + "Repository package request system" % (self._reqid, self._reason) + ) + if self._text.strip() == "": + body += "." + else: + body += ":\n\n" + self._text + return body + + def get_refs(self): + if self._user: + return (aur_location + "/account/" + self._user + "/",) + else: + return () + + def get_headers(self): + thread_id = "" + headers = headers_reply(thread_id) + return headers + + +class TUVoteReminderNotification(Notification): + def __init__(self, vote_id): + self._vote_id = int(vote_id) + + subquery = db.query(TUVote.UserID).filter(TUVote.VoteID == vote_id) + query = ( + db.query(User) + .filter( + and_( + User.AccountTypeID.in_((2, 4)), + ~User.ID.in_(subquery), + User.Suspended == 0, + ) + ) + .with_entities(User.Email, User.LangPreference) + ) + self._recipients = [(u.Email, u.LangPreference) for u in query] + + super().__init__() + + def get_recipients(self): + return self._recipients + + def get_subject(self, lang): + return aurweb.l10n.translator.translate( + "TU Vote Reminder: Proposal {id}", lang + ).format(id=self._vote_id) def get_body(self, lang): return aurweb.l10n.translator.translate( - 'Please remember to cast your vote on proposal {id} [1]. ' - 'The voting period ends in less than 48 hours.', - lang).format(id=self._vote_id) + "Please remember to cast your vote on proposal {id} [1]. 
" + "The voting period ends in less than 48 hours.", + lang, + ).format(id=self._vote_id) def get_refs(self): - return (aur_location + '/tu/?id=' + str(self._vote_id),) + return (aur_location + "/tu/?id=" + str(self._vote_id),) def main(): db.get_engine() action = sys.argv[1] action_map = { - 'send-resetkey': ResetKeyNotification, - 'welcome': WelcomeNotification, - 'comment': CommentNotification, - 'update': UpdateNotification, - 'flag': FlagNotification, - 'adopt': AdoptNotification, - 'disown': DisownNotification, - 'comaintainer-add': ComaintainerAddNotification, - 'comaintainer-remove': ComaintainerRemoveNotification, - 'delete': DeleteNotification, - 'request-open': RequestOpenNotification, - 'request-close': RequestCloseNotification, - 'tu-vote-reminder': TUVoteReminderNotification, + "send-resetkey": ResetKeyNotification, + "welcome": WelcomeNotification, + "comment": CommentNotification, + "update": UpdateNotification, + "flag": FlagNotification, + "adopt": AdoptNotification, + "disown": DisownNotification, + "comaintainer-add": ComaintainerAddNotification, + "comaintainer-remove": ComaintainerRemoveNotification, + "delete": DeleteNotification, + "request-open": RequestOpenNotification, + "request-close": RequestCloseNotification, + "tu-vote-reminder": TUVoteReminderNotification, } with db.begin(): @@ -705,5 +803,5 @@ def main(): notification.send() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/aurweb/scripts/pkgmaint.py b/aurweb/scripts/pkgmaint.py index 2a2c638a..9d7cf53b 100755 --- a/aurweb/scripts/pkgmaint.py +++ b/aurweb/scripts/pkgmaint.py @@ -11,8 +11,8 @@ def _main(): limit_to = time.utcnow() - 86400 query = db.query(PackageBase).filter( - and_(PackageBase.SubmittedTS < limit_to, - PackageBase.PackagerUID.is_(None))) + and_(PackageBase.SubmittedTS < limit_to, PackageBase.PackagerUID.is_(None)) + ) db.delete_all(query) @@ -22,5 +22,5 @@ def main(): _main() -if __name__ == '__main__': +if __name__ == "__main__": main() 
diff --git a/aurweb/scripts/popupdate.py b/aurweb/scripts/popupdate.py index 637173eb..aa163be1 100755 --- a/aurweb/scripts/popupdate.py +++ b/aurweb/scripts/popupdate.py @@ -1,8 +1,7 @@ #!/usr/bin/env python3 from sqlalchemy import and_, func -from sqlalchemy.sql.functions import coalesce -from sqlalchemy.sql.functions import sum as _sum +from sqlalchemy.sql.functions import coalesce, sum as _sum from aurweb import db, time from aurweb.models import PackageBase, PackageVote @@ -20,18 +19,26 @@ def run_variable(pkgbases: list[PackageBase] = []) -> None: now = time.utcnow() # NumVotes subquery. - votes_subq = db.get_session().query( - func.count("*") - ).select_from(PackageVote).filter( - PackageVote.PackageBaseID == PackageBase.ID + votes_subq = ( + db.get_session() + .query(func.count("*")) + .select_from(PackageVote) + .filter(PackageVote.PackageBaseID == PackageBase.ID) ) # Popularity subquery. - pop_subq = db.get_session().query( - coalesce(_sum(func.pow(0.98, (now - PackageVote.VoteTS) / 86400)), 0.0), - ).select_from(PackageVote).filter( - and_(PackageVote.PackageBaseID == PackageBase.ID, - PackageVote.VoteTS.isnot(None)) + pop_subq = ( + db.get_session() + .query( + coalesce(_sum(func.pow(0.98, (now - PackageVote.VoteTS) / 86400)), 0.0), + ) + .select_from(PackageVote) + .filter( + and_( + PackageVote.PackageBaseID == PackageBase.ID, + PackageVote.VoteTS.isnot(None), + ) + ) ) with db.begin(): @@ -42,14 +49,16 @@ def run_variable(pkgbases: list[PackageBase] = []) -> None: ids = {pkgbase.ID for pkgbase in pkgbases} query = query.filter(PackageBase.ID.in_(ids)) - query.update({ - "NumVotes": votes_subq.scalar_subquery(), - "Popularity": pop_subq.scalar_subquery() - }) + query.update( + { + "NumVotes": votes_subq.scalar_subquery(), + "Popularity": pop_subq.scalar_subquery(), + } + ) def run_single(pkgbase: PackageBase) -> None: - """ A single popupdate. The given pkgbase instance will be + """A single popupdate. 
The given pkgbase instance will be refreshed after the database update is done. NOTE: This function is compatible only with aurweb FastAPI. @@ -65,5 +74,5 @@ def main(): run_variable() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/aurweb/scripts/rendercomment.py b/aurweb/scripts/rendercomment.py index 87f8b89f..ff6fe09c 100755 --- a/aurweb/scripts/rendercomment.py +++ b/aurweb/scripts/rendercomment.py @@ -1,7 +1,6 @@ #!/usr/bin/env python3 import sys - from urllib.parse import quote_plus from xml.etree.ElementTree import Element @@ -10,7 +9,6 @@ import markdown import pygit2 import aurweb.config - from aurweb import db, logging, util from aurweb.models import PackageComment @@ -25,13 +23,15 @@ class LinkifyExtension(markdown.extensions.Extension): # Captures http(s) and ftp URLs until the first non URL-ish character. # Excludes trailing punctuation. - _urlre = (r'(\b(?:https?|ftp):\/\/[\w\/\#~:.?+=&%@!\-;,]+?' - r'(?=[.:?\-;,]*(?:[^\w\/\#~:.?+=&%@!\-;,]|$)))') + _urlre = ( + r"(\b(?:https?|ftp):\/\/[\w\/\#~:.?+=&%@!\-;,]+?" + r"(?=[.:?\-;,]*(?:[^\w\/\#~:.?+=&%@!\-;,]|$)))" + ) def extendMarkdown(self, md): processor = markdown.inlinepatterns.AutolinkInlineProcessor(self._urlre, md) # Register it right after the default <>-link processor (priority 120). 
- md.inlinePatterns.register(processor, 'linkify', 119) + md.inlinePatterns.register(processor, "linkify", 119) class FlysprayLinksInlineProcessor(markdown.inlinepatterns.InlineProcessor): @@ -43,16 +43,16 @@ class FlysprayLinksInlineProcessor(markdown.inlinepatterns.InlineProcessor): """ def handleMatch(self, m, data): - el = Element('a') - el.set('href', f'https://bugs.archlinux.org/task/{m.group(1)}') + el = Element("a") + el.set("href", f"https://bugs.archlinux.org/task/{m.group(1)}") el.text = markdown.util.AtomicString(m.group(0)) return (el, m.start(0), m.end(0)) class FlysprayLinksExtension(markdown.extensions.Extension): def extendMarkdown(self, md): - processor = FlysprayLinksInlineProcessor(r'\bFS#(\d+)\b', md) - md.inlinePatterns.register(processor, 'flyspray-links', 118) + processor = FlysprayLinksInlineProcessor(r"\bFS#(\d+)\b", md) + md.inlinePatterns.register(processor, "flyspray-links", 118) class GitCommitsInlineProcessor(markdown.inlinepatterns.InlineProcessor): @@ -65,10 +65,10 @@ class GitCommitsInlineProcessor(markdown.inlinepatterns.InlineProcessor): """ def __init__(self, md, head): - repo_path = aurweb.config.get('serve', 'repo-path') + repo_path = aurweb.config.get("serve", "repo-path") self._repo = pygit2.Repository(repo_path) self._head = head - super().__init__(r'\b([0-9a-f]{7,40})\b', md) + super().__init__(r"\b([0-9a-f]{7,40})\b", md) def handleMatch(self, m, data): oid = m.group(1) @@ -76,13 +76,12 @@ class GitCommitsInlineProcessor(markdown.inlinepatterns.InlineProcessor): # Unknown OID; preserve the orginal text. 
return (None, None, None) - el = Element('a') + el = Element("a") commit_uri = aurweb.config.get("options", "commit_uri") prefixlen = util.git_search(self._repo, oid) - el.set('href', commit_uri % ( - quote_plus(self._head), - quote_plus(oid[:prefixlen]) - )) + el.set( + "href", commit_uri % (quote_plus(self._head), quote_plus(oid[:prefixlen])) + ) el.text = markdown.util.AtomicString(oid[:prefixlen]) return (el, m.start(0), m.end(0)) @@ -97,7 +96,7 @@ class GitCommitsExtension(markdown.extensions.Extension): def extendMarkdown(self, md): try: processor = GitCommitsInlineProcessor(md, self._head) - md.inlinePatterns.register(processor, 'git-commits', 117) + md.inlinePatterns.register(processor, "git-commits", 117) except pygit2.GitError: logger.error(f"No git repository found for '{self._head}'.") @@ -105,16 +104,16 @@ class GitCommitsExtension(markdown.extensions.Extension): class HeadingTreeprocessor(markdown.treeprocessors.Treeprocessor): def run(self, doc): for elem in doc: - if elem.tag == 'h1': - elem.tag = 'h5' - elif elem.tag in ['h2', 'h3', 'h4', 'h5']: - elem.tag = 'h6' + if elem.tag == "h1": + elem.tag = "h5" + elif elem.tag in ["h2", "h3", "h4", "h5"]: + elem.tag = "h6" class HeadingExtension(markdown.extensions.Extension): def extendMarkdown(self, md): # Priority doesn't matter since we don't conflict with other processors. 
- md.treeprocessors.register(HeadingTreeprocessor(md), 'heading', 30) + md.treeprocessors.register(HeadingTreeprocessor(md), "heading", 30) def save_rendered_comment(comment: PackageComment, html: str): @@ -130,16 +129,26 @@ def update_comment_render(comment: PackageComment) -> None: text = comment.Comments pkgbasename = comment.PackageBase.Name - html = markdown.markdown(text, extensions=[ - 'fenced_code', - LinkifyExtension(), - FlysprayLinksExtension(), - GitCommitsExtension(pkgbasename), - HeadingExtension() - ]) + html = markdown.markdown( + text, + extensions=[ + "fenced_code", + LinkifyExtension(), + FlysprayLinksExtension(), + GitCommitsExtension(pkgbasename), + HeadingExtension(), + ], + ) - allowed_tags = (bleach.sanitizer.ALLOWED_TAGS - + ['p', 'pre', 'h4', 'h5', 'h6', 'br', 'hr']) + allowed_tags = bleach.sanitizer.ALLOWED_TAGS + [ + "p", + "pre", + "h4", + "h5", + "h6", + "br", + "hr", + ] html = bleach.clean(html, tags=allowed_tags) save_rendered_comment(comment, html) db.refresh(comment) @@ -148,11 +157,9 @@ def update_comment_render(comment: PackageComment) -> None: def main(): db.get_engine() comment_id = int(sys.argv[1]) - comment = db.query(PackageComment).filter( - PackageComment.ID == comment_id - ).first() + comment = db.query(PackageComment).filter(PackageComment.ID == comment_id).first() update_comment_render(comment) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/aurweb/scripts/tuvotereminder.py b/aurweb/scripts/tuvotereminder.py index 742fa6d4..aa59d911 100755 --- a/aurweb/scripts/tuvotereminder.py +++ b/aurweb/scripts/tuvotereminder.py @@ -3,12 +3,11 @@ from sqlalchemy import and_ import aurweb.config - from aurweb import db, time from aurweb.models import TUVoteInfo from aurweb.scripts import notify -notify_cmd = aurweb.config.get('notifications', 'notify-cmd') +notify_cmd = aurweb.config.get("notifications", "notify-cmd") def main(): @@ -23,13 +22,12 @@ def main(): filter_to = now + end query = 
db.query(TUVoteInfo.ID).filter( - and_(TUVoteInfo.End >= filter_from, - TUVoteInfo.End <= filter_to) + and_(TUVoteInfo.End >= filter_from, TUVoteInfo.End <= filter_to) ) for voteinfo in query: notif = notify.TUVoteReminderNotification(voteinfo.ID) notif.send() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/aurweb/scripts/usermaint.py b/aurweb/scripts/usermaint.py index 69f9db04..fb79aeaf 100755 --- a/aurweb/scripts/usermaint.py +++ b/aurweb/scripts/usermaint.py @@ -9,14 +9,16 @@ from aurweb.models import User def _main(): limit_to = time.utcnow() - 86400 * 7 - update_ = update(User).where( - User.LastLogin < limit_to - ).values(LastLoginIPAddress=None) + update_ = ( + update(User).where(User.LastLogin < limit_to).values(LastLoginIPAddress=None) + ) db.get_session().execute(update_) - update_ = update(User).where( - User.LastSSHLogin < limit_to - ).values(LastSSHLoginIPAddress=None) + update_ = ( + update(User) + .where(User.LastSSHLogin < limit_to) + .values(LastSSHLoginIPAddress=None) + ) db.get_session().execute(update_) @@ -26,5 +28,5 @@ def main(): _main() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/aurweb/spawn.py b/aurweb/spawn.py index c7d54c4e..29162f33 100644 --- a/aurweb/spawn.py +++ b/aurweb/spawn.py @@ -16,18 +16,16 @@ import subprocess import sys import tempfile import time - from typing import Iterable import aurweb.config import aurweb.schema - from aurweb.exceptions import AurwebException children = [] temporary_dir = None verbosity = 0 -asgi_backend = '' +asgi_backend = "" workers = 1 PHP_BINARY = os.environ.get("PHP_BINARY", "php") @@ -60,22 +58,21 @@ def validate_php_config() -> None: :return: None """ try: - proc = subprocess.Popen([PHP_BINARY, "-m"], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + proc = subprocess.Popen( + [PHP_BINARY, "-m"], stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) out, _ = proc.communicate() except FileNotFoundError: - raise AurwebException(f"Unable 
to locate the '{PHP_BINARY}' " - "executable.") + raise AurwebException(f"Unable to locate the '{PHP_BINARY}' " "executable.") - assert proc.returncode == 0, ("Received non-zero error code " - f"{proc.returncode} from '{PHP_BINARY}'.") + assert proc.returncode == 0, ( + "Received non-zero error code " f"{proc.returncode} from '{PHP_BINARY}'." + ) modules = out.decode().splitlines() for module in PHP_MODULES: if module not in modules: - raise AurwebException( - f"PHP does not have the '{module}' module enabled.") + raise AurwebException(f"PHP does not have the '{module}' module enabled.") def generate_nginx_config(): @@ -91,7 +88,8 @@ def generate_nginx_config(): config_path = os.path.join(temporary_dir, "nginx.conf") config = open(config_path, "w") # We double nginx's braces because they conflict with Python's f-strings. - config.write(f""" + config.write( + f""" events {{}} daemon off; error_log /dev/stderr info; @@ -124,7 +122,8 @@ def generate_nginx_config(): }} }} }} - """) + """ + ) return config_path @@ -146,20 +145,23 @@ def start(): return atexit.register(stop) - if 'AUR_CONFIG' in os.environ: - os.environ['AUR_CONFIG'] = os.path.realpath(os.environ['AUR_CONFIG']) + if "AUR_CONFIG" in os.environ: + os.environ["AUR_CONFIG"] = os.path.realpath(os.environ["AUR_CONFIG"]) try: terminal_width = os.get_terminal_size().columns except OSError: terminal_width = 80 - print("{ruler}\n" - "Spawing PHP and FastAPI, then nginx as a reverse proxy.\n" - "Check out {aur_location}\n" - "Hit ^C to terminate everything.\n" - "{ruler}" - .format(ruler=("-" * terminal_width), - aur_location=aurweb.config.get('options', 'aur_location'))) + print( + "{ruler}\n" + "Spawing PHP and FastAPI, then nginx as a reverse proxy.\n" + "Check out {aur_location}\n" + "Hit ^C to terminate everything.\n" + "{ruler}".format( + ruler=("-" * terminal_width), + aur_location=aurweb.config.get("options", "aur_location"), + ) + ) # PHP php_address = aurweb.config.get("php", "bind_address") @@ -168,8 
+170,9 @@ def start(): spawn_child(["php", "-S", php_address, "-t", htmldir]) # FastAPI - fastapi_host, fastapi_port = aurweb.config.get( - "fastapi", "bind_address").rsplit(":", 1) + fastapi_host, fastapi_port = aurweb.config.get("fastapi", "bind_address").rsplit( + ":", 1 + ) # Logging config. aurwebdir = aurweb.config.get("options", "aurwebdir") @@ -178,20 +181,33 @@ def start(): backend_args = { "hypercorn": ["-b", f"{fastapi_host}:{fastapi_port}"], "uvicorn": ["--host", fastapi_host, "--port", fastapi_port], - "gunicorn": ["--bind", f"{fastapi_host}:{fastapi_port}", - "-k", "uvicorn.workers.UvicornWorker", - "-w", str(workers)] + "gunicorn": [ + "--bind", + f"{fastapi_host}:{fastapi_port}", + "-k", + "uvicorn.workers.UvicornWorker", + "-w", + str(workers), + ], } backend_args = backend_args.get(asgi_backend) - spawn_child([ - "python", "-m", asgi_backend, - "--log-config", fastapi_log_config, - ] + backend_args + ["aurweb.asgi:app"]) + spawn_child( + [ + "python", + "-m", + asgi_backend, + "--log-config", + fastapi_log_config, + ] + + backend_args + + ["aurweb.asgi:app"] + ) # nginx spawn_child(["nginx", "-p", temporary_dir, "-c", generate_nginx_config()]) - print(f""" + print( + f""" > Started nginx. > > PHP backend: http://{php_address} @@ -201,11 +217,13 @@ def start(): > FastAPI frontend: http://{fastapi_host}:{FASTAPI_NGINX_PORT} > > Frontends are hosted via nginx and should be preferred. -""") +""" + ) -def _kill_children(children: Iterable, exceptions: list[Exception] = []) \ - -> list[Exception]: +def _kill_children( + children: Iterable, exceptions: list[Exception] = [] +) -> list[Exception]: """ Kill each process found in `children`. 
@@ -223,8 +241,9 @@ def _kill_children(children: Iterable, exceptions: list[Exception] = []) \ return exceptions -def _wait_for_children(children: Iterable, exceptions: list[Exception] = []) \ - -> list[Exception]: +def _wait_for_children( + children: Iterable, exceptions: list[Exception] = [] +) -> list[Exception]: """ Wait for each process to end found in `children`. @@ -261,21 +280,31 @@ def stop() -> None: exceptions = _wait_for_children(children, exceptions) children = [] if exceptions: - raise ProcessExceptions("Errors terminating the child processes:", - exceptions) + raise ProcessExceptions("Errors terminating the child processes:", exceptions) -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( - prog='python -m aurweb.spawn', - description='Start aurweb\'s test server.') - parser.add_argument('-v', '--verbose', action='count', default=0, - help='increase verbosity') - choices = ['hypercorn', 'gunicorn', 'uvicorn'] - parser.add_argument('-b', '--backend', choices=choices, default='uvicorn', - help='asgi backend used to launch the python server') - parser.add_argument("-w", "--workers", default=1, type=int, - help="number of workers to use in gunicorn") + prog="python -m aurweb.spawn", description="Start aurweb's test server." 
+ ) + parser.add_argument( + "-v", "--verbose", action="count", default=0, help="increase verbosity" + ) + choices = ["hypercorn", "gunicorn", "uvicorn"] + parser.add_argument( + "-b", + "--backend", + choices=choices, + default="uvicorn", + help="asgi backend used to launch the python server", + ) + parser.add_argument( + "-w", + "--workers", + default=1, + type=int, + help="number of workers to use in gunicorn", + ) args = parser.parse_args() try: diff --git a/aurweb/templates.py b/aurweb/templates.py index 6520bedf..781826ea 100644 --- a/aurweb/templates.py +++ b/aurweb/templates.py @@ -1,28 +1,27 @@ import copy import functools import os - from http import HTTPStatus from typing import Callable import jinja2 - from fastapi import Request from fastapi.responses import HTMLResponse import aurweb.config - from aurweb import cookies, l10n, time # Prepare jinja2 objects. -_loader = jinja2.FileSystemLoader(os.path.join( - aurweb.config.get("options", "aurwebdir"), "templates")) -_env = jinja2.Environment(loader=_loader, autoescape=True, - extensions=["jinja2.ext.i18n"]) +_loader = jinja2.FileSystemLoader( + os.path.join(aurweb.config.get("options", "aurwebdir"), "templates") +) +_env = jinja2.Environment( + loader=_loader, autoescape=True, extensions=["jinja2.ext.i18n"] +) def register_filter(name: str) -> Callable: - """ A decorator that can be used to register a filter. + """A decorator that can be used to register a filter. Example @register_filter("some_filter") @@ -35,31 +34,36 @@ def register_filter(name: str) -> Callable: :param name: Filter name :return: Callable used for filter """ + def decorator(func): @functools.wraps(func) def wrapper(*args, **kwargs): return func(*args, **kwargs) + _env.filters[name] = wrapper return wrapper + return decorator def register_function(name: str) -> Callable: - """ A decorator that can be used to register a function. 
- """ + """A decorator that can be used to register a function.""" + def decorator(func): @functools.wraps(func) def wrapper(*args, **kwargs): return func(*args, **kwargs) + if name in _env.globals: raise KeyError(f"Jinja already has a function named '{name}'") _env.globals[name] = wrapper return wrapper + return decorator def make_context(request: Request, title: str, next: str = None): - """ Create a context for a jinja2 TemplateResponse. """ + """Create a context for a jinja2 TemplateResponse.""" import aurweb.auth.creds commit_url = aurweb.config.get_with_fallback("devel", "commit_url", None) @@ -85,17 +89,19 @@ def make_context(request: Request, title: str, next: str = None): "config": aurweb.config, "creds": aurweb.auth.creds, "next": next if next else request.url.path, - "version": os.environ.get("COMMIT_HASH", aurweb.config.AURWEB_VERSION) + "version": os.environ.get("COMMIT_HASH", aurweb.config.AURWEB_VERSION), } async def make_variable_context(request: Request, title: str, next: str = None): - """ Make a context with variables provided by the user - (query params via GET or form data via POST). """ + """Make a context with variables provided by the user + (query params via GET or form data via POST).""" context = make_context(request, title, next) - to_copy = dict(request.query_params) \ - if request.method.lower() == "get" \ + to_copy = ( + dict(request.query_params) + if request.method.lower() == "get" else dict(await request.form()) + ) for k, v in to_copy.items(): context[k] = v @@ -111,7 +117,7 @@ def base_template(path: str): def render_raw_template(request: Request, path: str, context: dict): - """ Render a Jinja2 multi-lingual template with some context. """ + """Render a Jinja2 multi-lingual template with some context.""" # Create a deep copy of our jinja2 _environment. The _environment in # total by itself is 48 bytes large (according to sys.getsizeof). 
# This is done so we can install gettext translations on the template @@ -126,11 +132,10 @@ def render_raw_template(request: Request, path: str, context: dict): return template.render(context) -def render_template(request: Request, - path: str, - context: dict, - status_code: HTTPStatus = HTTPStatus.OK): - """ Render a template as an HTMLResponse. """ +def render_template( + request: Request, path: str, context: dict, status_code: HTTPStatus = HTTPStatus.OK +): + """Render a template as an HTMLResponse.""" rendered = render_raw_template(request, path, context) response = HTMLResponse(rendered, status_code=int(status_code)) diff --git a/aurweb/testing/__init__.py b/aurweb/testing/__init__.py index 8261051d..4451eb3a 100644 --- a/aurweb/testing/__init__.py +++ b/aurweb/testing/__init__.py @@ -1,10 +1,9 @@ import aurweb.db - from aurweb import models def setup_test_db(*args): - """ This function is to be used to setup a test database before + """This function is to be used to setup a test database before using it. It takes a variable number of table strings, and for each table in that set of table strings, it deletes all records. diff --git a/aurweb/testing/alpm.py b/aurweb/testing/alpm.py index ce30d042..ddafb710 100644 --- a/aurweb/testing/alpm.py +++ b/aurweb/testing/alpm.py @@ -17,6 +17,7 @@ class AlpmDatabase: This class can be used to add or remove packages from a test repository. 
""" + repo = "test" def __init__(self, database_root: str): @@ -35,13 +36,14 @@ class AlpmDatabase: os.makedirs(pkgdir) return pkgdir - def add(self, pkgname: str, pkgver: str, arch: str, - provides: list[str] = []) -> None: + def add( + self, pkgname: str, pkgver: str, arch: str, provides: list[str] = [] + ) -> None: context = { "pkgname": pkgname, "pkgver": pkgver, "arch": arch, - "provides": provides + "provides": provides, } template = base_template("testing/alpm_package.j2") pkgdir = self._get_pkgdir(pkgname, pkgver, self.repo) @@ -76,8 +78,9 @@ class AlpmDatabase: self.clean() cmdline = ["bash", "-c", "bsdtar -czvf ../test.db *"] proc = subprocess.run(cmdline, cwd=self.repopath) - assert proc.returncode == 0, \ - f"Bad return code while creating alpm database: {proc.returncode}" + assert ( + proc.returncode == 0 + ), f"Bad return code while creating alpm database: {proc.returncode}" # Print out the md5 hash value of the new test.db. test_db = os.path.join(self.remote, "test.db") diff --git a/aurweb/testing/email.py b/aurweb/testing/email.py index b3e3990b..057ff792 100644 --- a/aurweb/testing/email.py +++ b/aurweb/testing/email.py @@ -5,7 +5,6 @@ import email import os import re import sys - from typing import TextIO @@ -28,6 +27,7 @@ class Email: print(email.headers) """ + TEST_DIR = "test-emails" def __init__(self, serial: int = 1, autoparse: bool = True): @@ -61,7 +61,7 @@ class Email: value = os.environ.get("PYTEST_CURRENT_TEST", "email").split(" ")[0] if suite: value = value.split(":")[0] - return re.sub(r'(\/|\.|,|:)', "_", value) + return re.sub(r"(\/|\.|,|:)", "_", value) @staticmethod def count() -> int: @@ -159,6 +159,6 @@ class Email: lines += [ f"== Email #{i + 1} ==", email.glue(), - f"== End of Email #{i + 1}" + f"== End of Email #{i + 1}", ] print("\n".join(lines), file=file) diff --git a/aurweb/testing/filelock.py b/aurweb/testing/filelock.py index 3a18c153..33b42cb3 100644 --- a/aurweb/testing/filelock.py +++ b/aurweb/testing/filelock.py @@ 
-1,6 +1,5 @@ import hashlib import os - from typing import Callable from posix_ipc import O_CREAT, Semaphore diff --git a/aurweb/testing/git.py b/aurweb/testing/git.py index 019d870f..216515c8 100644 --- a/aurweb/testing/git.py +++ b/aurweb/testing/git.py @@ -1,6 +1,5 @@ import os import shlex - from subprocess import PIPE, Popen from typing import Tuple diff --git a/aurweb/testing/html.py b/aurweb/testing/html.py index 8c923438..16b7322b 100644 --- a/aurweb/testing/html.py +++ b/aurweb/testing/html.py @@ -6,7 +6,7 @@ parser = etree.HTMLParser() def parse_root(html: str) -> etree.Element: - """ Parse an lxml.etree.ElementTree root from html content. + """Parse an lxml.etree.ElementTree root from html content. :param html: HTML markup :return: etree.Element diff --git a/aurweb/testing/requests.py b/aurweb/testing/requests.py index c97d1532..98312e9e 100644 --- a/aurweb/testing/requests.py +++ b/aurweb/testing/requests.py @@ -2,7 +2,8 @@ import aurweb.config class User: - """ A fake User model. """ + """A fake User model.""" + # Fake columns. LangPreference = aurweb.config.get("options", "default_lang") Timezone = aurweb.config.get("options", "default_timezone") @@ -15,7 +16,8 @@ class User: class Client: - """ A fake FastAPI Request.client object. """ + """A fake FastAPI Request.client object.""" + # A fake host. host = "127.0.0.1" @@ -25,16 +27,19 @@ class URL: class Request: - """ A fake Request object which mimics a FastAPI Request for tests. 
""" + """A fake Request object which mimics a FastAPI Request for tests.""" + client = Client() url = URL() - def __init__(self, - user: User = User(), - authenticated: bool = False, - method: str = "GET", - headers: dict[str, str] = dict(), - cookies: dict[str, str] = dict()) -> "Request": + def __init__( + self, + user: User = User(), + authenticated: bool = False, + method: str = "GET", + headers: dict[str, str] = dict(), + cookies: dict[str, str] = dict(), + ) -> "Request": self.user = user self.user.authenticated = authenticated diff --git a/aurweb/testing/smtp.py b/aurweb/testing/smtp.py index e5d67991..7596fbe9 100644 --- a/aurweb/testing/smtp.py +++ b/aurweb/testing/smtp.py @@ -2,7 +2,7 @@ class FakeSMTP: - """ A fake version of smtplib.SMTP used for testing. """ + """A fake version of smtplib.SMTP used for testing.""" starttls_enabled = False use_ssl = False @@ -41,5 +41,6 @@ class FakeSMTP: class FakeSMTP_SSL(FakeSMTP): - """ A fake version of smtplib.SMTP_SSL used for testing. """ + """A fake version of smtplib.SMTP_SSL used for testing.""" + use_ssl = True diff --git a/aurweb/time.py b/aurweb/time.py index a97ca986..505f17f5 100644 --- a/aurweb/time.py +++ b/aurweb/time.py @@ -1,5 +1,4 @@ import zoneinfo - from collections import OrderedDict from datetime import datetime from urllib.parse import unquote @@ -11,7 +10,7 @@ import aurweb.config def tz_offset(name: str): - """ Get a timezone offset in the form "+00:00" by its name. + """Get a timezone offset in the form "+00:00" by its name. Example: tz_offset('America/Los_Angeles') @@ -24,7 +23,7 @@ def tz_offset(name: str): offset = dt.utcoffset().total_seconds() / 60 / 60 # Prefix the offset string with a - or +. - offset_string = '-' if offset < 0 else '+' + offset_string = "-" if offset < 0 else "+" # Remove any negativity from the offset. We want a good offset. 
:) offset = abs(offset) @@ -42,19 +41,25 @@ def tz_offset(name: str): return offset_string -SUPPORTED_TIMEZONES = OrderedDict({ - # Flatten out the list of tuples into an OrderedDict. - timezone: offset for timezone, offset in sorted([ - # Comprehend a list of tuples (timezone, offset display string) - # and sort them by (offset, timezone). - (tz, "(UTC%s) %s" % (tz_offset(tz), tz)) - for tz in zoneinfo.available_timezones() - ], key=lambda element: (tz_offset(element[0]), element[0])) -}) +SUPPORTED_TIMEZONES = OrderedDict( + { + # Flatten out the list of tuples into an OrderedDict. + timezone: offset + for timezone, offset in sorted( + [ + # Comprehend a list of tuples (timezone, offset display string) + # and sort them by (offset, timezone). + (tz, "(UTC%s) %s" % (tz_offset(tz), tz)) + for tz in zoneinfo.available_timezones() + ], + key=lambda element: (tz_offset(element[0]), element[0]), + ) + } +) def get_request_timezone(request: Request): - """ Get a request's timezone by its AURTZ cookie. We use the + """Get a request's timezone by its AURTZ cookie. We use the configuration's [options] default_timezone otherwise. 
@param request FastAPI request diff --git a/aurweb/users/update.py b/aurweb/users/update.py index ffea1f2f..51f2d2e0 100644 --- a/aurweb/users/update.py +++ b/aurweb/users/update.py @@ -8,12 +8,23 @@ from aurweb.models.ssh_pub_key import get_fingerprint from aurweb.util import strtobool -def simple(U: str = str(), E: str = str(), H: bool = False, - BE: str = str(), R: str = str(), HP: str = str(), - I: str = str(), K: str = str(), J: bool = False, - CN: bool = False, UN: bool = False, ON: bool = False, - S: bool = False, user: models.User = None, - **kwargs) -> None: +def simple( + U: str = str(), + E: str = str(), + H: bool = False, + BE: str = str(), + R: str = str(), + HP: str = str(), + I: str = str(), + K: str = str(), + J: bool = False, + CN: bool = False, + UN: bool = False, + ON: bool = False, + S: bool = False, + user: models.User = None, + **kwargs, +) -> None: now = time.utcnow() with db.begin(): user.Username = U or user.Username @@ -31,22 +42,26 @@ def simple(U: str = str(), E: str = str(), H: bool = False, user.OwnershipNotify = strtobool(ON) -def language(L: str = str(), - request: Request = None, - user: models.User = None, - context: dict[str, Any] = {}, - **kwargs) -> None: +def language( + L: str = str(), + request: Request = None, + user: models.User = None, + context: dict[str, Any] = {}, + **kwargs, +) -> None: if L and L != user.LangPreference: with db.begin(): user.LangPreference = L context["language"] = L -def timezone(TZ: str = str(), - request: Request = None, - user: models.User = None, - context: dict[str, Any] = {}, - **kwargs) -> None: +def timezone( + TZ: str = str(), + request: Request = None, + user: models.User = None, + context: dict[str, Any] = {}, + **kwargs, +) -> None: if TZ and TZ != user.Timezone: with db.begin(): user.Timezone = TZ @@ -67,8 +82,7 @@ def ssh_pubkey(PK: str = str(), user: models.User = None, **kwargs) -> None: with db.begin(): # Delete any existing keys we can't find. 
- to_remove = user.ssh_pub_keys.filter( - ~SSHPubKey.Fingerprint.in_(fprints)) + to_remove = user.ssh_pub_keys.filter(~SSHPubKey.Fingerprint.in_(fprints)) db.delete_all(to_remove) # For each key, if it does not yet exist, create it. @@ -79,24 +93,27 @@ def ssh_pubkey(PK: str = str(), user: models.User = None, **kwargs) -> None: ).exists() if not db.query(exists).scalar(): # No public key exists, create one. - db.create(models.SSHPubKey, UserID=user.ID, - PubKey=" ".join([prefix, key]), - Fingerprint=fprints[i]) + db.create( + models.SSHPubKey, + UserID=user.ID, + PubKey=" ".join([prefix, key]), + Fingerprint=fprints[i], + ) -def account_type(T: int = None, - user: models.User = None, - **kwargs) -> None: +def account_type(T: int = None, user: models.User = None, **kwargs) -> None: if T is not None and (T := int(T)) != user.AccountTypeID: with db.begin(): user.AccountTypeID = T -def password(P: str = str(), - request: Request = None, - user: models.User = None, - context: dict[str, Any] = {}, - **kwargs) -> None: +def password( + P: str = str(), + request: Request = None, + user: models.User = None, + context: dict[str, Any] = {}, + **kwargs, +) -> None: if P and not user.valid_password(P): # Remove the fields we consumed for passwords. 
context["P"] = context["C"] = str() diff --git a/aurweb/users/validate.py b/aurweb/users/validate.py index de51e3ff..6c27a0b7 100644 --- a/aurweb/users/validate.py +++ b/aurweb/users/validate.py @@ -25,42 +25,44 @@ def invalid_fields(E: str = str(), U: str = str(), **kwargs) -> None: raise ValidationError(["Missing a required field."]) -def invalid_suspend_permission(request: Request = None, - user: models.User = None, - S: str = "False", - **kwargs) -> None: +def invalid_suspend_permission( + request: Request = None, user: models.User = None, S: str = "False", **kwargs +) -> None: if not request.user.is_elevated() and strtobool(S) != bool(user.Suspended): - raise ValidationError([ - "You do not have permission to suspend accounts."]) + raise ValidationError(["You do not have permission to suspend accounts."]) -def invalid_username(request: Request = None, U: str = str(), - _: l10n.Translator = None, - **kwargs) -> None: +def invalid_username( + request: Request = None, U: str = str(), _: l10n.Translator = None, **kwargs +) -> None: if not util.valid_username(U): username_min_len = config.getint("options", "username_min_len") username_max_len = config.getint("options", "username_max_len") - raise ValidationError([ - "The username is invalid.", + raise ValidationError( [ - _("It must be between %s and %s characters long") % ( - username_min_len, username_max_len), - "Start and end with a letter or number", - "Can contain only one period, underscore or hyphen.", + "The username is invalid.", + [ + _("It must be between %s and %s characters long") + % (username_min_len, username_max_len), + "Start and end with a letter or number", + "Can contain only one period, underscore or hyphen.", + ], ] - ]) + ) -def invalid_password(P: str = str(), C: str = str(), - _: l10n.Translator = None, **kwargs) -> None: +def invalid_password( + P: str = str(), C: str = str(), _: l10n.Translator = None, **kwargs +) -> None: if P: if not util.valid_password(P): - username_min_len = 
config.getint( - "options", "username_min_len") - raise ValidationError([ - _("Your password must be at least %s characters.") % ( - username_min_len) - ]) + username_min_len = config.getint("options", "username_min_len") + raise ValidationError( + [ + _("Your password must be at least %s characters.") + % (username_min_len) + ] + ) elif not C: raise ValidationError(["Please confirm your new password."]) elif P != C: @@ -71,15 +73,18 @@ def is_banned(request: Request = None, **kwargs) -> None: host = request.client.host exists = db.query(models.Ban, models.Ban.IPAddress == host).exists() if db.query(exists).scalar(): - raise ValidationError([ - "Account registration has been disabled for your " - "IP address, probably due to sustained spam attacks. " - "Sorry for the inconvenience." - ]) + raise ValidationError( + [ + "Account registration has been disabled for your " + "IP address, probably due to sustained spam attacks. " + "Sorry for the inconvenience." + ] + ) -def invalid_user_password(request: Request = None, passwd: str = str(), - **kwargs) -> None: +def invalid_user_password( + request: Request = None, passwd: str = str(), **kwargs +) -> None: if request.user.is_authenticated(): if not request.user.valid_password(passwd): raise ValidationError(["Invalid password."]) @@ -97,8 +102,9 @@ def invalid_backup_email(BE: str = str(), **kwargs) -> None: def invalid_homepage(HP: str = str(), **kwargs) -> None: if HP and not util.valid_homepage(HP): - raise ValidationError([ - "The home page is invalid, please specify the full HTTP(s) URL."]) + raise ValidationError( + ["The home page is invalid, please specify the full HTTP(s) URL."] + ) def invalid_pgp_key(K: str = str(), **kwargs) -> None: @@ -106,8 +112,9 @@ def invalid_pgp_key(K: str = str(), **kwargs) -> None: raise ValidationError(["The PGP key fingerprint is invalid."]) -def invalid_ssh_pubkey(PK: str = str(), user: models.User = None, - _: l10n.Translator = None, **kwargs) -> None: +def invalid_ssh_pubkey( + 
PK: str = str(), user: models.User = None, _: l10n.Translator = None, **kwargs +) -> None: if not PK: return @@ -119,15 +126,23 @@ def invalid_ssh_pubkey(PK: str = str(), user: models.User = None, for prefix, key in keys: fingerprint = get_fingerprint(f"{prefix} {key}") - exists = db.query(models.SSHPubKey).filter( - and_(models.SSHPubKey.UserID != user.ID, - models.SSHPubKey.Fingerprint == fingerprint) - ).exists() + exists = ( + db.query(models.SSHPubKey) + .filter( + and_( + models.SSHPubKey.UserID != user.ID, + models.SSHPubKey.Fingerprint == fingerprint, + ) + ) + .exists() + ) if db.query(exists).scalar(): - raise ValidationError([ - _("The SSH public key, %s%s%s, is already in use.") % ( - "", fingerprint, "") - ]) + raise ValidationError( + [ + _("The SSH public key, %s%s%s, is already in use.") + % ("", fingerprint, "") + ] + ) def invalid_language(L: str = str(), **kwargs) -> None: @@ -140,60 +155,78 @@ def invalid_timezone(TZ: str = str(), **kwargs) -> None: raise ValidationError(["Timezone is not currently supported."]) -def username_in_use(U: str = str(), user: models.User = None, - _: l10n.Translator = None, **kwargs) -> None: - exists = db.query(models.User).filter( - and_(models.User.ID != user.ID, - models.User.Username == U) - ).exists() +def username_in_use( + U: str = str(), user: models.User = None, _: l10n.Translator = None, **kwargs +) -> None: + exists = ( + db.query(models.User) + .filter(and_(models.User.ID != user.ID, models.User.Username == U)) + .exists() + ) if db.query(exists).scalar(): # If the username already exists... 
- raise ValidationError([ - _("The username, %s%s%s, is already in use.") % ( - "", U, "") - ]) + raise ValidationError( + [ + _("The username, %s%s%s, is already in use.") + % ("", U, "") + ] + ) -def email_in_use(E: str = str(), user: models.User = None, - _: l10n.Translator = None, **kwargs) -> None: - exists = db.query(models.User).filter( - and_(models.User.ID != user.ID, - models.User.Email == E) - ).exists() +def email_in_use( + E: str = str(), user: models.User = None, _: l10n.Translator = None, **kwargs +) -> None: + exists = ( + db.query(models.User) + .filter(and_(models.User.ID != user.ID, models.User.Email == E)) + .exists() + ) if db.query(exists).scalar(): # If the email already exists... - raise ValidationError([ - _("The address, %s%s%s, is already in use.") % ( - "", E, "") - ]) + raise ValidationError( + [ + _("The address, %s%s%s, is already in use.") + % ("", E, "") + ] + ) -def invalid_account_type(T: int = None, request: Request = None, - user: models.User = None, - _: l10n.Translator = None, - **kwargs) -> None: +def invalid_account_type( + T: int = None, + request: Request = None, + user: models.User = None, + _: l10n.Translator = None, + **kwargs, +) -> None: if T is not None and (T := int(T)) != user.AccountTypeID: name = ACCOUNT_TYPE_NAME.get(T, None) has_cred = request.user.has_credential(creds.ACCOUNT_CHANGE_TYPE) if name is None: raise ValidationError(["Invalid account type provided."]) elif not has_cred: - raise ValidationError([ - "You do not have permission to change account types."]) + raise ValidationError( + ["You do not have permission to change account types."] + ) elif T > request.user.AccountTypeID: # If the chosen account type is higher than the editor's account # type, the editor doesn't have permission to set the new type. - error = _("You do not have permission to change " - "this user's account type to %s.") % name + error = ( + _( + "You do not have permission to change " + "this user's account type to %s." 
+ ) + % name + ) raise ValidationError([error]) - logger.debug(f"Trusted User '{request.user.Username}' has " - f"modified '{user.Username}' account's type to" - f" {name}.") + logger.debug( + f"Trusted User '{request.user.Username}' has " + f"modified '{user.Username}' account's type to" + f" {name}." + ) -def invalid_captcha(captcha_salt: str = None, captcha: str = None, - **kwargs) -> None: +def invalid_captcha(captcha_salt: str = None, captcha: str = None, **kwargs) -> None: if captcha_salt and captcha_salt not in get_captcha_salts(): raise ValidationError(["This CAPTCHA has expired. Please try again."]) diff --git a/aurweb/util.py b/aurweb/util.py index 8291b578..4f1bd64e 100644 --- a/aurweb/util.py +++ b/aurweb/util.py @@ -2,7 +2,6 @@ import math import re import secrets import string - from datetime import datetime from http import HTTPStatus from subprocess import PIPE, Popen @@ -11,12 +10,10 @@ from urllib.parse import urlparse import fastapi import pygit2 - from email_validator import EmailSyntaxError, validate_email from fastapi.responses import JSONResponse import aurweb.config - from aurweb import defaults, logging logger = logging.get_logger(__name__) @@ -24,15 +21,15 @@ logger = logging.get_logger(__name__) def make_random_string(length: int) -> str: alphanumerics = string.ascii_lowercase + string.digits - return ''.join([secrets.choice(alphanumerics) for i in range(length)]) + return "".join([secrets.choice(alphanumerics) for i in range(length)]) def make_nonce(length: int = 8): - """ Generate a single random nonce. Here, token_hex generates a hex + """Generate a single random nonce. Here, token_hex generates a hex string of 2 hex characters per byte, where the length give is nbytes. This means that to get our proper string length, we need to cut it in half and truncate off any remaining (in the case that - length was uneven). 
""" + length was uneven).""" return secrets.token_hex(math.ceil(length / 2))[:length] @@ -45,7 +42,7 @@ def valid_username(username): # Check that username contains: one or more alphanumeric # characters, an optional separator of '.', '-' or '_', followed # by alphanumeric characters. - return re.match(r'^[a-zA-Z0-9]+[.\-_]?[a-zA-Z0-9]+$', username) + return re.match(r"^[a-zA-Z0-9]+[.\-_]?[a-zA-Z0-9]+$", username) def valid_email(email): @@ -82,7 +79,7 @@ def valid_pgp_fingerprint(fp): def jsonify(obj): - """ Perform a conversion on obj if it's needed. """ + """Perform a conversion on obj if it's needed.""" if isinstance(obj, datetime): obj = int(obj.timestamp()) return obj @@ -151,8 +148,7 @@ def git_search(repo: pygit2.Repository, commit_hash: str) -> int: return prefixlen -async def error_or_result(next: Callable, *args, **kwargs) \ - -> fastapi.Response: +async def error_or_result(next: Callable, *args, **kwargs) -> fastapi.Response: """ Try to return a response from `next`. @@ -174,9 +170,9 @@ async def error_or_result(next: Callable, *args, **kwargs) \ def parse_ssh_key(string: str) -> Tuple[str, str]: - """ Parse an SSH public key. """ + """Parse an SSH public key.""" invalid_exc = ValueError("The SSH public key is invalid.") - parts = re.sub(r'\s\s+', ' ', string.strip()).split() + parts = re.sub(r"\s\s+", " ", string.strip()).split() if len(parts) < 2: raise invalid_exc @@ -185,8 +181,7 @@ def parse_ssh_key(string: str) -> Tuple[str, str]: if prefix not in prefixes: raise invalid_exc - proc = Popen(["ssh-keygen", "-l", "-f", "-"], stdin=PIPE, stdout=PIPE, - stderr=PIPE) + proc = Popen(["ssh-keygen", "-l", "-f", "-"], stdin=PIPE, stdout=PIPE, stderr=PIPE) out, _ = proc.communicate(f"{prefix} {key}".encode()) if proc.returncode: raise invalid_exc @@ -195,5 +190,5 @@ def parse_ssh_key(string: str) -> Tuple[str, str]: def parse_ssh_keys(string: str) -> list[Tuple[str, str]]: - """ Parse a list of SSH public keys. 
""" + """Parse a list of SSH public keys.""" return [parse_ssh_key(e) for e in string.splitlines()] diff --git a/doc/web-auth.md b/doc/web-auth.md index 17284889..dbb4403d 100644 --- a/doc/web-auth.md +++ b/doc/web-auth.md @@ -108,4 +108,3 @@ The following list of steps describes exactly how this verification works: - `options.disable_http_login: 1` - `options.login_timeout: ` - `options.persistent_cookie_timeout: ` - diff --git a/docker-compose.yml b/docker-compose.yml index 9edffeeb..a1c2bb42 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -43,7 +43,7 @@ services: healthcheck: test: "bash /docker/health/memcached.sh" interval: 3s - + redis: image: aurweb:latest init: true diff --git a/docker/config/nginx.conf b/docker/config/nginx.conf index 9fdf6015..99804d1d 100644 --- a/docker/config/nginx.conf +++ b/docker/config/nginx.conf @@ -147,4 +147,3 @@ http { '' close; } } - diff --git a/migrations/env.py b/migrations/env.py index 774ecdeb..dcc0329d 100644 --- a/migrations/env.py +++ b/migrations/env.py @@ -2,7 +2,6 @@ import logging import logging.config import sqlalchemy - from alembic import context import aurweb.db @@ -69,9 +68,7 @@ def run_migrations_online(): ) with connectable.connect() as connection: - context.configure( - connection=connection, target_metadata=target_metadata - ) + context.configure(connection=connection, target_metadata=target_metadata) with context.begin_transaction(): context.run_migrations() diff --git a/po/ar.po b/po/ar.po index 676a5025..ea0e03cf 100644 --- a/po/ar.po +++ b/po/ar.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # safa1996alfulaij , 2015 # ØµÙØ§ الÙليج , 2015-2016 diff --git a/po/ast.po b/po/ast.po index 16c363a6..2075edc1 100644 --- a/po/ast.po +++ b/po/ast.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. 
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # enolp , 2014-2015,2017 # Ḷḷumex03 , 2014 diff --git a/po/az.po b/po/az.po index 7e534b4c..1c7ca207 100644 --- a/po/az.po +++ b/po/az.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: msgid "" msgstr "" diff --git a/po/az_AZ.po b/po/az_AZ.po index e903027b..2f5ceabd 100644 --- a/po/az_AZ.po +++ b/po/az_AZ.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: msgid "" msgstr "" diff --git a/po/bg.po b/po/bg.po index 7864f5dc..c7c70021 100644 --- a/po/bg.po +++ b/po/bg.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: msgid "" msgstr "" diff --git a/po/ca.po b/po/ca.po index 391dd146..d43c84dc 100644 --- a/po/ca.po +++ b/po/ca.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Adolfo Jayme-Barrientos, 2014 # Hector Mtz-Seara , 2011,2013 diff --git a/po/ca_ES.po b/po/ca_ES.po index bad69bd1..aac7b03f 100644 --- a/po/ca_ES.po +++ b/po/ca_ES.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: msgid "" msgstr "" diff --git a/po/cs.po b/po/cs.po index b9bd739a..59a24007 100644 --- a/po/cs.po +++ b/po/cs.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. 
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Daniel Milde , 2017 # Daniel Peukert , 2021 diff --git a/po/da.po b/po/da.po index a6f290ea..822b5506 100644 --- a/po/da.po +++ b/po/da.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Linuxbruger , 2018 # Louis Tim Larsen , 2015 diff --git a/po/de.po b/po/de.po index ec0a0fbe..a0f8fb0f 100644 --- a/po/de.po +++ b/po/de.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # 9d91e189c22376bb4ee81489bc27fc28, 2013 # 9d91e189c22376bb4ee81489bc27fc28, 2013-2014 diff --git a/po/el.po b/po/el.po index f1fe704e..37db785c 100644 --- a/po/el.po +++ b/po/el.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Achilleas Pipinellis, 2014 # Achilleas Pipinellis, 2013 diff --git a/po/es.po b/po/es.po index ea7ac099..9cbe98a6 100644 --- a/po/es.po +++ b/po/es.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Adolfo Jayme-Barrientos, 2015 # Angel Velasquez , 2011 diff --git a/po/es_419.po b/po/es_419.po index 444eccb7..e2b96ae6 100644 --- a/po/es_419.po +++ b/po/es_419.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # Angel Velasquez , 2011 # juantascon , 2011 diff --git a/po/et.po b/po/et.po index 9b6493b5..44f2b3a0 100644 --- a/po/et.po +++ b/po/et.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: msgid "" msgstr "" diff --git a/po/fi.po b/po/fi.po index 39cfe626..636681b7 100644 --- a/po/fi.po +++ b/po/fi.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Elias Autio, 2016 # Jesse Jaara , 2011-2012,2015 diff --git a/po/fi_FI.po b/po/fi_FI.po index f3253433..17a58b4a 100644 --- a/po/fi_FI.po +++ b/po/fi_FI.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: msgid "" msgstr "" diff --git a/po/fr.po b/po/fr.po index 99d01460..03192d48 100644 --- a/po/fr.po +++ b/po/fr.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Alexandre Macabies , 2018 # Antoine Lubineau , 2012 diff --git a/po/he.po b/po/he.po index cd4a0f87..936e93a1 100644 --- a/po/he.po +++ b/po/he.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # GenghisKhan , 2016 # Lukas Fleischer , 2011 diff --git a/po/hi_IN.po b/po/hi_IN.po index 37fd082e..114c9461 100644 --- a/po/hi_IN.po +++ b/po/hi_IN.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # Panwar108 , 2018,2020-2021 msgid "" diff --git a/po/hr.po b/po/hr.po index 4932bd7e..fe1857c1 100644 --- a/po/hr.po +++ b/po/hr.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Lukas Fleischer , 2011 msgid "" diff --git a/po/hu.po b/po/hu.po index 51894457..e6ebd451 100644 --- a/po/hu.po +++ b/po/hu.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Balló György , 2013 # Balló György , 2011,2013-2016 diff --git a/po/id.po b/po/id.po index 75a6c98b..103c47e6 100644 --- a/po/id.po +++ b/po/id.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # se7entime , 2013 # se7entime , 2016 diff --git a/po/id_ID.po b/po/id_ID.po index d01294c8..c3acb167 100644 --- a/po/id_ID.po +++ b/po/id_ID.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: msgid "" msgstr "" diff --git a/po/is.po b/po/is.po index a7a88b04..aee80ce5 100644 --- a/po/is.po +++ b/po/is.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: msgid "" msgstr "" diff --git a/po/it.po b/po/it.po index 436b6459..f583cb2f 100644 --- a/po/it.po +++ b/po/it.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # Fanfurlio Farolfi , 2021-2022 # Giovanni Scafora , 2011-2015 diff --git a/po/ja.po b/po/ja.po index 55d056bf..280edb46 100644 --- a/po/ja.po +++ b/po/ja.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # kusakata, 2013 # kusakata, 2013 diff --git a/po/ko.po b/po/ko.po index 808ffe27..6da57759 100644 --- a/po/ko.po +++ b/po/ko.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: msgid "" msgstr "" diff --git a/po/lt.po b/po/lt.po index d126f193..c9f55632 100644 --- a/po/lt.po +++ b/po/lt.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: msgid "" msgstr "" diff --git a/po/nb.po b/po/nb.po index 1cc090f1..307a80d6 100644 --- a/po/nb.po +++ b/po/nb.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Alexander F. Rødseth , 2015,2017-2019 # Alexander F. Rødseth , 2011,2013-2014 diff --git a/po/nb_NO.po b/po/nb_NO.po index 74af6936..5d958172 100644 --- a/po/nb_NO.po +++ b/po/nb_NO.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Kim Nordmo , 2017,2019 # Lukas Fleischer , 2011 diff --git a/po/nl.po b/po/nl.po index 282b5b40..54519d21 100644 --- a/po/nl.po +++ b/po/nl.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # Heimen Stoffels , 2021-2022 # Heimen Stoffels , 2015,2021 diff --git a/po/pl.po b/po/pl.po index 4856f22b..94a6fb67 100644 --- a/po/pl.po +++ b/po/pl.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # BartÅ‚omiej Piotrowski , 2011 # BartÅ‚omiej Piotrowski , 2014 diff --git a/po/pt.po b/po/pt.po index b2cf86b2..aed32031 100644 --- a/po/pt.po +++ b/po/pt.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Lukas Fleischer , 2011 msgid "" diff --git a/po/pt_BR.po b/po/pt_BR.po index c9c15d72..d29a9448 100644 --- a/po/pt_BR.po +++ b/po/pt_BR.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Albino Biasutti Neto Bino , 2011 # Fábio Nogueira , 2016 diff --git a/po/pt_PT.po b/po/pt_PT.po index 3518cb7b..7f6ea67a 100644 --- a/po/pt_PT.po +++ b/po/pt_PT.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Christophe Silva , 2018 # Gaspar Santos , 2011 diff --git a/po/ro.po b/po/ro.po index fa159928..4409b698 100644 --- a/po/ro.po +++ b/po/ro.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Arthur ÈšiÈ›eică , 2013-2015 # Lukas Fleischer , 2011 diff --git a/po/ru.po b/po/ru.po index 75550c8c..44f000dd 100644 --- a/po/ru.po +++ b/po/ru.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. 
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Alex , 2021 # Evgeniy Alekseev , 2014-2015 diff --git a/po/sk.po b/po/sk.po index 76d3d1a8..853fc198 100644 --- a/po/sk.po +++ b/po/sk.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # archetyp , 2013-2016 # Jose Riha , 2018 diff --git a/po/sr.po b/po/sr.po index dae37bcd..426ce599 100644 --- a/po/sr.po +++ b/po/sr.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Lukas Fleischer , 2011 # Slobodan Terzić , 2011-2012,2015-2017 diff --git a/po/sr_RS.po b/po/sr_RS.po index 985ee007..b7560965 100644 --- a/po/sr_RS.po +++ b/po/sr_RS.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Nikola Stojković , 2013 msgid "" diff --git a/po/sv_SE.po b/po/sv_SE.po index 6abb8452..4887fdde 100644 --- a/po/sv_SE.po +++ b/po/sv_SE.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Johannes Löthberg , 2015-2016 # Kevin Morris , 2022 diff --git a/po/tr.po b/po/tr.po index 83b1e4df..559a0008 100644 --- a/po/tr.po +++ b/po/tr.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # tarakbumba , 2011,2013-2015 # tarakbumba , 2012,2014 diff --git a/po/uk.po b/po/uk.po index a4410185..3bffe4f6 100644 --- a/po/uk.po +++ b/po/uk.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Lukas Fleischer , 2011 # Rax Garfield , 2012 diff --git a/po/vi.po b/po/vi.po index 3ea5bad3..87f7faac 100644 --- a/po/vi.po +++ b/po/vi.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: msgid "" msgstr "" diff --git a/po/zh.po b/po/zh.po index 04fe06f3..c932df9c 100644 --- a/po/zh.po +++ b/po/zh.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: msgid "" msgstr "" diff --git a/po/zh_CN.po b/po/zh_CN.po index 53d42bc8..675d15a3 100644 --- a/po/zh_CN.po +++ b/po/zh_CN.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Feng Chao , 2015-2016 # dongfengweixiao , 2015 diff --git a/po/zh_TW.po b/po/zh_TW.po index e7399a19..1526b4a9 100644 --- a/po/zh_TW.po +++ b/po/zh_TW.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # pan93412 , 2018 # 黃æŸè«º , 2014-2017 diff --git a/schema/gendummydata.py b/schema/gendummydata.py index aedfda7e..fa59855f 100755 --- a/schema/gendummydata.py +++ b/schema/gendummydata.py @@ -15,27 +15,26 @@ import os import random import sys import time - from datetime import datetime import bcrypt LOG_LEVEL = logging.DEBUG # logging level. 
set to logging.INFO to reduce output SEED_FILE = "/usr/share/dict/words" -USER_ID = 5 # Users.ID of first bogus user -PKG_ID = 1 # Packages.ID of first package +USER_ID = 5 # Users.ID of first bogus user +PKG_ID = 1 # Packages.ID of first package # how many users to 'register' MAX_USERS = int(os.environ.get("MAX_USERS", 38000)) -MAX_DEVS = .1 # what percentage of MAX_USERS are Developers -MAX_TUS = .2 # what percentage of MAX_USERS are Trusted Users +MAX_DEVS = 0.1 # what percentage of MAX_USERS are Developers +MAX_TUS = 0.2 # what percentage of MAX_USERS are Trusted Users # how many packages to load MAX_PKGS = int(os.environ.get("MAX_PKGS", 32000)) -PKG_DEPS = (1, 15) # min/max depends a package has -PKG_RELS = (1, 5) # min/max relations a package has -PKG_SRC = (1, 3) # min/max sources a package has -PKG_CMNTS = (1, 5) # min/max number of comments a package has +PKG_DEPS = (1, 15) # min/max depends a package has +PKG_RELS = (1, 5) # min/max relations a package has +PKG_SRC = (1, 3) # min/max sources a package has +PKG_CMNTS = (1, 5) # min/max number of comments a package has CATEGORIES_COUNT = 17 # the number of categories from aur-schema -VOTING = (0, .001) # percentage range for package voting +VOTING = (0, 0.001) # percentage range for package voting # number of open trusted user proposals OPEN_PROPOSALS = int(os.environ.get("OPEN_PROPOSALS", 15)) # number of closed trusted user proposals @@ -113,10 +112,10 @@ if not len(contents) - MAX_USERS > MAX_PKGS: def normalize(unicode_data): - """ We only accept ascii for usernames. Also use this to normalize + """We only accept ascii for usernames. Also use this to normalize package names; our database utf8mb4 collations compare with Unicode - Equivalence. 
""" - return unicode_data.encode('ascii', 'ignore').decode('ascii') + Equivalence.""" + return unicode_data.encode("ascii", "ignore").decode("ascii") # select random usernames @@ -196,10 +195,12 @@ for u in user_keys: # "{salt}{username}" to_hash = f"{salt}{u}" - h = hashlib.new('md5') + h = hashlib.new("md5") h.update(to_hash.encode()) - s = ("INSERT INTO Users (ID, AccountTypeID, Username, Email, Passwd, Salt)" - " VALUES (%d, %d, '%s', '%s@example.com', '%s', '%s');\n") + s = ( + "INSERT INTO Users (ID, AccountTypeID, Username, Email, Passwd, Salt)" + " VALUES (%d, %d, '%s', '%s@example.com', '%s', '%s');\n" + ) s = s % (seen_users[u], account_type, u, u, h.hexdigest(), salt) out.write(s) @@ -230,13 +231,17 @@ for p in list(seen_pkgs.keys()): uuid = genUID() # the submitter/user - s = ("INSERT INTO PackageBases (ID, Name, FlaggerComment, SubmittedTS, ModifiedTS, " - "SubmitterUID, MaintainerUID, PackagerUID) VALUES (%d, '%s', '', %d, %d, %d, %s, %s);\n") + s = ( + "INSERT INTO PackageBases (ID, Name, FlaggerComment, SubmittedTS, ModifiedTS, " + "SubmitterUID, MaintainerUID, PackagerUID) VALUES (%d, '%s', '', %d, %d, %d, %s, %s);\n" + ) s = s % (seen_pkgs[p], p, NOW, NOW, uuid, muid, puid) out.write(s) - s = ("INSERT INTO Packages (ID, PackageBaseID, Name, Version) VALUES " - "(%d, %d, '%s', '%s');\n") + s = ( + "INSERT INTO Packages (ID, PackageBaseID, Name, Version) VALUES " + "(%d, %d, '%s', '%s');\n" + ) s = s % (seen_pkgs[p], seen_pkgs[p], p, genVersion()) out.write(s) @@ -247,8 +252,10 @@ for p in list(seen_pkgs.keys()): num_comments = random.randrange(PKG_CMNTS[0], PKG_CMNTS[1]) for i in range(0, num_comments): now = NOW + random.randrange(400, 86400 * 3) - s = ("INSERT INTO PackageComments (PackageBaseID, UsersID," - " Comments, RenderedComment, CommentTS) VALUES (%d, %d, '%s', '', %d);\n") + s = ( + "INSERT INTO PackageComments (PackageBaseID, UsersID," + " Comments, RenderedComment, CommentTS) VALUES (%d, %d, '%s', '', %d);\n" + ) s = s % (seen_pkgs[p], 
genUID(), genFortune(), now) out.write(s) @@ -258,14 +265,17 @@ utcnow = int(datetime.utcnow().timestamp()) track_votes = {} log.debug("Casting votes for packages.") for u in user_keys: - num_votes = random.randrange(int(len(seen_pkgs) * VOTING[0]), - int(len(seen_pkgs) * VOTING[1])) + num_votes = random.randrange( + int(len(seen_pkgs) * VOTING[0]), int(len(seen_pkgs) * VOTING[1]) + ) pkgvote = {} for v in range(num_votes): pkg = random.randrange(1, len(seen_pkgs) + 1) if pkg not in pkgvote: - s = ("INSERT INTO PackageVotes (UsersID, PackageBaseID, VoteTS)" - " VALUES (%d, %d, %d);\n") + s = ( + "INSERT INTO PackageVotes (UsersID, PackageBaseID, VoteTS)" + " VALUES (%d, %d, %d);\n" + ) s = s % (seen_users[u], pkg, utcnow) pkgvote[pkg] = 1 if pkg not in track_votes: @@ -310,9 +320,12 @@ for p in seen_pkgs_keys: src_file = user_keys[random.randrange(0, len(user_keys))] src = "%s%s.%s/%s/%s-%s.tar.gz" % ( RANDOM_URL[random.randrange(0, len(RANDOM_URL))], - p, RANDOM_TLDS[random.randrange(0, len(RANDOM_TLDS))], + p, + RANDOM_TLDS[random.randrange(0, len(RANDOM_TLDS))], RANDOM_LOCS[random.randrange(0, len(RANDOM_LOCS))], - src_file, genVersion()) + src_file, + genVersion(), + ) s = "INSERT INTO PackageSources(PackageID, Source) VALUES (%d, '%s');\n" s = s % (seen_pkgs[p], src) out.write(s) @@ -334,8 +347,10 @@ for t in range(0, OPEN_PROPOSALS + CLOSE_PROPOSALS): else: user = user_keys[random.randrange(0, len(user_keys))] suid = trustedusers[random.randrange(0, len(trustedusers))] - s = ("INSERT INTO TU_VoteInfo (Agenda, User, Submitted, End," - " Quorum, SubmitterID) VALUES ('%s', '%s', %d, %d, 0.0, %d);\n") + s = ( + "INSERT INTO TU_VoteInfo (Agenda, User, Submitted, End," + " Quorum, SubmitterID) VALUES ('%s', '%s', %d, %d, 0.0, %d);\n" + ) s = s % (genFortune(), user, start, end, suid) out.write(s) count += 1 diff --git a/templates/addvote.html b/templates/addvote.html index 4d2b0292..8777cbf3 100644 --- a/templates/addvote.html +++ b/templates/addvote.html @@ -65,4 
+65,3 @@ {% endblock %} - diff --git a/templates/home.html b/templates/home.html index c1f172f4..6a5fca69 100644 --- a/templates/home.html +++ b/templates/home.html @@ -5,7 +5,7 @@ | tr | format('', "", '', "") - | safe + | safe }} {{ "Contributed PKGBUILDs %smust%s conform to the %sArch Packaging Standards%s otherwise they will be deleted!" | tr @@ -61,7 +61,7 @@ {% trans %}The following SSH fingerprints are used for the AUR:{% endtrans %}

      - {% for keytype in ssh_fingerprints %} + {% for keytype in ssh_fingerprints %}
    • {{ keytype }}: {{ ssh_fingerprints[keytype] }} {% endfor %}
    @@ -85,7 +85,7 @@ | tr | format('', "", "", "") - | safe + | safe }}

    diff --git a/templates/packages/index.html b/templates/packages/index.html index 6034d2f6..58ce8648 100644 --- a/templates/packages/index.html +++ b/templates/packages/index.html @@ -12,7 +12,7 @@ {% elif not packages_count %} - {% include "partials/packages/search.html" %} + {% include "partials/packages/search.html" %}

    {{ "No packages matched your search criteria." | tr }}

    diff --git a/templates/partials/account/results.html b/templates/partials/account/results.html index 1c398ce1..ef8d927a 100644 --- a/templates/partials/account/results.html +++ b/templates/partials/account/results.html @@ -79,4 +79,3 @@
  • - diff --git a/templates/tu/show.html b/templates/tu/show.html index c36a3e8f..f4214018 100644 --- a/templates/tu/show.html +++ b/templates/tu/show.html @@ -4,7 +4,7 @@
    {% include "partials/tu/proposal/details.html" %}
    - + {% if utcnow >= voteinfo.End %}
    {% include "partials/tu/proposal/voters.html" %} diff --git a/test/conftest.py b/test/conftest.py index 283c979a..aac221f7 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -39,12 +39,10 @@ ahead of each function takes too long when compared to this method. """ import os import pathlib - from multiprocessing import Lock import py import pytest - from posix_ipc import O_CREAT, Semaphore from sqlalchemy import create_engine from sqlalchemy.engine import URL @@ -54,7 +52,6 @@ from sqlalchemy.orm import scoped_session import aurweb.config import aurweb.db - from aurweb import initdb, logging, testing from aurweb.testing.email import Email from aurweb.testing.filelock import FileLock @@ -78,13 +75,10 @@ def test_engine() -> Engine: unix_socket = aurweb.config.get_with_fallback("database", "socket", None) kwargs = { "username": aurweb.config.get("database", "user"), - "password": aurweb.config.get_with_fallback( - "database", "password", None), + "password": aurweb.config.get_with_fallback("database", "password", None), "host": aurweb.config.get("database", "host"), "port": aurweb.config.get_with_fallback("database", "port", None), - "query": { - "unix_socket": unix_socket - } + "query": {"unix_socket": unix_socket}, } backend = aurweb.config.get("database", "backend") @@ -99,6 +93,7 @@ class AlembicArgs: This structure is needed to pass conftest-specific arguments to initdb.run duration database creation. """ + verbose = False use_alembic = True @@ -156,7 +151,7 @@ def setup_email(): @pytest.fixture(scope="module") def setup_database(tmp_path_factory: pathlib.Path, worker_id: str) -> None: - """ Create and drop a database for the suite this fixture is used in. 
""" + """Create and drop a database for the suite this fixture is used in.""" engine = test_engine() dbname = aurweb.db.name() diff --git a/test/test_accepted_term.py b/test/test_accepted_term.py index 2af7127b..9af19105 100644 --- a/test/test_accepted_term.py +++ b/test/test_accepted_term.py @@ -1,5 +1,4 @@ import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db @@ -17,17 +16,21 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - RealName="Test User", Passwd="testPassword", - AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + RealName="Test User", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user @pytest.fixture def term() -> Term: with db.begin(): - term = db.create(Term, Description="Test term", - URL="https://test.term") + term = db.create(Term, Description="Test term", URL="https://test.term") yield term diff --git a/test/test_account_type.py b/test/test_account_type.py index 1d71f878..4b56b7ff 100644 --- a/test/test_account_type.py +++ b/test/test_account_type.py @@ -22,26 +22,30 @@ def account_type() -> AccountType: def test_account_type(account_type): - """ Test creating an AccountType, and reading its columns. """ + """Test creating an AccountType, and reading its columns.""" # Make sure it got db.created and was given an ID. assert bool(account_type.ID) # Next, test our string functions. 
assert str(account_type) == "TestUser" - assert repr(account_type) == \ - "" % ( - account_type.ID) + assert repr(account_type) == "" % ( + account_type.ID + ) - record = db.query(AccountType, - AccountType.AccountType == "TestUser").first() + record = db.query(AccountType, AccountType.AccountType == "TestUser").first() assert account_type == record def test_user_account_type_relationship(account_type): with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - RealName="Test User", Passwd="testPassword", - AccountType=account_type) + user = db.create( + User, + Username="test", + Email="test@example.org", + RealName="Test User", + Passwd="testPassword", + AccountType=account_type, + ) assert user.AccountType == account_type diff --git a/test/test_accounts_routes.py b/test/test_accounts_routes.py index 37b3d130..eab8fa4f 100644 --- a/test/test_accounts_routes.py +++ b/test/test_accounts_routes.py @@ -1,6 +1,5 @@ import re import tempfile - from datetime import datetime from http import HTTPStatus from logging import DEBUG @@ -8,17 +7,21 @@ from subprocess import Popen import lxml.html import pytest - from fastapi.testclient import TestClient import aurweb.models.account_type as at - from aurweb import captcha, db, logging, time from aurweb.asgi import app from aurweb.db import create, query from aurweb.models.accepted_term import AcceptedTerm -from aurweb.models.account_type import (DEVELOPER_ID, TRUSTED_USER, TRUSTED_USER_AND_DEV_ID, TRUSTED_USER_ID, USER_ID, - AccountType) +from aurweb.models.account_type import ( + DEVELOPER_ID, + TRUSTED_USER, + TRUSTED_USER_AND_DEV_ID, + TRUSTED_USER_ID, + USER_ID, + AccountType, +) from aurweb.models.ban import Ban from aurweb.models.session import Session from aurweb.models.ssh_pub_key import SSHPubKey, get_fingerprint @@ -39,8 +42,11 @@ def make_ssh_pubkey(): # dependency to passing this test). 
with tempfile.TemporaryDirectory() as tmpdir: with open("/dev/null", "w") as null: - proc = Popen(["ssh-keygen", "-f", f"{tmpdir}/test.ssh", "-N", ""], - stdout=null, stderr=null) + proc = Popen( + ["ssh-keygen", "-f", f"{tmpdir}/test.ssh", "-N", ""], + stdout=null, + stderr=null, + ) proc.wait() assert proc.returncode == 0 @@ -60,9 +66,13 @@ def client() -> TestClient: def create_user(username: str) -> User: email = f"{username}@example.org" - user = create(User, Username=username, Email=email, - Passwd="testPassword", - AccountTypeID=USER_ID) + user = create( + User, + Username=username, + Email=email, + Passwd="testPassword", + AccountTypeID=USER_ID, + ) return user @@ -85,8 +95,9 @@ def test_get_passreset_authed_redirects(client: TestClient, user: User): assert sid is not None with client as request: - response = request.get("/passreset", cookies={"AURSID": sid}, - allow_redirects=False) + response = request.get( + "/passreset", cookies={"AURSID": sid}, allow_redirects=False + ) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/" @@ -129,10 +140,12 @@ def test_post_passreset_authed_redirects(client: TestClient, user: User): assert sid is not None with client as request: - response = request.post("/passreset", - cookies={"AURSID": sid}, - data={"user": "blah"}, - allow_redirects=False) + response = request.post( + "/passreset", + cookies={"AURSID": sid}, + data={"user": "blah"}, + allow_redirects=False, + ) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/" @@ -166,8 +179,9 @@ def test_post_passreset_user_suspended(client: TestClient, user: User): def test_post_passreset_resetkey(client: TestClient, user: User): with db.begin(): - user.session = Session(UsersID=user.ID, SessionID="blah", - LastUpdateTS=time.utcnow()) + user.session = Session( + UsersID=user.ID, SessionID="blah", LastUpdateTS=time.utcnow() + ) # Prepare a password reset. 
with client as request: @@ -182,7 +196,7 @@ def test_post_passreset_resetkey(client: TestClient, user: User): "user": TEST_USERNAME, "resetkey": resetkey, "password": "abcd1234", - "confirm": "abcd1234" + "confirm": "abcd1234", } with client as request: @@ -200,10 +214,7 @@ def make_resetkey(client: TestClient, user: User): def make_passreset_data(user: User, resetkey: str): - return { - "user": user.Username, - "resetkey": resetkey - } + return {"user": user.Username, "resetkey": resetkey} def test_post_passreset_error_invalid_email(client: TestClient, user: User): @@ -240,8 +251,7 @@ def test_post_passreset_error_missing_field(client: TestClient, user: User): assert error in response.content.decode("utf-8") -def test_post_passreset_error_password_mismatch(client: TestClient, - user: User): +def test_post_passreset_error_password_mismatch(client: TestClient, user: User): resetkey = make_resetkey(client, user) post_data = make_passreset_data(user, resetkey) @@ -257,8 +267,7 @@ def test_post_passreset_error_password_mismatch(client: TestClient, assert error in response.content.decode("utf-8") -def test_post_passreset_error_password_requirements(client: TestClient, - user: User): +def test_post_passreset_error_password_requirements(client: TestClient, user: User): resetkey = make_resetkey(client, user) post_data = make_passreset_data(user, resetkey) @@ -284,7 +293,7 @@ def test_get_register(client: TestClient): def post_register(request, **kwargs): - """ A simple helper that allows overrides to test defaults. """ + """A simple helper that allows overrides to test defaults.""" salt = captcha.get_captcha_salts()[0] token = captcha.get_captcha_token(salt) answer = captcha.get_captcha_answer(token) @@ -297,7 +306,7 @@ def post_register(request, **kwargs): "L": "en", "TZ": "UTC", "captcha": answer, - "captcha_salt": salt + "captcha_salt": salt, } # For any kwargs given, override their k:v pairs in data. 
@@ -380,9 +389,11 @@ def test_post_register_error_ip_banned(client: TestClient): assert response.status_code == int(HTTPStatus.BAD_REQUEST) content = response.content.decode() - assert ("Account registration has been disabled for your IP address, " + - "probably due to sustained spam attacks. Sorry for the " + - "inconvenience.") in content + assert ( + "Account registration has been disabled for your IP address, " + + "probably due to sustained spam attacks. Sorry for the " + + "inconvenience." + ) in content def test_post_register_error_missing_username(client: TestClient): @@ -489,7 +500,7 @@ def test_post_register_error_invalid_pgp_fingerprints(client: TestClient): expected = "The PGP key fingerprint is invalid." assert expected in content - pk = 'z' + ('a' * 39) + pk = "z" + ("a" * 39) with client as request: response = post_register(request, K=pk) @@ -569,8 +580,11 @@ def test_post_register_error_ssh_pubkey_taken(client: TestClient, user: User): # dependency to passing this test). with tempfile.TemporaryDirectory() as tmpdir: with open("/dev/null", "w") as null: - proc = Popen(["ssh-keygen", "-f", f"{tmpdir}/test.ssh", "-N", ""], - stdout=null, stderr=null) + proc = Popen( + ["ssh-keygen", "-f", f"{tmpdir}/test.ssh", "-N", ""], + stdout=null, + stderr=null, + ) proc.wait() assert proc.returncode == 0 @@ -602,8 +616,11 @@ def test_post_register_with_ssh_pubkey(client: TestClient): # dependency to passing this test). with tempfile.TemporaryDirectory() as tmpdir: with open("/dev/null", "w") as null: - proc = Popen(["ssh-keygen", "-f", f"{tmpdir}/test.ssh", "-N", ""], - stdout=null, stderr=null) + proc = Popen( + ["ssh-keygen", "-f", f"{tmpdir}/test.ssh", "-N", ""], + stdout=null, + stderr=null, + ) proc.wait() assert proc.returncode == 0 @@ -617,7 +634,7 @@ def test_post_register_with_ssh_pubkey(client: TestClient): def test_get_account_edit_tu_as_tu(client: TestClient, tu_user: User): - """ Test edit get route of another TU as a TU. 
""" + """Test edit get route of another TU as a TU.""" with db.begin(): user2 = create_user("test2") user2.AccountTypeID = at.TRUSTED_USER_ID @@ -643,7 +660,7 @@ def test_get_account_edit_tu_as_tu(client: TestClient, tu_user: User): def test_get_account_edit_as_tu(client: TestClient, tu_user: User): - """ Test edit get route of another user as a TU. """ + """Test edit get route of another user as a TU.""" with db.begin(): user2 = create_user("test2") @@ -669,7 +686,7 @@ def test_get_account_edit_as_tu(client: TestClient, tu_user: User): def test_get_account_edit_type(client: TestClient, user: User): - """ Test that users do not have an Account Type field. """ + """Test that users do not have an Account Type field.""" cookies = {"AURSID": user.login(Request(), "testPassword")} endpoint = f"/account/{user.Username}/edit" @@ -700,14 +717,18 @@ def test_get_account_edit_unauthorized(client: TestClient, user: User): sid = user.login(request, "testPassword") with db.begin(): - user2 = create(User, Username="test2", Email="test2@example.org", - Passwd="testPassword", AccountTypeID=USER_ID) + user2 = create( + User, + Username="test2", + Email="test2@example.org", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) endpoint = f"/account/{user2.Username}/edit" with client as request: # Try to edit `test2` while authenticated as `test`. 
- response = request.get(endpoint, cookies={"AURSID": sid}, - allow_redirects=False) + response = request.get(endpoint, cookies={"AURSID": sid}, allow_redirects=False) assert response.status_code == int(HTTPStatus.SEE_OTHER) expected = f"/account/{user2.Username}" @@ -718,16 +739,15 @@ def test_post_account_edit(client: TestClient, user: User): request = Request() sid = user.login(request, "testPassword") - post_data = { - "U": "test", - "E": "test666@example.org", - "passwd": "testPassword" - } + post_data = {"U": "test", "E": "test666@example.org", "passwd": "testPassword"} with client as request: - response = request.post("/account/test/edit", cookies={ - "AURSID": sid - }, data=post_data, allow_redirects=False) + response = request.post( + "/account/test/edit", + cookies={"AURSID": sid}, + data=post_data, + allow_redirects=False, + ) assert response.status_code == int(HTTPStatus.OK) @@ -772,8 +792,7 @@ def test_post_account_edit_type_as_dev(client: TestClient, tu_user: User): assert user2.AccountTypeID == at.DEVELOPER_ID -def test_post_account_edit_invalid_type_as_tu(client: TestClient, - tu_user: User): +def test_post_account_edit_invalid_type_as_tu(client: TestClient, tu_user: User): with db.begin(): user2 = create_user("test_tu") tu_user.AccountTypeID = at.TRUSTED_USER_ID @@ -792,8 +811,10 @@ def test_post_account_edit_invalid_type_as_tu(client: TestClient, assert user2.AccountTypeID == at.USER_ID errors = get_errors(resp.text) - expected = ("You do not have permission to change this user's " - f"account type to {at.DEVELOPER}.") + expected = ( + "You do not have permission to change this user's " + f"account type to {at.DEVELOPER}." 
+ ) assert errors[0].text.strip() == expected @@ -807,16 +828,13 @@ def test_post_account_edit_dev(client: TestClient, tu_user: User): request = Request() sid = tu_user.login(request, "testPassword") - post_data = { - "U": "test", - "E": "test666@example.org", - "passwd": "testPassword" - } + post_data = {"U": "test", "E": "test666@example.org", "passwd": "testPassword"} endpoint = f"/account/{tu_user.Username}/edit" with client as request: - response = request.post(endpoint, cookies={"AURSID": sid}, - data=post_data, allow_redirects=False) + response = request.post( + endpoint, cookies={"AURSID": sid}, data=post_data, allow_redirects=False + ) assert response.status_code == int(HTTPStatus.OK) expected = "The account, test, " @@ -832,13 +850,16 @@ def test_post_account_edit_language(client: TestClient, user: User): "U": "test", "E": "test@example.org", "L": "de", # German - "passwd": "testPassword" + "passwd": "testPassword", } with client as request: - response = request.post("/account/test/edit", cookies={ - "AURSID": sid - }, data=post_data, allow_redirects=False) + response = request.post( + "/account/test/edit", + cookies={"AURSID": sid}, + data=post_data, + allow_redirects=False, + ) assert response.status_code == int(HTTPStatus.OK) @@ -859,33 +880,33 @@ def test_post_account_edit_timezone(client: TestClient, user: User): "U": "test", "E": "test@example.org", "TZ": "CET", - "passwd": "testPassword" + "passwd": "testPassword", } with client as request: - response = request.post("/account/test/edit", cookies={ - "AURSID": sid - }, data=post_data, allow_redirects=False) + response = request.post( + "/account/test/edit", + cookies={"AURSID": sid}, + data=post_data, + allow_redirects=False, + ) assert response.status_code == int(HTTPStatus.OK) -def test_post_account_edit_error_missing_password(client: TestClient, - user: User): +def test_post_account_edit_error_missing_password(client: TestClient, user: User): request = Request() sid = user.login(request, 
"testPassword") - post_data = { - "U": "test", - "E": "test@example.org", - "TZ": "CET", - "passwd": "" - } + post_data = {"U": "test", "E": "test@example.org", "TZ": "CET", "passwd": ""} with client as request: - response = request.post("/account/test/edit", cookies={ - "AURSID": sid - }, data=post_data, allow_redirects=False) + response = request.post( + "/account/test/edit", + cookies={"AURSID": sid}, + data=post_data, + allow_redirects=False, + ) assert response.status_code == int(HTTPStatus.BAD_REQUEST) @@ -893,22 +914,19 @@ def test_post_account_edit_error_missing_password(client: TestClient, assert "Invalid password." in content -def test_post_account_edit_error_invalid_password(client: TestClient, - user: User): +def test_post_account_edit_error_invalid_password(client: TestClient, user: User): request = Request() sid = user.login(request, "testPassword") - post_data = { - "U": "test", - "E": "test@example.org", - "TZ": "CET", - "passwd": "invalid" - } + post_data = {"U": "test", "E": "test@example.org", "TZ": "CET", "passwd": "invalid"} with client as request: - response = request.post("/account/test/edit", cookies={ - "AURSID": sid - }, data=post_data, allow_redirects=False) + response = request.post( + "/account/test/edit", + cookies={"AURSID": sid}, + data=post_data, + allow_redirects=False, + ) assert response.status_code == int(HTTPStatus.BAD_REQUEST) @@ -916,18 +934,18 @@ def test_post_account_edit_error_invalid_password(client: TestClient, assert "Invalid password." 
in content -def test_post_account_edit_suspend_unauthorized(client: TestClient, - user: User): +def test_post_account_edit_suspend_unauthorized(client: TestClient, user: User): cookies = {"AURSID": user.login(Request(), "testPassword")} post_data = { "U": "test", "E": "test@example.org", "S": True, - "passwd": "testPassword" + "passwd": "testPassword", } with client as request: - resp = request.post(f"/account/{user.Username}/edit", data=post_data, - cookies=cookies) + resp = request.post( + f"/account/{user.Username}/edit", data=post_data, cookies=cookies + ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) @@ -945,11 +963,12 @@ def test_post_account_edit_inactivity(client: TestClient, user: User): "U": "test", "E": "test@example.org", "J": True, - "passwd": "testPassword" + "passwd": "testPassword", } with client as request: - resp = request.post(f"/account/{user.Username}/edit", data=post_data, - cookies=cookies) + resp = request.post( + f"/account/{user.Username}/edit", data=post_data, cookies=cookies + ) assert resp.status_code == int(HTTPStatus.OK) # Make sure the user record got updated correctly. 
@@ -957,8 +976,9 @@ def test_post_account_edit_inactivity(client: TestClient, user: User): post_data.update({"J": False}) with client as request: - resp = request.post(f"/account/{user.Username}/edit", data=post_data, - cookies=cookies) + resp = request.post( + f"/account/{user.Username}/edit", data=post_data, cookies=cookies + ) assert resp.status_code == int(HTTPStatus.OK) assert user.InactivityTS == 0 @@ -974,7 +994,7 @@ def test_post_account_edit_suspended(client: TestClient, user: User): "U": "test", "E": "test@example.org", "S": True, - "passwd": "testPassword" + "passwd": "testPassword", } endpoint = f"/account/{user.Username}/edit" with client as request: @@ -997,21 +1017,27 @@ def test_post_account_edit_error_unauthorized(client: TestClient, user: User): sid = user.login(request, "testPassword") with db.begin(): - user2 = create(User, Username="test2", Email="test2@example.org", - Passwd="testPassword", AccountTypeID=USER_ID) + user2 = create( + User, + Username="test2", + Email="test2@example.org", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) post_data = { "U": "test", "E": "test@example.org", "TZ": "CET", - "passwd": "testPassword" + "passwd": "testPassword", } endpoint = f"/account/{user2.Username}/edit" with client as request: # Attempt to edit 'test2' while logged in as 'test'. 
- response = request.post(endpoint, cookies={"AURSID": sid}, - data=post_data, allow_redirects=False) + response = request.post( + endpoint, cookies={"AURSID": sid}, data=post_data, allow_redirects=False + ) assert response.status_code == int(HTTPStatus.SEE_OTHER) expected = f"/account/{user2.Username}" @@ -1026,13 +1052,16 @@ def test_post_account_edit_ssh_pub_key(client: TestClient, user: User): "U": "test", "E": "test@example.org", "PK": make_ssh_pubkey(), - "passwd": "testPassword" + "passwd": "testPassword", } with client as request: - response = request.post("/account/test/edit", cookies={ - "AURSID": sid - }, data=post_data, allow_redirects=False) + response = request.post( + "/account/test/edit", + cookies={"AURSID": sid}, + data=post_data, + allow_redirects=False, + ) assert response.status_code == int(HTTPStatus.OK) @@ -1040,9 +1069,12 @@ def test_post_account_edit_ssh_pub_key(client: TestClient, user: User): post_data["PK"] = make_ssh_pubkey() with client as request: - response = request.post("/account/test/edit", cookies={ - "AURSID": sid - }, data=post_data, allow_redirects=False) + response = request.post( + "/account/test/edit", + cookies={"AURSID": sid}, + data=post_data, + allow_redirects=False, + ) assert response.status_code == int(HTTPStatus.OK) @@ -1055,13 +1087,16 @@ def test_post_account_edit_missing_ssh_pubkey(client: TestClient, user: User): "U": user.Username, "E": user.Email, "PK": make_ssh_pubkey(), - "passwd": "testPassword" + "passwd": "testPassword", } with client as request: - response = request.post("/account/test/edit", cookies={ - "AURSID": sid - }, data=post_data, allow_redirects=False) + response = request.post( + "/account/test/edit", + cookies={"AURSID": sid}, + data=post_data, + allow_redirects=False, + ) assert response.status_code == int(HTTPStatus.OK) @@ -1069,13 +1104,16 @@ def test_post_account_edit_missing_ssh_pubkey(client: TestClient, user: User): "U": user.Username, "E": user.Email, "PK": str(), # Pass an empty 
string now to walk the delete path. - "passwd": "testPassword" + "passwd": "testPassword", } with client as request: - response = request.post("/account/test/edit", cookies={ - "AURSID": sid - }, data=post_data, allow_redirects=False) + response = request.post( + "/account/test/edit", + cookies={"AURSID": sid}, + data=post_data, + allow_redirects=False, + ) assert response.status_code == int(HTTPStatus.OK) @@ -1087,12 +1125,13 @@ def test_post_account_edit_invalid_ssh_pubkey(client: TestClient, user: User): "U": "test", "E": "test@example.org", "PK": pubkey, - "passwd": "testPassword" + "passwd": "testPassword", } cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - response = request.post("/account/test/edit", data=data, - cookies=cookies, allow_redirects=False) + response = request.post( + "/account/test/edit", data=data, cookies=cookies, allow_redirects=False + ) assert response.status_code == int(HTTPStatus.BAD_REQUEST) @@ -1106,13 +1145,16 @@ def test_post_account_edit_password(client: TestClient, user: User): "E": "test@example.org", "P": "newPassword", "C": "newPassword", - "passwd": "testPassword" + "passwd": "testPassword", } with client as request: - response = request.post("/account/test/edit", cookies={ - "AURSID": sid - }, data=post_data, allow_redirects=False) + response = request.post( + "/account/test/edit", + cookies={"AURSID": sid}, + data=post_data, + allow_redirects=False, + ) assert response.status_code == int(HTTPStatus.OK) @@ -1132,7 +1174,7 @@ def test_post_account_edit_self_type_as_user(client: TestClient, user: User): "U": user.Username, "E": user.Email, "T": TRUSTED_USER_ID, - "passwd": "testPassword" + "passwd": "testPassword", } with client as request: resp = request.post(endpoint, data=data, cookies=cookies) @@ -1151,8 +1193,7 @@ def test_post_account_edit_other_user_as_user(client: TestClient, user: User): endpoint = f"/account/{user2.Username}/edit" with client as request: - resp = 
request.get(endpoint, cookies=cookies, - allow_redirects=False) + resp = request.get(endpoint, cookies=cookies, allow_redirects=False) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/account/{user2.Username}" @@ -1172,7 +1213,7 @@ def test_post_account_edit_self_type_as_tu(client: TestClient, tu_user: User): "U": tu_user.Username, "E": tu_user.Email, "T": USER_ID, - "passwd": "testPassword" + "passwd": "testPassword", } with client as request: resp = request.post(endpoint, data=data, cookies=cookies) @@ -1182,7 +1223,8 @@ def test_post_account_edit_self_type_as_tu(client: TestClient, tu_user: User): def test_post_account_edit_other_user_type_as_tu( - client: TestClient, tu_user: User, caplog: pytest.LogCaptureFixture): + client: TestClient, tu_user: User, caplog: pytest.LogCaptureFixture +): caplog.set_level(DEBUG) with db.begin(): @@ -1202,7 +1244,7 @@ def test_post_account_edit_other_user_type_as_tu( "U": user2.Username, "E": user2.Email, "T": TRUSTED_USER_ID, - "passwd": "testPassword" + "passwd": "testPassword", } with client as request: resp = request.post(endpoint, data=data, cookies=cookies) @@ -1212,14 +1254,17 @@ def test_post_account_edit_other_user_type_as_tu( assert user2.AccountTypeID == TRUSTED_USER_ID # and also that this got logged out at DEBUG level. - expected = (f"Trusted User '{tu_user.Username}' has " - f"modified '{user2.Username}' account's type to" - f" {TRUSTED_USER}.") + expected = ( + f"Trusted User '{tu_user.Username}' has " + f"modified '{user2.Username}' account's type to" + f" {TRUSTED_USER}." 
+ ) assert expected in caplog.text def test_post_account_edit_other_user_type_as_tu_invalid_type( - client: TestClient, tu_user: User, caplog: pytest.LogCaptureFixture): + client: TestClient, tu_user: User, caplog: pytest.LogCaptureFixture +): with db.begin(): user2 = create_user("test2") @@ -1227,12 +1272,7 @@ def test_post_account_edit_other_user_type_as_tu_invalid_type( endpoint = f"/account/{user2.Username}/edit" # As a TU, we can modify other user's account types. - data = { - "U": user2.Username, - "E": user2.Email, - "T": 0, - "passwd": "testPassword" - } + data = {"U": user2.Username, "E": user2.Email, "T": 0, "passwd": "testPassword"} with client as request: resp = request.post(endpoint, data=data, cookies=cookies) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) @@ -1247,8 +1287,9 @@ def test_get_account(client: TestClient, user: User): sid = user.login(request, "testPassword") with client as request: - response = request.get("/account/test", cookies={"AURSID": sid}, - allow_redirects=False) + response = request.get( + "/account/test", cookies={"AURSID": sid}, allow_redirects=False + ) assert response.status_code == int(HTTPStatus.OK) @@ -1258,8 +1299,9 @@ def test_get_account_not_found(client: TestClient, user: User): sid = user.login(request, "testPassword") with client as request: - response = request.get("/account/not_found", cookies={"AURSID": sid}, - allow_redirects=False) + response = request.get( + "/account/not_found", cookies={"AURSID": sid}, allow_redirects=False + ) assert response.status_code == int(HTTPStatus.NOT_FOUND) @@ -1274,8 +1316,8 @@ def test_get_account_unauthenticated(client: TestClient, user: User): def test_get_accounts(client: TestClient, user: User, tu_user: User): - """ Test that we can GET request /accounts and receive - a form which can be used to POST /accounts. 
""" + """Test that we can GET request /accounts and receive + a form which can be used to POST /accounts.""" sid = user.login(Request(), "testPassword") cookies = {"AURSID": sid} @@ -1296,8 +1338,8 @@ def test_get_accounts(client: TestClient, user: User, tu_user: User): assert form.attrib.get("action") == "/accounts" def field(element): - """ Return the given element string as a valid - selector in the form. """ + """Return the given element string as a valid + selector in the form.""" return f"./fieldset/p/{element}" username = form.xpath(field('input[@id="id_username"]')) @@ -1360,8 +1402,7 @@ def test_post_accounts(client: TestClient, user: User, tu_user: User): columns = rows[i].xpath("./td") assert len(columns) == 7 - username, atype, suspended, real_name, \ - irc_nick, pgp_key, edit = columns + username, atype, suspended, real_name, irc_nick, pgp_key, edit = columns username = next(iter(username.xpath("./a"))) assert username.text.strip() == _user.Username @@ -1379,8 +1420,10 @@ def test_post_accounts(client: TestClient, user: User, tu_user: User): else: assert not edit - logger.debug('Checked user row {"id": %s, "username": "%s"}.' - % (_user.ID, _user.Username)) + logger.debug( + 'Checked user row {"id": %s, "username": "%s"}.' 
+ % (_user.ID, _user.Username) + ) def test_post_accounts_username(client: TestClient, user: User, tu_user: User): @@ -1389,8 +1432,7 @@ def test_post_accounts_username(client: TestClient, user: User, tu_user: User): cookies = {"AURSID": sid} with client as request: - response = request.post("/accounts", cookies=cookies, - data={"U": user.Username}) + response = request.post("/accounts", cookies=cookies, data={"U": user.Username}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1403,34 +1445,33 @@ def test_post_accounts_username(client: TestClient, user: User, tu_user: User): assert username.text.strip() == user.Username -def test_post_accounts_account_type(client: TestClient, user: User, - tu_user: User): +def test_post_accounts_account_type(client: TestClient, user: User, tu_user: User): # Check the different account type options. sid = user.login(Request(), "testPassword") cookies = {"AURSID": sid} # Make a user with the "User" role here so we can # test the `u` parameter. - account_type = query(AccountType, - AccountType.AccountType == "User").first() + account_type = query(AccountType, AccountType.AccountType == "User").first() with db.begin(): - create(User, Username="test_2", - Email="test_2@example.org", - RealName="Test User 2", - Passwd="testPassword", - AccountType=account_type) + create( + User, + Username="test_2", + Email="test_2@example.org", + RealName="Test User 2", + Passwd="testPassword", + AccountType=account_type, + ) # Expect no entries; we marked our only user as a User type. with client as request: - response = request.post("/accounts", cookies=cookies, - data={"T": "t"}) + response = request.post("/accounts", cookies=cookies, data={"T": "t"}) assert response.status_code == int(HTTPStatus.OK) assert len(get_rows(response.text)) == 0 # So, let's also ensure that specifying "u" returns our user. 
with client as request: - response = request.post("/accounts", cookies=cookies, - data={"T": "u"}) + response = request.post("/accounts", cookies=cookies, data={"T": "u"}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1443,13 +1484,12 @@ def test_post_accounts_account_type(client: TestClient, user: User, # Set our only user to a Trusted User. with db.begin(): - user.AccountType = query(AccountType).filter( - AccountType.ID == TRUSTED_USER_ID - ).first() + user.AccountType = ( + query(AccountType).filter(AccountType.ID == TRUSTED_USER_ID).first() + ) with client as request: - response = request.post("/accounts", cookies=cookies, - data={"T": "t"}) + response = request.post("/accounts", cookies=cookies, data={"T": "t"}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1461,13 +1501,12 @@ def test_post_accounts_account_type(client: TestClient, user: User, assert type.text.strip() == "Trusted User" with db.begin(): - user.AccountType = query(AccountType).filter( - AccountType.ID == DEVELOPER_ID - ).first() + user.AccountType = ( + query(AccountType).filter(AccountType.ID == DEVELOPER_ID).first() + ) with client as request: - response = request.post("/accounts", cookies=cookies, - data={"T": "d"}) + response = request.post("/accounts", cookies=cookies, data={"T": "d"}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1479,13 +1518,12 @@ def test_post_accounts_account_type(client: TestClient, user: User, assert type.text.strip() == "Developer" with db.begin(): - user.AccountType = query(AccountType).filter( - AccountType.ID == TRUSTED_USER_AND_DEV_ID - ).first() + user.AccountType = ( + query(AccountType).filter(AccountType.ID == TRUSTED_USER_AND_DEV_ID).first() + ) with client as request: - response = request.post("/accounts", cookies=cookies, - data={"T": "td"}) + response = request.post("/accounts", cookies=cookies, data={"T": "td"}) assert 
response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1517,8 +1555,7 @@ def test_post_accounts_status(client: TestClient, user: User, tu_user: User): user.Suspended = True with client as request: - response = request.post("/accounts", cookies=cookies, - data={"S": True}) + response = request.post("/accounts", cookies=cookies, data={"S": True}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1535,8 +1572,7 @@ def test_post_accounts_email(client: TestClient, user: User, tu_user: User): # Search via email. with client as request: - response = request.post("/accounts", cookies=cookies, - data={"E": user.Email}) + response = request.post("/accounts", cookies=cookies, data={"E": user.Email}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1549,8 +1585,7 @@ def test_post_accounts_realname(client: TestClient, user: User, tu_user: User): cookies = {"AURSID": sid} with client as request: - response = request.post("/accounts", cookies=cookies, - data={"R": user.RealName}) + response = request.post("/accounts", cookies=cookies, data={"R": user.RealName}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1563,8 +1598,7 @@ def test_post_accounts_irc(client: TestClient, user: User, tu_user: User): cookies = {"AURSID": sid} with client as request: - response = request.post("/accounts", cookies=cookies, - data={"I": user.IRCNick}) + response = request.post("/accounts", cookies=cookies, data={"I": user.IRCNick}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1589,22 +1623,19 @@ def test_post_accounts_sortby(client: TestClient, user: User, tu_user: User): first_rows = rows with client as request: - response = request.post("/accounts", cookies=cookies, - data={"SB": "u"}) + response = request.post("/accounts", cookies=cookies, data={"SB": "u"}) assert response.status_code == int(HTTPStatus.OK) rows = 
get_rows(response.text) assert len(rows) == 2 def compare_text_values(column, lhs, rhs): - return [row[column].text for row in lhs] \ - == [row[column].text for row in rhs] + return [row[column].text for row in lhs] == [row[column].text for row in rhs] # Test the username rows are ordered the same. assert compare_text_values(0, first_rows, rows) is True with client as request: - response = request.post("/accounts", cookies=cookies, - data={"SB": "i"}) + response = request.post("/accounts", cookies=cookies, data={"SB": "i"}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) assert len(rows) == 2 @@ -1614,8 +1645,7 @@ def test_post_accounts_sortby(client: TestClient, user: User, tu_user: User): # Sort by "i" -> RealName. with client as request: - response = request.post("/accounts", cookies=cookies, - data={"SB": "r"}) + response = request.post("/accounts", cookies=cookies, data={"SB": "r"}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) assert len(rows) == 2 @@ -1624,9 +1654,9 @@ def test_post_accounts_sortby(client: TestClient, user: User, tu_user: User): assert compare_text_values(4, first_rows, reversed(rows)) is True with db.begin(): - user.AccountType = query(AccountType).filter( - AccountType.ID == TRUSTED_USER_AND_DEV_ID - ).first() + user.AccountType = ( + query(AccountType).filter(AccountType.ID == TRUSTED_USER_AND_DEV_ID).first() + ) # Fetch first_rows again with our new AccountType ordering. with client as request: @@ -1638,8 +1668,7 @@ def test_post_accounts_sortby(client: TestClient, user: User, tu_user: User): # Sort by "t" -> AccountType. 
with client as request: - response = request.post("/accounts", cookies=cookies, - data={"SB": "t"}) + response = request.post("/accounts", cookies=cookies, data={"SB": "t"}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) assert len(rows) == 2 @@ -1657,8 +1686,7 @@ def test_post_accounts_pgp_key(client: TestClient, user: User, tu_user: User): # Search via PGPKey. with client as request: - response = request.post("/accounts", cookies=cookies, - data={"K": user.PGPKey}) + response = request.post("/accounts", cookies=cookies, data={"K": user.PGPKey}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1668,15 +1696,17 @@ def test_post_accounts_pgp_key(client: TestClient, user: User, tu_user: User): def test_post_accounts_paged(client: TestClient, user: User, tu_user: User): # Create 150 users. users = [user] - account_type = query(AccountType, - AccountType.AccountType == "User").first() + account_type = query(AccountType, AccountType.AccountType == "User").first() with db.begin(): for i in range(150): - _user = create(User, Username=f"test_#{i}", - Email=f"test_#{i}@example.org", - RealName=f"Test User #{i}", - Passwd="testPassword", - AccountType=account_type) + _user = create( + User, + Username=f"test_#{i}", + Email=f"test_#{i}@example.org", + RealName=f"Test User #{i}", + Passwd="testPassword", + AccountType=account_type, + ) users.append(_user) sid = user.login(Request(), "testPassword") @@ -1709,8 +1739,9 @@ def test_post_accounts_paged(client: TestClient, user: User, tu_user: User): assert "disabled" not in page_next.attrib with client as request: - response = request.post("/accounts", cookies=cookies, - data={"O": 50}) # +50 offset. + response = request.post( + "/accounts", cookies=cookies, data={"O": 50} + ) # +50 offset. 
assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1724,8 +1755,9 @@ def test_post_accounts_paged(client: TestClient, user: User, tu_user: User): assert username.text.strip() == _user.Username with client as request: - response = request.post("/accounts", cookies=cookies, - data={"O": 101}) # Last page. + response = request.post( + "/accounts", cookies=cookies, data={"O": 101} + ) # Last page. assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1741,8 +1773,9 @@ def test_post_accounts_paged(client: TestClient, user: User, tu_user: User): def test_get_terms_of_service(client: TestClient, user: User): with db.begin(): - term = create(Term, Description="Test term.", - URL="http://localhost", Revision=1) + term = create( + Term, Description="Test term.", URL="http://localhost", Revision=1 + ) with client as request: response = request.get("/tos", allow_redirects=False) @@ -1764,8 +1797,9 @@ def test_get_terms_of_service(client: TestClient, user: User): assert response.status_code == int(HTTPStatus.OK) with db.begin(): - accepted_term = create(AcceptedTerm, User=user, - Term=term, Revision=term.Revision) + accepted_term = create( + AcceptedTerm, User=user, Term=term, Revision=term.Revision + ) with client as request: response = request.get("/tos", cookies=cookies, allow_redirects=False) @@ -1800,8 +1834,9 @@ def test_post_terms_of_service(client: TestClient, user: User): # Create a fresh Term. with db.begin(): - term = create(Term, Description="Test term.", - URL="http://localhost", Revision=1) + term = create( + Term, Description="Test term.", URL="http://localhost", Revision=1 + ) # Test that the term we just created is listed. with client as request: @@ -1810,8 +1845,7 @@ def test_post_terms_of_service(client: TestClient, user: User): # Make a POST request to /tos with the agree checkbox disabled (False). 
with client as request: - response = request.post("/tos", data={"accept": False}, - cookies=cookies) + response = request.post("/tos", data={"accept": False}, cookies=cookies) assert response.status_code == int(HTTPStatus.OK) # Make a POST request to /tos with the agree checkbox enabled (True). @@ -1820,8 +1854,7 @@ def test_post_terms_of_service(client: TestClient, user: User): assert response.status_code == int(HTTPStatus.SEE_OTHER) # Query the db for the record created by the post request. - accepted_term = query(AcceptedTerm, - AcceptedTerm.TermsID == term.ID).first() + accepted_term = query(AcceptedTerm, AcceptedTerm.TermsID == term.ID).first() assert accepted_term.User == user assert accepted_term.Term == term diff --git a/test/test_adduser.py b/test/test_adduser.py index 65968d40..2cb71f3b 100644 --- a/test/test_adduser.py +++ b/test/test_adduser.py @@ -3,16 +3,17 @@ from unittest import mock import pytest import aurweb.models.account_type as at - from aurweb import db from aurweb.models import User from aurweb.scripts import adduser from aurweb.testing.requests import Request -TEST_SSH_PUBKEY = ("ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAI" - "bmlzdHAyNTYAAABBBEURnkiY6JoLyqDE8Li1XuAW+LHmkmLDMW/GL5wY" - "7k4/A+Ta7bjA3MOKrF9j4EuUTvCuNXULxvpfSqheTFWZc+g= " - "kevr@volcano") +TEST_SSH_PUBKEY = ( + "ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAI" + "bmlzdHAyNTYAAABBBEURnkiY6JoLyqDE8Li1XuAW+LHmkmLDMW/GL5wY" + "7k4/A+Ta7bjA3MOKrF9j4EuUTvCuNXULxvpfSqheTFWZc+g= " + "kevr@volcano" +) @pytest.fixture(autouse=True) @@ -38,18 +39,36 @@ def test_adduser(): def test_adduser_tu(): - run_main([ - "-u", "test", "-e", "test@example.org", "-p", "abcd1234", - "-t", at.TRUSTED_USER - ]) + run_main( + [ + "-u", + "test", + "-e", + "test@example.org", + "-p", + "abcd1234", + "-t", + at.TRUSTED_USER, + ] + ) test = db.query(User).filter(User.Username == "test").first() assert test is not None assert test.AccountTypeID == at.TRUSTED_USER_ID def 
test_adduser_ssh_pk(): - run_main(["-u", "test", "-e", "test@example.org", "-p", "abcd1234", - "--ssh-pubkey", TEST_SSH_PUBKEY]) + run_main( + [ + "-u", + "test", + "-e", + "test@example.org", + "-p", + "abcd1234", + "--ssh-pubkey", + TEST_SSH_PUBKEY, + ] + ) test = db.query(User).filter(User.Username == "test").first() assert test is not None assert TEST_SSH_PUBKEY.startswith(test.ssh_pub_keys.first().PubKey) diff --git a/test/test_api_rate_limit.py b/test/test_api_rate_limit.py index 82805ecf..c67aa57d 100644 --- a/test/test_api_rate_limit.py +++ b/test/test_api_rate_limit.py @@ -1,5 +1,4 @@ import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db @@ -13,8 +12,7 @@ def setup(db_test): def test_api_rate_key_creation(): with db.begin(): - rate = db.create(ApiRateLimit, IP="127.0.0.1", Requests=10, - WindowStart=1) + rate = db.create(ApiRateLimit, IP="127.0.0.1", Requests=10, WindowStart=1) assert rate.IP == "127.0.0.1" assert rate.Requests == 10 assert rate.WindowStart == 1 diff --git a/test/test_asgi.py b/test/test_asgi.py index c693a3a9..6ff80fa3 100644 --- a/test/test_asgi.py +++ b/test/test_asgi.py @@ -1,20 +1,17 @@ import http import os import re - from typing import Callable from unittest import mock import fastapi import pytest - from fastapi import HTTPException from fastapi.testclient import TestClient import aurweb.asgi import aurweb.config import aurweb.redis - from aurweb.exceptions import handle_form_exceptions from aurweb.testing.requests import Request @@ -33,7 +30,9 @@ def mock_glab_request(monkeypatch): if side_effect: return side_effect # pragma: no cover return return_value + monkeypatch.setattr("requests.post", what_to_return) + return wrapped @@ -47,13 +46,14 @@ def mock_glab_config(project: str = "test/project", token: str = "test-token"): elif key == "error-token": return token return config_get(section, key) + return wrapper @pytest.mark.asyncio async def test_asgi_startup_session_secret_exception(monkeypatch): - """ 
Test that we get an IOError on app_startup when we cannot - connect to options.redis_address. """ + """Test that we get an IOError on app_startup when we cannot + connect to options.redis_address.""" redis_addr = aurweb.config.get("options", "redis_address") @@ -110,8 +110,9 @@ async def test_asgi_app_disabled_metrics(caplog: pytest.LogCaptureFixture): with mock.patch.dict(os.environ, env): await aurweb.asgi.app_startup() - expected = ("$PROMETHEUS_MULTIPROC_DIR is not set, the /metrics " - "endpoint is disabled.") + expected = ( + "$PROMETHEUS_MULTIPROC_DIR is not set, the /metrics " "endpoint is disabled." + ) assert expected in caplog.text @@ -134,9 +135,12 @@ class FakeResponse: self.text = text -def test_internal_server_error_bad_glab(setup: None, use_traceback: None, - mock_glab_request: Callable, - caplog: pytest.LogCaptureFixture): +def test_internal_server_error_bad_glab( + setup: None, + use_traceback: None, + mock_glab_request: Callable, + caplog: pytest.LogCaptureFixture, +): @aurweb.asgi.app.get("/internal_server_error") async def internal_server_error(request: fastapi.Request): raise ValueError("test exception") @@ -154,9 +158,12 @@ def test_internal_server_error_bad_glab(setup: None, use_traceback: None, assert re.search(expr, caplog.text) -def test_internal_server_error_no_token(setup: None, use_traceback: None, - mock_glab_request: Callable, - caplog: pytest.LogCaptureFixture): +def test_internal_server_error_no_token( + setup: None, + use_traceback: None, + mock_glab_request: Callable, + caplog: pytest.LogCaptureFixture, +): @aurweb.asgi.app.get("/internal_server_error") async def internal_server_error(request: fastapi.Request): raise ValueError("test exception") @@ -175,9 +182,12 @@ def test_internal_server_error_no_token(setup: None, use_traceback: None, assert re.search(expr, caplog.text) -def test_internal_server_error(setup: None, use_traceback: None, - mock_glab_request: Callable, - caplog: pytest.LogCaptureFixture): +def 
test_internal_server_error( + setup: None, + use_traceback: None, + mock_glab_request: Callable, + caplog: pytest.LogCaptureFixture, +): @aurweb.asgi.app.get("/internal_server_error") async def internal_server_error(request: fastapi.Request): raise ValueError("test exception") @@ -203,9 +213,12 @@ def test_internal_server_error(setup: None, use_traceback: None, assert "FATAL" not in caplog.text -def test_internal_server_error_post(setup: None, use_traceback: None, - mock_glab_request: Callable, - caplog: pytest.LogCaptureFixture): +def test_internal_server_error_post( + setup: None, + use_traceback: None, + mock_glab_request: Callable, + caplog: pytest.LogCaptureFixture, +): @aurweb.asgi.app.post("/internal_server_error") @handle_form_exceptions async def internal_server_error(request: fastapi.Request): diff --git a/test/test_aurblup.py b/test/test_aurblup.py index 0b499d57..93a832f9 100644 --- a/test/test_aurblup.py +++ b/test/test_aurblup.py @@ -1,5 +1,4 @@ import tempfile - from unittest import mock import py @@ -32,7 +31,7 @@ def setup(db_test, alpm_db: AlpmDatabase, tempdir: py.path.local) -> None: if key == "db-path": return alpm_db.local elif key == "server": - return f'file://{alpm_db.remote}' + return f"file://{alpm_db.remote}" elif key == "sync-dbs": return alpm_db.repo return value @@ -51,8 +50,7 @@ def test_aurblup(alpm_db: AlpmDatabase): # Test that the package got added to the database. for name in ("pkg", "pkg2"): - pkg = db.query(OfficialProvider).filter( - OfficialProvider.Name == name).first() + pkg = db.query(OfficialProvider).filter(OfficialProvider.Name == name).first() assert pkg is not None # Test that we can remove the package. @@ -62,11 +60,9 @@ def test_aurblup(alpm_db: AlpmDatabase): aurblup.main(True) # Expect that the database got updated accordingly. 
- pkg = db.query(OfficialProvider).filter( - OfficialProvider.Name == "pkg").first() + pkg = db.query(OfficialProvider).filter(OfficialProvider.Name == "pkg").first() assert pkg is None - pkg2 = db.query(OfficialProvider).filter( - OfficialProvider.Name == "pkg2").first() + pkg2 = db.query(OfficialProvider).filter(OfficialProvider.Name == "pkg2").first() assert pkg2 is not None @@ -78,14 +74,16 @@ def test_aurblup_cleanup(alpm_db: AlpmDatabase): # Now, let's insert an OfficialPackage that doesn't exist, # then exercise the old provider deletion path. with db.begin(): - db.create(OfficialProvider, Name="fake package", - Repo="test", Provides="package") + db.create( + OfficialProvider, Name="fake package", Repo="test", Provides="package" + ) # Run aurblup again. aurblup.main() # Expect that the fake package got deleted because it's # not in alpm_db anymore. - providers = db.query(OfficialProvider).filter( - OfficialProvider.Name == "fake package").all() + providers = ( + db.query(OfficialProvider).filter(OfficialProvider.Name == "fake package").all() + ) assert len(providers) == 0 diff --git a/test/test_auth.py b/test/test_auth.py index b8221c19..4a4318e8 100644 --- a/test/test_auth.py +++ b/test/test_auth.py @@ -1,11 +1,15 @@ import fastapi import pytest - from fastapi import HTTPException from sqlalchemy.exc import IntegrityError from aurweb import config, db, time -from aurweb.auth import AnonymousUser, BasicAuthBackend, _auth_required, account_type_required +from aurweb.auth import ( + AnonymousUser, + BasicAuthBackend, + _auth_required, + account_type_required, +) from aurweb.models.account_type import USER, USER_ID from aurweb.models.session import Session from aurweb.models.user import User @@ -20,9 +24,14 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.com", - RealName="Test User", Passwd="testPassword", - AccountTypeID=USER_ID) + user = db.create( + User, + 
Username="test", + Email="test@example.com", + RealName="Test User", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user @@ -55,8 +64,7 @@ async def test_auth_backend_invalid_user_id(): # Create a new session with a fake user id. now_ts = time.utcnow() with pytest.raises(IntegrityError): - Session(UsersID=666, SessionID="realSession", - LastUpdateTS=now_ts + 5) + Session(UsersID=666, SessionID="realSession", LastUpdateTS=now_ts + 5) @pytest.mark.asyncio @@ -65,8 +73,9 @@ async def test_basic_auth_backend(user: User, backend: BasicAuthBackend): # equal the real_user. now_ts = time.utcnow() with db.begin(): - db.create(Session, UsersID=user.ID, SessionID="realSession", - LastUpdateTS=now_ts + 5) + db.create( + Session, UsersID=user.ID, SessionID="realSession", LastUpdateTS=now_ts + 5 + ) request = Request() request.cookies["AURSID"] = "realSession" @@ -76,7 +85,7 @@ async def test_basic_auth_backend(user: User, backend: BasicAuthBackend): @pytest.mark.asyncio async def test_expired_session(backend: BasicAuthBackend, user: User): - """ Login, expire the session manually, then authenticate. """ + """Login, expire the session manually, then authenticate.""" # First, build a Request with a logged in user. request = Request() request.user = user @@ -115,8 +124,8 @@ async def test_auth_required_redirection_bad_referrer(): def test_account_type_required(): - """ This test merely asserts that a few different paths - do not raise exceptions. """ + """This test merely asserts that a few different paths + do not raise exceptions.""" # This one shouldn't raise. account_type_required({USER}) @@ -125,7 +134,7 @@ def test_account_type_required(): # But this one should! We have no "FAKE" key. 
with pytest.raises(KeyError): - account_type_required({'FAKE'}) + account_type_required({"FAKE"}) def test_is_trusted_user(): diff --git a/test/test_auth_routes.py b/test/test_auth_routes.py index 5942edcf..87ad86f6 100644 --- a/test/test_auth_routes.py +++ b/test/test_auth_routes.py @@ -1,14 +1,11 @@ import re - from http import HTTPStatus from unittest import mock import pytest - from fastapi.testclient import TestClient import aurweb.config - from aurweb import db, time from aurweb.asgi import app from aurweb.models.account_type import USER_ID @@ -42,39 +39,41 @@ def client() -> TestClient: @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username=TEST_USERNAME, Email=TEST_EMAIL, - RealName="Test User", Passwd="testPassword", - AccountTypeID=USER_ID) + user = db.create( + User, + Username=TEST_USERNAME, + Email=TEST_EMAIL, + RealName="Test User", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user def test_login_logout(client: TestClient, user: User): - post_data = { - "user": "test", - "passwd": "testPassword", - "next": "/" - } + post_data = {"user": "test", "passwd": "testPassword", "next": "/"} with client as request: # First, let's test get /login. response = request.get("/login") assert response.status_code == int(HTTPStatus.OK) - response = request.post("/login", data=post_data, - allow_redirects=False) + response = request.post("/login", data=post_data, allow_redirects=False) assert response.status_code == int(HTTPStatus.SEE_OTHER) # Simulate following the redirect location from above's response. 
response = request.get(response.headers.get("location")) assert response.status_code == int(HTTPStatus.OK) - response = request.post("/logout", data=post_data, - allow_redirects=False) + response = request.post("/logout", data=post_data, allow_redirects=False) assert response.status_code == int(HTTPStatus.SEE_OTHER) - response = request.post("/logout", data=post_data, cookies={ - "AURSID": response.cookies.get("AURSID") - }, allow_redirects=False) + response = request.post( + "/logout", + data=post_data, + cookies={"AURSID": response.cookies.get("AURSID")}, + allow_redirects=False, + ) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert "AURSID" not in response.cookies @@ -84,11 +83,7 @@ def test_login_suspended(client: TestClient, user: User): with db.begin(): user.Suspended = 1 - data = { - "user": user.Username, - "passwd": "testPassword", - "next": "/" - } + data = {"user": user.Username, "passwd": "testPassword", "next": "/"} with client as request: resp = request.post("/login", data=data) errors = get_errors(resp.text) @@ -96,23 +91,17 @@ def test_login_suspended(client: TestClient, user: User): def test_login_email(client: TestClient, user: user): - post_data = { - "user": user.Email, - "passwd": "testPassword", - "next": "/" - } + post_data = {"user": user.Email, "passwd": "testPassword", "next": "/"} with client as request: - resp = request.post("/login", data=post_data, - allow_redirects=False) + resp = request.post("/login", data=post_data, allow_redirects=False) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert "AURSID" in resp.cookies def mock_getboolean(**overrided_configs): mocked_config = { - tuple(config.split("__")): value - for config, value in overrided_configs.items() + tuple(config.split("__")): value for config, value in overrided_configs.items() } def side_effect(*args): @@ -123,19 +112,14 @@ def mock_getboolean(**overrided_configs): @mock.patch( "aurweb.config.getboolean", - 
side_effect=mock_getboolean(options__disable_http_login=False) + side_effect=mock_getboolean(options__disable_http_login=False), ) def test_insecure_login(getboolean: mock.Mock, client: TestClient, user: User): - post_data = { - "user": user.Username, - "passwd": "testPassword", - "next": "/" - } + post_data = {"user": user.Username, "passwd": "testPassword", "next": "/"} # Perform a login request with the data matching our user. with client as request: - response = request.post("/login", data=post_data, - allow_redirects=False) + response = request.post("/login", data=post_data, allow_redirects=False) # Make sure we got the expected status out of it. assert response.status_code == int(HTTPStatus.SEE_OTHER) @@ -152,17 +136,17 @@ def test_insecure_login(getboolean: mock.Mock, client: TestClient, user: User): @mock.patch( "aurweb.config.getboolean", - side_effect=mock_getboolean(options__disable_http_login=True) + side_effect=mock_getboolean(options__disable_http_login=True), ) def test_secure_login(getboolean: mock.Mock, client: TestClient, user: User): - """ In this test, we check to verify the course of action taken + """In this test, we check to verify the course of action taken by starlette when providing secure=True to a response cookie. This is achieved by mocking aurweb.config.getboolean to return True (or 1) when looking for `options.disable_http_login`. When we receive a response with `disable_http_login` enabled, we check the fields in cookies received for the secure and httponly fields, in addition to the rest of the fields given - on such a request. """ + on such a request.""" # Create a local TestClient here since we mocked configuration. # client = TestClient(app) @@ -172,16 +156,11 @@ def test_secure_login(getboolean: mock.Mock, client: TestClient, user: User): # client.headers.update(TEST_REFERER) # Data used for our upcoming http post request. 
- post_data = { - "user": user.Username, - "passwd": "testPassword", - "next": "/" - } + post_data = {"user": user.Username, "passwd": "testPassword", "next": "/"} # Perform a login request with the data matching our user. with client as request: - response = request.post("/login", data=post_data, - allow_redirects=False) + response = request.post("/login", data=post_data, allow_redirects=False) # Make sure we got the expected status out of it. assert response.status_code == int(HTTPStatus.SEE_OTHER) @@ -203,16 +182,11 @@ def test_secure_login(getboolean: mock.Mock, client: TestClient, user: User): def test_authenticated_login(client: TestClient, user: User): - post_data = { - "user": user.Username, - "passwd": "testPassword", - "next": "/" - } + post_data = {"user": user.Username, "passwd": "testPassword", "next": "/"} with client as request: # Try to login. - response = request.post("/login", data=post_data, - allow_redirects=False) + response = request.post("/login", data=post_data, allow_redirects=False) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/" @@ -220,8 +194,9 @@ def test_authenticated_login(client: TestClient, user: User): # when requesting GET /login as an authenticated user. # Now, let's verify that we receive 403 Forbidden when we # try to get /login as an authenticated user. 
- response = request.get("/login", cookies=response.cookies, - allow_redirects=False) + response = request.get( + "/login", cookies=response.cookies, allow_redirects=False + ) assert response.status_code == int(HTTPStatus.OK) assert "Logged-in as: test" in response.text @@ -236,10 +211,7 @@ def test_unauthenticated_logout_unauthorized(client: TestClient): def test_login_missing_username(client: TestClient): - post_data = { - "passwd": "testPassword", - "next": "/" - } + post_data = {"passwd": "testPassword", "next": "/"} with client as request: response = request.post("/login", data=post_data) @@ -256,17 +228,15 @@ def test_login_remember_me(client: TestClient, user: User): "user": "test", "passwd": "testPassword", "next": "/", - "remember_me": True + "remember_me": True, } with client as request: - response = request.post("/login", data=post_data, - allow_redirects=False) + response = request.post("/login", data=post_data, allow_redirects=False) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert "AURSID" in response.cookies - cookie_timeout = aurweb.config.getint( - "options", "persistent_cookie_timeout") + cookie_timeout = aurweb.config.getint("options", "persistent_cookie_timeout") now_ts = time.utcnow() session = db.query(Session).filter(Session.UsersID == user.ID).first() @@ -280,7 +250,7 @@ def test_login_incorrect_password_remember_me(client: TestClient, user: User): "user": "test", "passwd": "badPassword", "next": "/", - "remember_me": "on" + "remember_me": "on", } with client as request: @@ -295,10 +265,7 @@ def test_login_incorrect_password_remember_me(client: TestClient, user: User): def test_login_missing_password(client: TestClient): - post_data = { - "user": "test", - "next": "/" - } + post_data = {"user": "test", "next": "/"} with client as request: response = request.post("/login", data=post_data) @@ -310,11 +277,7 @@ def test_login_missing_password(client: TestClient): def test_login_incorrect_password(client: TestClient): - post_data 
= { - "user": "test", - "passwd": "badPassword", - "next": "/" - } + post_data = {"user": "test", "passwd": "badPassword", "next": "/"} with client as request: response = request.post("/login", data=post_data) @@ -350,8 +313,9 @@ def test_login_bad_referer(client: TestClient): assert "AURSID" not in response.cookies -def test_generate_unique_sid_exhausted(client: TestClient, user: User, - caplog: pytest.LogCaptureFixture): +def test_generate_unique_sid_exhausted( + client: TestClient, user: User, caplog: pytest.LogCaptureFixture +): """ In this test, we mock up generate_unique_sid() to infinitely return the same SessionID given to `user`. Within that mocking, we try @@ -364,13 +328,17 @@ def test_generate_unique_sid_exhausted(client: TestClient, user: User, now = time.utcnow() with db.begin(): # Create a second user; we'll login with this one. - user2 = db.create(User, Username="test2", Email="test2@example.org", - ResetKey="testReset", Passwd="testPassword", - AccountTypeID=USER_ID) + user2 = db.create( + User, + Username="test2", + Email="test2@example.org", + ResetKey="testReset", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) # Create a session with ID == "testSession" for `user`. - db.create(Session, User=user, SessionID="testSession", - LastUpdateTS=now) + db.create(Session, User=user, SessionID="testSession", LastUpdateTS=now) # Mock out generate_unique_sid; always return "testSession" which # causes us to eventually error out and raise an internal error. 
diff --git a/test/test_ban.py b/test/test_ban.py index ff49f7e2..9db62296 100644 --- a/test/test_ban.py +++ b/test/test_ban.py @@ -1,9 +1,7 @@ import warnings - from datetime import datetime, timedelta import pytest - from sqlalchemy import exc as sa_exc from aurweb import db diff --git a/test/test_cache.py b/test/test_cache.py index b49ee386..83a9755a 100644 --- a/test/test_cache.py +++ b/test/test_cache.py @@ -11,7 +11,7 @@ def setup(db_test): class StubRedis: - """ A class which acts as a RedisConnection without using Redis. """ + """A class which acts as a RedisConnection without using Redis.""" cache = dict() expires = dict() @@ -39,10 +39,13 @@ def redis(): @pytest.mark.asyncio async def test_db_count_cache(redis): - db.create(User, Username="user1", - Email="user1@example.org", - Passwd="testPassword", - AccountTypeID=USER_ID) + db.create( + User, + Username="user1", + Email="user1@example.org", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) query = db.query(User) @@ -57,10 +60,13 @@ async def test_db_count_cache(redis): @pytest.mark.asyncio async def test_db_count_cache_expires(redis): - db.create(User, Username="user1", - Email="user1@example.org", - Passwd="testPassword", - AccountTypeID=USER_ID) + db.create( + User, + Username="user1", + Email="user1@example.org", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) query = db.query(User) diff --git a/test/test_captcha.py b/test/test_captcha.py index e5f8c71a..fa6fcbcc 100644 --- a/test/test_captcha.py +++ b/test/test_captcha.py @@ -11,14 +11,14 @@ def setup(db_test): def test_captcha_salts(): - """ Make sure we can get some captcha salts. """ + """Make sure we can get some captcha salts.""" salts = captcha.get_captcha_salts() assert len(salts) == 6 def test_captcha_token(): - """ Make sure getting a captcha salt's token matches up against - the first three digits of the md5 hash of the salt. 
""" + """Make sure getting a captcha salt's token matches up against + the first three digits of the md5 hash of the salt.""" salts = captcha.get_captcha_salts() salt = salts[0] @@ -29,9 +29,9 @@ def test_captcha_token(): def test_captcha_challenge_answer(): - """ Make sure that executing the captcha challenge via shell + """Make sure that executing the captcha challenge via shell produces the correct result by comparing it against a straight - up token conversion. """ + up token conversion.""" salts = captcha.get_captcha_salts() salt = salts[0] @@ -44,7 +44,7 @@ def test_captcha_challenge_answer(): def test_captcha_salt_filter(): - """ Make sure captcha_salt_filter returns the first salt from + """Make sure captcha_salt_filter returns the first salt from get_captcha_salts(). Example usage: @@ -55,7 +55,7 @@ def test_captcha_salt_filter(): def test_captcha_cmdline_filter(): - """ Make sure that the captcha_cmdline filter gives us the + """Make sure that the captcha_cmdline filter gives us the same challenge that get_captcha_challenge does. Example usage: diff --git a/test/test_config.py b/test/test_config.py index f451d8b3..c7a3610e 100644 --- a/test/test_config.py +++ b/test/test_config.py @@ -2,7 +2,6 @@ import configparser import io import os import re - from unittest import mock import py @@ -35,6 +34,7 @@ def mock_config_get(): if option == "salt_rounds": return "666" return config_get(section, option) + return _mock_config_get @@ -59,7 +59,7 @@ def test_config_main_get_unknown_section(get: str): main() # With an invalid section, we should get a usage error. 
- expected = r'^error: no section found$' + expected = r"^error: no section found$" assert re.match(expected, stderr.getvalue().strip()) @@ -140,8 +140,7 @@ def test_config_main_set_immutable(): args = ["aurweb-config", "set", "options", "salt_rounds", "666"] with mock.patch.dict(os.environ, {"AUR_CONFIG_IMMUTABLE": "1"}): with mock.patch("sys.argv", args): - with mock.patch("aurweb.config.set_option", - side_effect=mock_set_option): + with mock.patch("aurweb.config.set_option", side_effect=mock_set_option): main() expected = None @@ -170,8 +169,7 @@ def test_config_main_set_unknown_section(save: None): args = ["aurweb-config", "set", "options", "salt_rounds", "666"] with mock.patch("sys.argv", args): with mock.patch("sys.stderr", stderr): - with mock.patch("aurweb.config.set_option", - side_effect=mock_set_option): + with mock.patch("aurweb.config.set_option", side_effect=mock_set_option): main() assert stderr.getvalue().strip() == "error: no section found" diff --git a/test/test_db.py b/test/test_db.py index f36fff2c..8ac5607d 100644 --- a/test/test_db.py +++ b/test/test_db.py @@ -2,26 +2,26 @@ import os import re import sqlite3 import tempfile - from unittest import mock import pytest import aurweb.config import aurweb.initdb - from aurweb import db from aurweb.models.account_type import AccountType class Args: - """ Stub arguments used for running aurweb.initdb. """ + """Stub arguments used for running aurweb.initdb.""" + use_alembic = True verbose = True class DBCursor: - """ A fake database cursor object used in tests. """ + """A fake database cursor object used in tests.""" + items = [] def execute(self, *args, **kwargs): @@ -33,7 +33,8 @@ class DBCursor: class DBConnection: - """ A fake database connection object used in tests. 
""" + """A fake database connection object used in tests.""" + @staticmethod def cursor(): return DBCursor() @@ -44,7 +45,7 @@ class DBConnection: def make_temp_config(*replacements): - """ Generate a temporary config file with a set of replacements. + """Generate a temporary config file with a set of replacements. :param *replacements: A variable number of tuple regex replacement pairs :return: A tuple containing (temp directory, temp config file) @@ -85,13 +86,16 @@ def make_temp_config(*replacements): def make_temp_sqlite_config(): - return make_temp_config((r"backend = .*", "backend = sqlite"), - (r"name = .*", "name = /tmp/aurweb.sqlite3")) + return make_temp_config( + (r"backend = .*", "backend = sqlite"), + (r"name = .*", "name = /tmp/aurweb.sqlite3"), + ) def make_temp_mysql_config(): - return make_temp_config((r"backend = .*", "backend = mysql"), - (r"name = .*", "name = aurweb_test")) + return make_temp_config( + (r"backend = .*", "backend = mysql"), (r"name = .*", "name = aurweb_test") + ) @pytest.fixture(autouse=True) @@ -150,7 +154,7 @@ def test_sqlalchemy_unknown_backend(): def test_db_connects_without_fail(): - """ This only tests the actual config supplied to pytest. 
""" + """This only tests the actual config supplied to pytest.""" db.connect() diff --git a/test/test_dependency_type.py b/test/test_dependency_type.py index c5afd38d..e172782b 100644 --- a/test/test_dependency_type.py +++ b/test/test_dependency_type.py @@ -12,8 +12,7 @@ def setup(db_test): def test_dependency_types(): dep_types = ["depends", "makedepends", "checkdepends", "optdepends"] for dep_type in dep_types: - dependency_type = query(DependencyType, - DependencyType.Name == dep_type).first() + dependency_type = query(DependencyType, DependencyType.Name == dep_type).first() assert dependency_type is not None diff --git a/test/test_email.py b/test/test_email.py index 873feffe..81abd507 100644 --- a/test/test_email.py +++ b/test/test_email.py @@ -1,5 +1,4 @@ import io - from subprocess import PIPE, Popen import pytest @@ -23,7 +22,7 @@ def sendmail(from_: str, to_: str, content: str) -> Email: def test_email_glue(): - """ Test that Email.glue() decodes both base64 and decoded content. """ + """Test that Email.glue() decodes both base64 and decoded content.""" body = "Test email." sendmail("test@example.org", "test@example.org", body) assert Email.count() == 1 @@ -34,7 +33,7 @@ def test_email_glue(): def test_email_dump(): - """ Test that Email.dump() dumps a single email. """ + """Test that Email.dump() dumps a single email.""" body = "Test email." sendmail("test@example.org", "test@example.org", body) assert Email.count() == 1 @@ -46,7 +45,7 @@ def test_email_dump(): def test_email_dump_multiple(): - """ Test that Email.dump() dumps multiple emails. """ + """Test that Email.dump() dumps multiple emails.""" body = "Test email." 
sendmail("test@example.org", "test@example.org", body) sendmail("test2@example.org", "test2@example.org", body) diff --git a/test/test_filelock.py b/test/test_filelock.py index 70aa7580..c0580642 100644 --- a/test/test_filelock.py +++ b/test/test_filelock.py @@ -1,5 +1,4 @@ import py - from _pytest.logging import LogCaptureFixture from aurweb.testing.filelock import FileLock diff --git a/test/test_filters.py b/test/test_filters.py index 558911f5..e74ddb87 100644 --- a/test/test_filters.py +++ b/test/test_filters.py @@ -22,7 +22,7 @@ def test_number_format(): def test_extend_query(): - """ Test extension of a query via extend_query. """ + """Test extension of a query via extend_query.""" query = {"a": "b"} extended = filters.extend_query(query, ("a", "c"), ("b", "d")) assert extended.get("a") == "c" @@ -30,7 +30,7 @@ def test_extend_query(): def test_to_qs(): - """ Test conversion from a query dictionary to a query string. """ + """Test conversion from a query dictionary to a query string.""" query = {"a": "b", "c": [1, 2, 3]} qs = filters.to_qs(query) assert qs == "a=b&c=1&c=2&c=3" diff --git a/test/test_group.py b/test/test_group.py index 82b82464..a1c563b6 100644 --- a/test/test_group.py +++ b/test/test_group.py @@ -1,5 +1,4 @@ import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db diff --git a/test/test_homepage.py b/test/test_homepage.py index 63b832e3..5490a244 100644 --- a/test/test_homepage.py +++ b/test/test_homepage.py @@ -1,10 +1,8 @@ import re - from http import HTTPStatus from unittest.mock import patch import pytest - from fastapi.testclient import TestClient from aurweb import db, time @@ -31,16 +29,26 @@ def setup(db_test): @pytest.fixture def user(): with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - Passwd="testPassword", AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user 
@pytest.fixture def user2(): with db.begin(): - user = db.create(User, Username="test2", Email="test2@example.org", - Passwd="testPassword", AccountTypeID=USER_ID) + user = db.create( + User, + Username="test2", + Email="test2@example.org", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user @@ -50,10 +58,17 @@ def redis(): def delete_keys(): # Cleanup keys if they exist. - for key in ("package_count", "orphan_count", "user_count", - "trusted_user_count", "seven_days_old_added", - "seven_days_old_updated", "year_old_updated", - "never_updated", "package_updates"): + for key in ( + "package_count", + "orphan_count", + "user_count", + "trusted_user_count", + "seven_days_old_added", + "seven_days_old_updated", + "year_old_updated", + "never_updated", + "package_updates", + ): if redis.get(key) is not None: redis.delete(key) @@ -66,16 +81,21 @@ def redis(): def package(user: User) -> Package: now = time.utcnow() with db.begin(): - pkgbase = db.create(PackageBase, Name="test-pkg", - Maintainer=user, Packager=user, - SubmittedTS=now, ModifiedTS=now) + pkgbase = db.create( + PackageBase, + Name="test-pkg", + Maintainer=user, + Packager=user, + SubmittedTS=now, + ModifiedTS=now, + ) pkg = db.create(Package, PackageBase=pkgbase, Name=pkgbase.Name) yield pkg @pytest.fixture def packages(user): - """ Yield a list of num_packages Package objects maintained by user. """ + """Yield a list of num_packages Package objects maintained by user.""" num_packages = 50 # Tunable # For i..num_packages, create a package named pkg_{i}. 
@@ -83,9 +103,14 @@ def packages(user): now = time.utcnow() with db.begin(): for i in range(num_packages): - pkgbase = db.create(PackageBase, Name=f"pkg_{i}", - Maintainer=user, Packager=user, - SubmittedTS=now, ModifiedTS=now) + pkgbase = db.create( + PackageBase, + Name=f"pkg_{i}", + Maintainer=user, + Packager=user, + SubmittedTS=now, + ModifiedTS=now, + ) pkg = db.create(Package, PackageBase=pkgbase, Name=pkgbase.Name) pkgs.append(pkg) now += 1 @@ -99,9 +124,9 @@ def test_homepage(): assert response.status_code == int(HTTPStatus.OK) -@patch('aurweb.util.get_ssh_fingerprints') +@patch("aurweb.util.get_ssh_fingerprints") def test_homepage_ssh_fingerprints(get_ssh_fingerprints_mock): - fingerprints = {'Ed25519': "SHA256:RFzBCUItH9LZS0cKB5UE6ceAYhBD5C8GeOBip8Z11+4"} + fingerprints = {"Ed25519": "SHA256:RFzBCUItH9LZS0cKB5UE6ceAYhBD5C8GeOBip8Z11+4"} get_ssh_fingerprints_mock.return_value = fingerprints with client as request: @@ -110,17 +135,23 @@ def test_homepage_ssh_fingerprints(get_ssh_fingerprints_mock): for key, value in fingerprints.items(): assert key in response.content.decode() assert value in response.content.decode() - assert 'The following SSH fingerprints are used for the AUR' in response.content.decode() + assert ( + "The following SSH fingerprints are used for the AUR" + in response.content.decode() + ) -@patch('aurweb.util.get_ssh_fingerprints') +@patch("aurweb.util.get_ssh_fingerprints") def test_homepage_no_ssh_fingerprints(get_ssh_fingerprints_mock): get_ssh_fingerprints_mock.return_value = {} with client as request: response = request.get("/") - assert 'The following SSH fingerprints are used for the AUR' not in response.content.decode() + assert ( + "The following SSH fingerprints are used for the AUR" + not in response.content.decode() + ) def test_homepage_stats(redis, packages): @@ -131,20 +162,20 @@ def test_homepage_stats(redis, packages): root = parse_root(response.text) expectations = [ - ("Packages", r'\d+'), - ("Orphan Packages", 
r'\d+'), - ("Packages added in the past 7 days", r'\d+'), - ("Packages updated in the past 7 days", r'\d+'), - ("Packages updated in the past year", r'\d+'), - ("Packages never updated", r'\d+'), - ("Registered Users", r'\d+'), - ("Trusted Users", r'\d+') + ("Packages", r"\d+"), + ("Orphan Packages", r"\d+"), + ("Packages added in the past 7 days", r"\d+"), + ("Packages updated in the past 7 days", r"\d+"), + ("Packages updated in the past year", r"\d+"), + ("Packages never updated", r"\d+"), + ("Registered Users", r"\d+"), + ("Trusted Users", r"\d+"), ] stats = root.xpath('//div[@id="pkg-stats"]//tr') for i, expected in enumerate(expectations): expected_key, expected_regex = expected - key, value = stats[i].xpath('./td') + key, value = stats[i].xpath("./td") assert key.text.strip() == expected_key assert re.match(expected_regex, value.text.strip()) @@ -165,7 +196,7 @@ def test_homepage_updates(redis, packages): expectations = [f"pkg_{i}" for i in range(50 - 1, 50 - 1 - 15, -1)] updates = root.xpath('//div[@id="pkg-updates"]/table/tbody/tr') for i, expected in enumerate(expectations): - pkgname = updates[i].xpath('./td/a').pop(0) + pkgname = updates[i].xpath("./td/a").pop(0) assert pkgname.text.strip() == expected @@ -173,9 +204,9 @@ def test_homepage_dashboard(redis, packages, user): # Create Comaintainer records for all of the packages. 
with db.begin(): for pkg in packages: - db.create(PackageComaintainer, - PackageBase=pkg.PackageBase, - User=user, Priority=1) + db.create( + PackageComaintainer, PackageBase=pkg.PackageBase, User=user, Priority=1 + ) cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: @@ -189,16 +220,18 @@ def test_homepage_dashboard(redis, packages, user): expectations = [f"pkg_{i}" for i in range(50 - 1, 0, -1)] my_packages = root.xpath('//table[@id="my-packages"]/tbody/tr') for i, expected in enumerate(expectations): - name, version, votes, pop, voted, notify, desc, maint \ - = my_packages[i].xpath('./td') - assert name.xpath('./a').pop(0).text.strip() == expected + name, version, votes, pop, voted, notify, desc, maint = my_packages[i].xpath( + "./td" + ) + assert name.xpath("./a").pop(0).text.strip() == expected # Do the same for the Comaintained Packages table. my_packages = root.xpath('//table[@id="comaintained-packages"]/tbody/tr') for i, expected in enumerate(expectations): - name, version, votes, pop, voted, notify, desc, maint \ - = my_packages[i].xpath('./td') - assert name.xpath('./a').pop(0).text.strip() == expected + name, version, votes, pop, voted, notify, desc, maint = my_packages[i].xpath( + "./td" + ) + assert name.xpath("./a").pop(0).text.strip() == expected def test_homepage_dashboard_requests(redis, packages, user): @@ -207,11 +240,16 @@ def test_homepage_dashboard_requests(redis, packages, user): pkg = packages[0] reqtype = db.query(RequestType, RequestType.ID == DELETION_ID).first() with db.begin(): - pkgreq = db.create(PackageRequest, PackageBase=pkg.PackageBase, - PackageBaseName=pkg.PackageBase.Name, - User=user, Comments=str(), - ClosureComment=str(), RequestTS=now, - RequestType=reqtype) + pkgreq = db.create( + PackageRequest, + PackageBase=pkg.PackageBase, + PackageBaseName=pkg.PackageBase.Name, + User=user, + Comments=str(), + ClosureComment=str(), + RequestTS=now, + RequestType=reqtype, + ) cookies = {"AURSID": 
user.login(Request(), "testPassword")} with client as request: @@ -220,7 +258,7 @@ def test_homepage_dashboard_requests(redis, packages, user): root = parse_root(response.text) request = root.xpath('//table[@id="pkgreq-results"]/tbody/tr').pop(0) - pkgname = request.xpath('./td/a').pop(0) + pkgname = request.xpath("./td/a").pop(0) assert pkgname.text.strip() == pkgreq.PackageBaseName @@ -238,7 +276,7 @@ def test_homepage_dashboard_flagged_packages(redis, packages, user): # Check to see that the package showed up in the Flagged Packages table. root = parse_root(response.text) flagged_pkg = root.xpath('//table[@id="flagged-packages"]/tbody/tr').pop(0) - flagged_name = flagged_pkg.xpath('./td/a').pop(0) + flagged_name = flagged_pkg.xpath("./td/a").pop(0) assert flagged_name.text.strip() == pkg.Name @@ -247,8 +285,7 @@ def test_homepage_dashboard_flagged(user: User, user2: User, package: Package): now = time.utcnow() with db.begin(): - db.create(PackageComaintainer, User=user2, - PackageBase=pkgbase, Priority=1) + db.create(PackageComaintainer, User=user2, PackageBase=pkgbase, Priority=1) pkgbase.OutOfDateTS = now - 5 pkgbase.Flagger = user diff --git a/test/test_html.py b/test/test_html.py index ffe2a9f2..88c75a7c 100644 --- a/test/test_html.py +++ b/test/test_html.py @@ -2,13 +2,11 @@ import hashlib import os import tempfile - from http import HTTPStatus from unittest import mock import fastapi import pytest - from fastapi import HTTPException from fastapi.testclient import TestClient @@ -33,8 +31,13 @@ def client() -> TestClient: @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - Passwd="testPassword", AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user @@ -53,12 +56,7 @@ def pkgbase(user: User) -> PackageBase: def test_archdev_navbar(client: TestClient): - expected = [ - "AUR Home", - 
"Packages", - "Register", - "Login" - ] + expected = ["AUR Home", "Packages", "Register", "Login"] with client as request: resp = request.get("/") assert resp.status_code == int(HTTPStatus.OK) @@ -70,13 +68,7 @@ def test_archdev_navbar(client: TestClient): def test_archdev_navbar_authenticated(client: TestClient, user: User): - expected = [ - "Dashboard", - "Packages", - "Requests", - "My Account", - "Logout" - ] + expected = ["Dashboard", "Packages", "Requests", "My Account", "Logout"] cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: resp = request.get("/", cookies=cookies) @@ -88,8 +80,7 @@ def test_archdev_navbar_authenticated(client: TestClient, user: User): assert item.text.strip() == expected[i] -def test_archdev_navbar_authenticated_tu(client: TestClient, - trusted_user: User): +def test_archdev_navbar_authenticated_tu(client: TestClient, trusted_user: User): expected = [ "Dashboard", "Packages", @@ -97,7 +88,7 @@ def test_archdev_navbar_authenticated_tu(client: TestClient, "Accounts", "My Account", "Trusted User", - "Logout" + "Logout", ] cookies = {"AURSID": trusted_user.login(Request(), "testPassword")} with client as request: @@ -131,7 +122,7 @@ def test_get_successes(): def test_archive_sig(client: TestClient): - hash_value = hashlib.sha256(b'test').hexdigest() + hash_value = hashlib.sha256(b"test").hexdigest() with tempfile.TemporaryDirectory() as tmpdir: packages_sha256 = os.path.join(tmpdir, "packages.gz.sha256") @@ -179,12 +170,7 @@ def test_disabled_metrics(client: TestClient): def test_rtl(client: TestClient): responses = {} - expected = [ - [], - [], - ['rtl'], - ['rtl'] - ] + expected = [[], [], ["rtl"], ["rtl"]] with client as request: responses["default"] = request.get("/") responses["de"] = request.get("/", cookies={"AURLANG": "de"}) @@ -193,11 +179,11 @@ def test_rtl(client: TestClient): for i, (lang, resp) in enumerate(responses.items()): assert resp.status_code == int(HTTPStatus.OK) t = 
parse_root(resp.text) - assert t.xpath('//html/@dir') == expected[i] + assert t.xpath("//html/@dir") == expected[i] def test_404_with_valid_pkgbase(client: TestClient, pkgbase: PackageBase): - """ Test HTTPException with status_code == 404 and valid pkgbase. """ + """Test HTTPException with status_code == 404 and valid pkgbase.""" endpoint = f"/{pkgbase.Name}" with client as request: response = request.get(endpoint) @@ -209,7 +195,7 @@ def test_404_with_valid_pkgbase(client: TestClient, pkgbase: PackageBase): def test_404(client: TestClient): - """ Test HTTPException with status_code == 404 without a valid pkgbase. """ + """Test HTTPException with status_code == 404 without a valid pkgbase.""" with client as request: response = request.get("/nonexistentroute") assert response.status_code == int(HTTPStatus.NOT_FOUND) @@ -221,7 +207,8 @@ def test_404(client: TestClient): def test_503(client: TestClient): - """ Test HTTPException with status_code == 503 (Service Unavailable). """ + """Test HTTPException with status_code == 503 (Service Unavailable).""" + @asgi.app.get("/raise-503") async def raise_503(request: fastapi.Request): raise HTTPException(status_code=HTTPStatus.SERVICE_UNAVAILABLE) diff --git a/test/test_initdb.py b/test/test_initdb.py index 44681d8e..db5edf74 100644 --- a/test/test_initdb.py +++ b/test/test_initdb.py @@ -3,7 +3,6 @@ import pytest import aurweb.config import aurweb.db import aurweb.initdb - from aurweb.models.account_type import AccountType @@ -19,11 +18,11 @@ class Args: def test_run(): from aurweb.schema import metadata + aurweb.db.kill_engine() metadata.drop_all(aurweb.db.get_engine()) aurweb.initdb.run(Args()) # Check that constant table rows got added via initdb. 
- record = aurweb.db.query(AccountType, - AccountType.AccountType == "User").first() + record = aurweb.db.query(AccountType, AccountType.AccountType == "User").first() assert record is not None diff --git a/test/test_l10n.py b/test/test_l10n.py index c24c5f55..818d517f 100644 --- a/test/test_l10n.py +++ b/test/test_l10n.py @@ -4,13 +4,13 @@ from aurweb.testing.requests import Request def test_translator(): - """ Test creating l10n translation tools. """ + """Test creating l10n translation tools.""" de_home = l10n.translator.translate("Home", "de") assert de_home == "Startseite" def test_get_request_language(): - """ First, tests default_lang, then tests a modified AURLANG cookie. """ + """First, tests default_lang, then tests a modified AURLANG cookie.""" request = Request() assert l10n.get_request_language(request) == "en" @@ -19,18 +19,17 @@ def test_get_request_language(): def test_get_raw_translator_for_request(): - """ Make sure that get_raw_translator_for_request is giving us - the translator we expect. """ + """Make sure that get_raw_translator_for_request is giving us + the translator we expect.""" request = Request() request.cookies["AURLANG"] = "de" translator = l10n.get_raw_translator_for_request(request) - assert translator.gettext("Home") == \ - l10n.translator.translate("Home", "de") + assert translator.gettext("Home") == l10n.translator.translate("Home", "de") def test_get_translator_for_request(): - """ Make sure that get_translator_for_request is giving us back - our expected translation function. 
""" + """Make sure that get_translator_for_request is giving us back + our expected translation function.""" request = Request() request.cookies["AURLANG"] = "de" @@ -43,10 +42,8 @@ def test_tn_filter(): request.cookies["AURLANG"] = "en" context = {"language": "en", "request": request} - translated = filters.tn(context, 1, "%d package found.", - "%d packages found.") + translated = filters.tn(context, 1, "%d package found.", "%d packages found.") assert translated == "%d package found." - translated = filters.tn(context, 2, "%d package found.", - "%d packages found.") + translated = filters.tn(context, 2, "%d package found.", "%d packages found.") assert translated == "%d packages found." diff --git a/test/test_license.py b/test/test_license.py index b34bd260..cea76e7d 100644 --- a/test/test_license.py +++ b/test/test_license.py @@ -1,5 +1,4 @@ import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db diff --git a/test/test_mkpkglists.py b/test/test_mkpkglists.py index 9bc1073b..3c105817 100644 --- a/test/test_mkpkglists.py +++ b/test/test_mkpkglists.py @@ -1,14 +1,20 @@ import gzip import json import os - from unittest import mock import py import pytest from aurweb import config, db -from aurweb.models import License, Package, PackageBase, PackageDependency, PackageLicense, User +from aurweb.models import ( + License, + Package, + PackageBase, + PackageDependency, + PackageLicense, + User, +) from aurweb.models.account_type import USER_ID from aurweb.models.dependency_type import DEPENDS_ID @@ -38,10 +44,13 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", - Email="test@example.org", - Passwd="testPassword", - AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user @@ -52,16 +61,18 @@ def packages(user: User) -> list[Package]: lic = db.create(License, Name="GPL") for i 
in range(5): # Create the package. - pkgbase = db.create(PackageBase, Name=f"pkgbase_{i}", - Packager=user) - pkg = db.create(Package, PackageBase=pkgbase, - Name=f"pkg_{i}") + pkgbase = db.create(PackageBase, Name=f"pkgbase_{i}", Packager=user) + pkg = db.create(Package, PackageBase=pkgbase, Name=f"pkg_{i}") # Create some related records. db.create(PackageLicense, Package=pkg, License=lic) - db.create(PackageDependency, DepTypeID=DEPENDS_ID, - Package=pkg, DepName=f"dep_{i}", - DepCondition=">=1.0") + db.create( + PackageDependency, + DepTypeID=DEPENDS_ID, + Package=pkg, + DepName=f"dep_{i}", + DepCondition=">=1.0", + ) # Add the package to our output list. output.append(pkg) @@ -88,8 +99,11 @@ def config_mock(tmpdir: py.path.local) -> None: config.rehash() -def test_mkpkglists(tmpdir: py.path.local, config_mock: None, user: User, packages: list[Package]): +def test_mkpkglists( + tmpdir: py.path.local, config_mock: None, user: User, packages: list[Package] +): from aurweb.scripts import mkpkglists + mkpkglists.main() PACKAGES = config.get("mkpkglists", "packagesfile") @@ -106,10 +120,7 @@ def test_mkpkglists(tmpdir: py.path.local, config_mock: None, user: User, packag PKGBASE, "pkgbase_0\npkgbase_1\npkgbase_2\npkgbase_3\npkgbase_4\n", ), - ( - USERS, - "test\n" - ), + (USERS, "test\n"), ] for (file, expected_content) in expectations: @@ -136,6 +147,7 @@ def test_mkpkglists(tmpdir: py.path.local, config_mock: None, user: User, packag @mock.patch("sys.argv", ["mkpkglists", "--extended"]) def test_mkpkglists_extended_empty(config_mock: None): from aurweb.scripts import mkpkglists + mkpkglists.main() PACKAGES = config.get("mkpkglists", "packagesfile") @@ -166,9 +178,9 @@ def test_mkpkglists_extended_empty(config_mock: None): @mock.patch("sys.argv", ["mkpkglists", "--extended"]) -def test_mkpkglists_extended(config_mock: None, user: User, - packages: list[Package]): +def test_mkpkglists_extended(config_mock: None, user: User, packages: list[Package]): from 
aurweb.scripts import mkpkglists + mkpkglists.main() PACKAGES = config.get("mkpkglists", "packagesfile") @@ -186,10 +198,7 @@ def test_mkpkglists_extended(config_mock: None, user: User, PKGBASE, "pkgbase_0\npkgbase_1\npkgbase_2\npkgbase_3\npkgbase_4\n", ), - ( - USERS, - "test\n" - ), + (USERS, "test\n"), ] for (file, expected_content) in expectations: diff --git a/test/test_notify.py b/test/test_notify.py index bbcc6b5a..9e61d9ee 100644 --- a/test/test_notify.py +++ b/test/test_notify.py @@ -23,24 +23,39 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - Passwd=str(), AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + Passwd=str(), + AccountTypeID=USER_ID, + ) yield user @pytest.fixture def user1() -> User: with db.begin(): - user1 = db.create(User, Username="user1", Email="user1@example.org", - Passwd=str(), AccountTypeID=USER_ID) + user1 = db.create( + User, + Username="user1", + Email="user1@example.org", + Passwd=str(), + AccountTypeID=USER_ID, + ) yield user1 @pytest.fixture def user2() -> User: with db.begin(): - user2 = db.create(User, Username="user2", Email="user2@example.org", - Passwd=str(), AccountTypeID=USER_ID) + user2 = db.create( + User, + Username="user2", + Email="user2@example.org", + Passwd=str(), + AccountTypeID=USER_ID, + ) yield user2 @@ -52,11 +67,15 @@ def pkgbases(user: User) -> list[PackageBase]: with db.begin(): for i in range(5): output.append( - db.create(PackageBase, Name=f"pkgbase_{i}", - Maintainer=user, SubmittedTS=now, - ModifiedTS=now)) - db.create(models.PackageNotification, PackageBase=output[-1], - User=user) + db.create( + PackageBase, + Name=f"pkgbase_{i}", + Maintainer=user, + SubmittedTS=now, + ModifiedTS=now, + ) + ) + db.create(models.PackageNotification, PackageBase=output[-1], User=user) yield output @@ -64,11 +83,15 @@ def pkgbases(user: User) -> list[PackageBase]: def 
pkgreq(user2: User, pkgbases: list[PackageBase]): pkgbase = pkgbases[0] with db.begin(): - pkgreq_ = db.create(PackageRequest, PackageBase=pkgbase, - PackageBaseName=pkgbase.Name, User=user2, - ReqTypeID=ORPHAN_ID, - Comments="This is a request test comment.", - ClosureComment=str()) + pkgreq_ = db.create( + PackageRequest, + PackageBase=pkgbase, + PackageBaseName=pkgbase.Name, + User=user2, + ReqTypeID=ORPHAN_ID, + Comments="This is a request test comment.", + ClosureComment=str(), + ) yield pkgreq_ @@ -78,21 +101,24 @@ def packages(pkgbases: list[PackageBase]) -> list[Package]: with db.begin(): for i, pkgbase in enumerate(pkgbases): output.append( - db.create(Package, PackageBase=pkgbase, - Name=f"pkg_{i}", Version=f"{i}.0")) + db.create( + Package, PackageBase=pkgbase, Name=f"pkg_{i}", Version=f"{i}.0" + ) + ) yield output -def test_out_of_date(user: User, user1: User, user2: User, - pkgbases: list[PackageBase]): +def test_out_of_date(user: User, user1: User, user2: User, pkgbases: list[PackageBase]): pkgbase = pkgbases[0] # Create two comaintainers. We'll pass the maintainer uid to # FlagNotification, so we should expect to get two emails. with db.begin(): - db.create(models.PackageComaintainer, - PackageBase=pkgbase, User=user1, Priority=1) - db.create(models.PackageComaintainer, - PackageBase=pkgbase, User=user2, Priority=2) + db.create( + models.PackageComaintainer, PackageBase=pkgbase, User=user1, Priority=1 + ) + db.create( + models.PackageComaintainer, PackageBase=pkgbase, User=user2, Priority=2 + ) # Send the notification for pkgbases[0]. 
notif = notify.FlagNotification(user.ID, pkgbases[0].ID) @@ -165,8 +191,12 @@ def test_comment(user: User, user2: User, pkgbases: list[PackageBase]): pkgbase = pkgbases[0] with db.begin(): - comment = db.create(models.PackageComment, PackageBase=pkgbase, - User=user2, Comments="This is a test comment.") + comment = db.create( + models.PackageComment, + PackageBase=pkgbase, + User=user2, + Comments="This is a test comment.", + ) rendercomment.update_comment_render_fastapi(comment) notif = notify.CommentNotification(user2.ID, pkgbase.ID, comment.ID) @@ -366,15 +396,16 @@ def set_tu(users: list[User]) -> User: user.AccountTypeID = TRUSTED_USER_ID -def test_open_close_request(user: User, user2: User, - pkgreq: PackageRequest, - pkgbases: list[PackageBase]): +def test_open_close_request( + user: User, user2: User, pkgreq: PackageRequest, pkgbases: list[PackageBase] +): set_tu([user]) pkgbase = pkgbases[0] # Send an open request notification. notif = notify.RequestOpenNotification( - user2.ID, pkgreq.ID, pkgreq.RequestType.Name, pkgbase.ID) + user2.ID, pkgreq.ID, pkgreq.RequestType.Name, pkgbase.ID + ) notif.send() assert Email.count() == 1 @@ -420,22 +451,24 @@ Request #{pkgreq.ID} has been rejected by {user2.Username} [1]. email = Email(3).parse() assert email.headers.get("To") == aur_request_ml assert email.headers.get("Cc") == ", ".join([user.Email, user2.Email]) - expected = (f"[PRQ#{pkgreq.ID}] Orphan Request for " - f"{pkgbase.Name} Accepted") + expected = f"[PRQ#{pkgreq.ID}] Orphan Request for " f"{pkgbase.Name} Accepted" assert email.headers.get("Subject") == expected - expected = (f"Request #{pkgreq.ID} has been accepted automatically " - "by the Arch User Repository\npackage request system.") + expected = ( + f"Request #{pkgreq.ID} has been accepted automatically " + "by the Arch User Repository\npackage request system." 
+ ) assert email.body == expected -def test_close_request_comaintainer_cc(user: User, user2: User, - pkgreq: PackageRequest, - pkgbases: list[PackageBase]): +def test_close_request_comaintainer_cc( + user: User, user2: User, pkgreq: PackageRequest, pkgbases: list[PackageBase] +): pkgbase = pkgbases[0] with db.begin(): - db.create(models.PackageComaintainer, PackageBase=pkgbase, - User=user2, Priority=1) + db.create( + models.PackageComaintainer, PackageBase=pkgbase, User=user2, Priority=1 + ) notif = notify.RequestCloseNotification(0, pkgreq.ID, "accepted") notif.send() @@ -446,9 +479,9 @@ def test_close_request_comaintainer_cc(user: User, user2: User, assert email.headers.get("Cc") == ", ".join([user.Email, user2.Email]) -def test_close_request_closure_comment(user: User, user2: User, - pkgreq: PackageRequest, - pkgbases: list[PackageBase]): +def test_close_request_closure_comment( + user: User, user2: User, pkgreq: PackageRequest, pkgbases: list[PackageBase] +): pkgbase = pkgbases[0] with db.begin(): pkgreq.ClosureComment = "This is a test closure comment." @@ -496,7 +529,7 @@ ends in less than 48 hours. def test_notify_main(user: User): - """ Test TU vote reminder through aurweb.notify.main(). 
""" + """Test TU vote reminder through aurweb.notify.main().""" set_tu([user]) vote_id = 1 @@ -539,6 +572,7 @@ def mock_smtp_config(cls): elif key == "smtp-password": return cls() return cls(config_get(section, key)) + return _mock_smtp_config @@ -574,6 +608,7 @@ def mock_smtp_starttls_config(cls): elif key == "smtp-password": return cls("password") return cls(config_get(section, key)) + return _mock_smtp_starttls_config @@ -590,8 +625,7 @@ def test_smtp_starttls(user: User): get = "aurweb.config.get" getboolean = "aurweb.config.getboolean" with mock.patch(get, side_effect=mock_smtp_starttls_config(str)): - with mock.patch( - getboolean, side_effect=mock_smtp_starttls_config(bool)): + with mock.patch(getboolean, side_effect=mock_smtp_starttls_config(bool)): with mock.patch("smtplib.SMTP", side_effect=smtp): notif = notify.WelcomeNotification(user.ID) notif.send() @@ -621,6 +655,7 @@ def mock_smtp_ssl_config(cls): elif key == "smtp-password": return cls("password") return cls(config_get(section, key)) + return _mock_smtp_ssl_config @@ -651,7 +686,7 @@ def test_notification_defaults(): def test_notification_oserror(user: User, caplog: pytest.LogCaptureFixture): - """ Try sending a notification with a bad SMTP configuration. 
""" + """Try sending a notification with a bad SMTP configuration.""" caplog.set_level(ERROR) config_get = config.get config_getint = config.getint diff --git a/test/test_official_provider.py b/test/test_official_provider.py index 9287ea2d..b36fff5a 100644 --- a/test/test_official_provider.py +++ b/test/test_official_provider.py @@ -1,5 +1,4 @@ import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db @@ -13,10 +12,12 @@ def setup(db_test): def test_official_provider_creation(): with db.begin(): - oprovider = db.create(OfficialProvider, - Name="some-name", - Repo="some-repo", - Provides="some-provides") + oprovider = db.create( + OfficialProvider, + Name="some-name", + Repo="some-repo", + Provides="some-provides", + ) assert bool(oprovider.ID) assert oprovider.Name == "some-name" assert oprovider.Repo == "some-repo" @@ -24,19 +25,23 @@ def test_official_provider_creation(): def test_official_provider_cs(): - """ Test case sensitivity of the database table. """ + """Test case sensitivity of the database table.""" with db.begin(): - oprovider = db.create(OfficialProvider, - Name="some-name", - Repo="some-repo", - Provides="some-provides") + oprovider = db.create( + OfficialProvider, + Name="some-name", + Repo="some-repo", + Provides="some-provides", + ) assert bool(oprovider.ID) with db.begin(): - oprovider_cs = db.create(OfficialProvider, - Name="SOME-NAME", - Repo="SOME-REPO", - Provides="SOME-PROVIDES") + oprovider_cs = db.create( + OfficialProvider, + Name="SOME-NAME", + Repo="SOME-REPO", + Provides="SOME-PROVIDES", + ) assert bool(oprovider_cs.ID) assert oprovider.ID != oprovider_cs.ID diff --git a/test/test_package.py b/test/test_package.py index 1408a182..2a9df483 100644 --- a/test/test_package.py +++ b/test/test_package.py @@ -1,5 +1,4 @@ import pytest - from sqlalchemy import and_ from sqlalchemy.exc import IntegrityError @@ -20,20 +19,28 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = 
db.create(User, Username="test", Email="test@example.org", - RealName="Test User", Passwd="testPassword", - AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + RealName="Test User", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user @pytest.fixture def package(user: User) -> Package: with db.begin(): - pkgbase = db.create(PackageBase, Name="beautiful-package", - Maintainer=user) - package = db.create(Package, PackageBase=pkgbase, Name=pkgbase.Name, - Description="Test description.", - URL="https://test.package") + pkgbase = db.create(PackageBase, Name="beautiful-package", Maintainer=user) + package = db.create( + Package, + PackageBase=pkgbase, + Name=pkgbase.Name, + Description="Test description.", + URL="https://test.package", + ) yield package @@ -48,21 +55,28 @@ def test_package(package: Package): package.Version = "1.2.3" # Make sure it got updated in the database. - record = db.query(Package).filter( - and_(Package.ID == package.ID, - Package.Version == "1.2.3") - ).first() + record = ( + db.query(Package) + .filter(and_(Package.ID == package.ID, Package.Version == "1.2.3")) + .first() + ) assert record is not None def test_package_null_pkgbase_raises(): with pytest.raises(IntegrityError): - Package(Name="some-package", Description="Some description.", - URL="https://some.package") + Package( + Name="some-package", + Description="Some description.", + URL="https://some.package", + ) def test_package_null_name_raises(package: Package): pkgbase = package.PackageBase with pytest.raises(IntegrityError): - Package(PackageBase=pkgbase, Description="Some description.", - URL="https://some.package") + Package( + PackageBase=pkgbase, + Description="Some description.", + URL="https://some.package", + ) diff --git a/test/test_package_base.py b/test/test_package_base.py index 5be7e40b..feea8183 100644 --- a/test/test_package_base.py +++ b/test/test_package_base.py @@ -1,5 +1,4 @@ import pytest - from 
sqlalchemy.exc import IntegrityError from aurweb import db @@ -16,17 +15,21 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - RealName="Test User", Passwd="testPassword", - AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + RealName="Test User", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user @pytest.fixture def pkgbase(user: User) -> PackageBase: with db.begin(): - pkgbase = db.create(PackageBase, Name="beautiful-package", - Maintainer=user) + pkgbase = db.create(PackageBase, Name="beautiful-package", Maintainer=user) yield pkgbase @@ -44,7 +47,7 @@ def test_package_base(user: User, pkgbase: PackageBase): def test_package_base_ci(user: User, pkgbase: PackageBase): - """ Test case insensitivity of the database table. """ + """Test case insensitivity of the database table.""" with pytest.raises(IntegrityError): with db.begin(): db.create(PackageBase, Name=pkgbase.Name.upper(), Maintainer=user) diff --git a/test/test_package_blacklist.py b/test/test_package_blacklist.py index 427c3be4..44de1830 100644 --- a/test/test_package_blacklist.py +++ b/test/test_package_blacklist.py @@ -1,5 +1,4 @@ import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db diff --git a/test/test_package_comaintainer.py b/test/test_package_comaintainer.py index e377edc0..52075887 100644 --- a/test/test_package_comaintainer.py +++ b/test/test_package_comaintainer.py @@ -1,5 +1,4 @@ import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db @@ -17,9 +16,14 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - RealName="Test User", Passwd="testPassword", - AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + RealName="Test User", + Passwd="testPassword", + 
AccountTypeID=USER_ID, + ) yield user @@ -32,8 +36,9 @@ def pkgbase(user: User) -> PackageBase: def test_package_comaintainer_creation(user: User, pkgbase: PackageBase): with db.begin(): - package_comaintainer = db.create(PackageComaintainer, User=user, - PackageBase=pkgbase, Priority=5) + package_comaintainer = db.create( + PackageComaintainer, User=user, PackageBase=pkgbase, Priority=5 + ) assert bool(package_comaintainer) assert package_comaintainer.User == user assert package_comaintainer.PackageBase == pkgbase @@ -50,7 +55,6 @@ def test_package_comaintainer_null_pkgbase_raises(user: User): PackageComaintainer(User=user, Priority=1) -def test_package_comaintainer_null_priority_raises(user: User, - pkgbase: PackageBase): +def test_package_comaintainer_null_priority_raises(user: User, pkgbase: PackageBase): with pytest.raises(IntegrityError): PackageComaintainer(User=user, PackageBase=pkgbase) diff --git a/test/test_package_comment.py b/test/test_package_comment.py index c89e23af..74f2895d 100644 --- a/test/test_package_comment.py +++ b/test/test_package_comment.py @@ -1,5 +1,4 @@ import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db @@ -17,9 +16,14 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - RealName="Test User", Passwd="testPassword", - AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + RealName="Test User", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user @@ -32,35 +36,46 @@ def pkgbase(user: User) -> PackageBase: def test_package_comment_creation(user: User, pkgbase: PackageBase): with db.begin(): - package_comment = db.create(PackageComment, PackageBase=pkgbase, - User=user, Comments="Test comment.", - RenderedComment="Test rendered comment.") + package_comment = db.create( + PackageComment, + PackageBase=pkgbase, + User=user, + Comments="Test comment.", + 
RenderedComment="Test rendered comment.", + ) assert bool(package_comment.ID) def test_package_comment_null_pkgbase_raises(user: User): with pytest.raises(IntegrityError): - PackageComment(User=user, Comments="Test comment.", - RenderedComment="Test rendered comment.") + PackageComment( + User=user, + Comments="Test comment.", + RenderedComment="Test rendered comment.", + ) def test_package_comment_null_user_raises(pkgbase: PackageBase): with pytest.raises(IntegrityError): - PackageComment(PackageBase=pkgbase, - Comments="Test comment.", - RenderedComment="Test rendered comment.") + PackageComment( + PackageBase=pkgbase, + Comments="Test comment.", + RenderedComment="Test rendered comment.", + ) -def test_package_comment_null_comments_raises(user: User, - pkgbase: PackageBase): +def test_package_comment_null_comments_raises(user: User, pkgbase: PackageBase): with pytest.raises(IntegrityError): - PackageComment(PackageBase=pkgbase, User=user, - RenderedComment="Test rendered comment.") + PackageComment( + PackageBase=pkgbase, User=user, RenderedComment="Test rendered comment." + ) -def test_package_comment_null_renderedcomment_defaults(user: User, - pkgbase: PackageBase): +def test_package_comment_null_renderedcomment_defaults( + user: User, pkgbase: PackageBase +): with db.begin(): - record = db.create(PackageComment, PackageBase=pkgbase, - User=user, Comments="Test comment.") + record = db.create( + PackageComment, PackageBase=pkgbase, User=user, Comments="Test comment." 
+ ) assert record.RenderedComment == str() diff --git a/test/test_package_dependency.py b/test/test_package_dependency.py index 2afbc1e3..9366bb55 100644 --- a/test/test_package_dependency.py +++ b/test/test_package_dependency.py @@ -1,5 +1,4 @@ import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db @@ -19,9 +18,14 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - RealName="Test User", Passwd=str(), - AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + RealName="Test User", + Passwd=str(), + AccountTypeID=USER_ID, + ) yield user @@ -29,16 +33,21 @@ def user() -> User: def package(user: User) -> Package: with db.begin(): pkgbase = db.create(PackageBase, Name="test-package", Maintainer=user) - package = db.create(Package, PackageBase=pkgbase, Name=pkgbase.Name, - Description="Test description.", - URL="https://test.package") + package = db.create( + Package, + PackageBase=pkgbase, + Name=pkgbase.Name, + Description="Test description.", + URL="https://test.package", + ) yield package def test_package_dependencies(user: User, package: Package): with db.begin(): - pkgdep = db.create(PackageDependency, Package=package, - DepTypeID=DEPENDS_ID, DepName="test-dep") + pkgdep = db.create( + PackageDependency, Package=package, DepTypeID=DEPENDS_ID, DepName="test-dep" + ) assert pkgdep.DepName == "test-dep" assert pkgdep.Package == package assert pkgdep in package.package_dependencies diff --git a/test/test_package_group.py b/test/test_package_group.py index 0cb83ee2..163f693d 100644 --- a/test/test_package_group.py +++ b/test/test_package_group.py @@ -1,5 +1,4 @@ import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db @@ -19,9 +18,14 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - 
RealName="Test User", Passwd="testPassword", - AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + RealName="Test User", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user diff --git a/test/test_package_keyword.py b/test/test_package_keyword.py index ff466efc..b52547f9 100644 --- a/test/test_package_keyword.py +++ b/test/test_package_keyword.py @@ -1,5 +1,4 @@ import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db @@ -17,24 +16,27 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - RealName="Test User", Passwd="testPassword", - AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + RealName="Test User", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user @pytest.fixture def pkgbase(user: User) -> PackageBase: with db.begin(): - pkgbase = db.create(PackageBase, Name="beautiful-package", - Maintainer=user) + pkgbase = db.create(PackageBase, Name="beautiful-package", Maintainer=user) yield pkgbase def test_package_keyword(pkgbase: PackageBase): with db.begin(): - pkg_keyword = db.create(PackageKeyword, PackageBase=pkgbase, - Keyword="test") + pkg_keyword = db.create(PackageKeyword, PackageBase=pkgbase, Keyword="test") assert pkg_keyword in pkgbase.keywords assert pkgbase == pkg_keyword.PackageBase diff --git a/test/test_package_license.py b/test/test_package_license.py index c43423b8..b9242647 100644 --- a/test/test_package_license.py +++ b/test/test_package_license.py @@ -1,5 +1,4 @@ import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db @@ -19,9 +18,14 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - RealName="Test User", Passwd="testPassword", - AccountTypeID=USER_ID) + user = db.create( + User, + 
Username="test", + Email="test@example.org", + RealName="Test User", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user @@ -42,8 +46,7 @@ def package(user: User, license: License): def test_package_license(license: License, package: Package): with db.begin(): - package_license = db.create(PackageLicense, Package=package, - License=license) + package_license = db.create(PackageLicense, Package=package, License=license) assert package_license.License == license assert package_license.Package == package diff --git a/test/test_package_notification.py b/test/test_package_notification.py index e7a72a43..27a03e84 100644 --- a/test/test_package_notification.py +++ b/test/test_package_notification.py @@ -1,5 +1,4 @@ import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db @@ -16,8 +15,13 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - RealName="Test User", Passwd="testPassword") + user = db.create( + User, + Username="test", + Email="test@example.org", + RealName="Test User", + Passwd="testPassword", + ) yield user @@ -31,7 +35,8 @@ def pkgbase(user: User) -> PackageBase: def test_package_notification_creation(user: User, pkgbase: PackageBase): with db.begin(): package_notification = db.create( - PackageNotification, User=user, PackageBase=pkgbase) + PackageNotification, User=user, PackageBase=pkgbase + ) assert bool(package_notification) assert package_notification.User == user assert package_notification.PackageBase == pkgbase diff --git a/test/test_package_relation.py b/test/test_package_relation.py index 6e9a5545..c20b1394 100644 --- a/test/test_package_relation.py +++ b/test/test_package_relation.py @@ -1,5 +1,4 @@ import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db @@ -19,9 +18,14 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", 
Email="test@example.org", - RealName="Test User", Passwd="testPassword", - AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + RealName="Test User", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user @@ -29,17 +33,24 @@ def user() -> User: def package(user: User) -> Package: with db.begin(): pkgbase = db.create(PackageBase, Name="test-package", Maintainer=user) - package = db.create(Package, PackageBase=pkgbase, Name=pkgbase.Name, - Description="Test description.", - URL="https://test.package") + package = db.create( + Package, + PackageBase=pkgbase, + Name=pkgbase.Name, + Description="Test description.", + URL="https://test.package", + ) yield package def test_package_relation(package: Package): with db.begin(): - pkgrel = db.create(PackageRelation, Package=package, - RelTypeID=CONFLICTS_ID, - RelName="test-relation") + pkgrel = db.create( + PackageRelation, + Package=package, + RelTypeID=CONFLICTS_ID, + RelName="test-relation", + ) assert pkgrel.RelName == "test-relation" assert pkgrel.Package == package diff --git a/test/test_package_request.py b/test/test_package_request.py index 3474c565..a69a0617 100644 --- a/test/test_package_request.py +++ b/test/test_package_request.py @@ -1,12 +1,20 @@ import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db, time from aurweb.models.account_type import USER_ID from aurweb.models.package_base import PackageBase -from aurweb.models.package_request import (ACCEPTED, ACCEPTED_ID, CLOSED, CLOSED_ID, PENDING, PENDING_ID, REJECTED, - REJECTED_ID, PackageRequest) +from aurweb.models.package_request import ( + ACCEPTED, + ACCEPTED_ID, + CLOSED, + CLOSED_ID, + PENDING, + PENDING_ID, + REJECTED, + REJECTED_ID, + PackageRequest, +) from aurweb.models.request_type import MERGE_ID from aurweb.models.user import User @@ -19,9 +27,14 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", 
Email="test@example.org", - RealName="Test User", Passwd="testPassword", - AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + RealName="Test User", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user @@ -34,10 +47,15 @@ def pkgbase(user: User) -> PackageBase: def test_package_request_creation(user: User, pkgbase: PackageBase): with db.begin(): - package_request = db.create(PackageRequest, ReqTypeID=MERGE_ID, - User=user, PackageBase=pkgbase, - PackageBaseName=pkgbase.Name, - Comments=str(), ClosureComment=str()) + package_request = db.create( + PackageRequest, + ReqTypeID=MERGE_ID, + User=user, + PackageBase=pkgbase, + PackageBaseName=pkgbase.Name, + Comments=str(), + ClosureComment=str(), + ) assert bool(package_request.ID) assert package_request.User == user @@ -54,11 +72,17 @@ def test_package_request_creation(user: User, pkgbase: PackageBase): def test_package_request_closed(user: User, pkgbase: PackageBase): ts = time.utcnow() with db.begin(): - package_request = db.create(PackageRequest, ReqTypeID=MERGE_ID, - User=user, PackageBase=pkgbase, - PackageBaseName=pkgbase.Name, - Closer=user, ClosedTS=ts, - Comments=str(), ClosureComment=str()) + package_request = db.create( + PackageRequest, + ReqTypeID=MERGE_ID, + User=user, + PackageBase=pkgbase, + PackageBaseName=pkgbase.Name, + Closer=user, + ClosedTS=ts, + Comments=str(), + ClosureComment=str(), + ) assert package_request.Closer == user assert package_request.ClosedTS == ts @@ -67,61 +91,87 @@ def test_package_request_closed(user: User, pkgbase: PackageBase): assert package_request in user.closed_requests -def test_package_request_null_request_type_raises(user: User, - pkgbase: PackageBase): +def test_package_request_null_request_type_raises(user: User, pkgbase: PackageBase): with pytest.raises(IntegrityError): - PackageRequest(User=user, PackageBase=pkgbase, - PackageBaseName=pkgbase.Name, - Comments=str(), ClosureComment=str()) + PackageRequest( 
+ User=user, + PackageBase=pkgbase, + PackageBaseName=pkgbase.Name, + Comments=str(), + ClosureComment=str(), + ) def test_package_request_null_user_raises(pkgbase: PackageBase): with pytest.raises(IntegrityError): - PackageRequest(ReqTypeID=MERGE_ID, - PackageBase=pkgbase, PackageBaseName=pkgbase.Name, - Comments=str(), ClosureComment=str()) + PackageRequest( + ReqTypeID=MERGE_ID, + PackageBase=pkgbase, + PackageBaseName=pkgbase.Name, + Comments=str(), + ClosureComment=str(), + ) -def test_package_request_null_package_base_raises(user: User, - pkgbase: PackageBase): +def test_package_request_null_package_base_raises(user: User, pkgbase: PackageBase): with pytest.raises(IntegrityError): - PackageRequest(ReqTypeID=MERGE_ID, - User=user, PackageBaseName=pkgbase.Name, - Comments=str(), ClosureComment=str()) + PackageRequest( + ReqTypeID=MERGE_ID, + User=user, + PackageBaseName=pkgbase.Name, + Comments=str(), + ClosureComment=str(), + ) -def test_package_request_null_package_base_name_raises(user: User, - pkgbase: PackageBase): +def test_package_request_null_package_base_name_raises( + user: User, pkgbase: PackageBase +): with pytest.raises(IntegrityError): - PackageRequest(ReqTypeID=MERGE_ID, - User=user, PackageBase=pkgbase, - Comments=str(), ClosureComment=str()) + PackageRequest( + ReqTypeID=MERGE_ID, + User=user, + PackageBase=pkgbase, + Comments=str(), + ClosureComment=str(), + ) -def test_package_request_null_comments_raises(user: User, - pkgbase: PackageBase): +def test_package_request_null_comments_raises(user: User, pkgbase: PackageBase): with pytest.raises(IntegrityError): - PackageRequest(ReqTypeID=MERGE_ID, User=user, - PackageBase=pkgbase, PackageBaseName=pkgbase.Name, - ClosureComment=str()) + PackageRequest( + ReqTypeID=MERGE_ID, + User=user, + PackageBase=pkgbase, + PackageBaseName=pkgbase.Name, + ClosureComment=str(), + ) -def test_package_request_null_closure_comment_raises(user: User, - pkgbase: PackageBase): +def 
test_package_request_null_closure_comment_raises(user: User, pkgbase: PackageBase): with pytest.raises(IntegrityError): - PackageRequest(ReqTypeID=MERGE_ID, User=user, - PackageBase=pkgbase, PackageBaseName=pkgbase.Name, - Comments=str()) + PackageRequest( + ReqTypeID=MERGE_ID, + User=user, + PackageBase=pkgbase, + PackageBaseName=pkgbase.Name, + Comments=str(), + ) def test_package_request_status_display(user: User, pkgbase: PackageBase): - """ Test status_display() based on the Status column value. """ + """Test status_display() based on the Status column value.""" with db.begin(): - pkgreq = db.create(PackageRequest, ReqTypeID=MERGE_ID, - User=user, PackageBase=pkgbase, - PackageBaseName=pkgbase.Name, - Comments=str(), ClosureComment=str(), - Status=PENDING_ID) + pkgreq = db.create( + PackageRequest, + ReqTypeID=MERGE_ID, + User=user, + PackageBase=pkgbase, + PackageBaseName=pkgbase.Name, + Comments=str(), + ClosureComment=str(), + Status=PENDING_ID, + ) assert pkgreq.status_display() == PENDING with db.begin(): diff --git a/test/test_package_source.py b/test/test_package_source.py index e5797f90..06230580 100644 --- a/test/test_package_source.py +++ b/test/test_package_source.py @@ -1,5 +1,4 @@ import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db @@ -18,9 +17,14 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - RealName="Test User", Passwd="testPassword", - AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + RealName="Test User", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user diff --git a/test/test_package_vote.py b/test/test_package_vote.py index 24d2fdd2..9a868262 100644 --- a/test/test_package_vote.py +++ b/test/test_package_vote.py @@ -1,5 +1,4 @@ import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db, time @@ -17,9 +16,14 @@ def setup(db_test): 
@pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - RealName="Test User", Passwd=str(), - AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + RealName="Test User", + Passwd=str(), + AccountTypeID=USER_ID, + ) yield user @@ -34,8 +38,7 @@ def test_package_vote_creation(user: User, pkgbase: PackageBase): ts = time.utcnow() with db.begin(): - package_vote = db.create(PackageVote, User=user, - PackageBase=pkgbase, VoteTS=ts) + package_vote = db.create(PackageVote, User=user, PackageBase=pkgbase, VoteTS=ts) assert bool(package_vote) assert package_vote.User == user assert package_vote.PackageBase == pkgbase diff --git a/test/test_packages_routes.py b/test/test_packages_routes.py index 62f89e23..a707bbac 100644 --- a/test/test_packages_routes.py +++ b/test/test_packages_routes.py @@ -1,10 +1,8 @@ import re - from http import HTTPStatus from unittest import mock import pytest - from fastapi.testclient import TestClient from aurweb import asgi, db, time @@ -22,7 +20,12 @@ from aurweb.models.package_notification import PackageNotification from aurweb.models.package_relation import PackageRelation from aurweb.models.package_request import PackageRequest from aurweb.models.package_vote import PackageVote -from aurweb.models.relation_type import CONFLICTS_ID, PROVIDES_ID, REPLACES_ID, RelationType +from aurweb.models.relation_type import ( + CONFLICTS_ID, + PROVIDES_ID, + REPLACES_ID, + RelationType, +) from aurweb.models.request_type import DELETION_ID, RequestType from aurweb.models.user import User from aurweb.testing.html import get_errors, get_successes, parse_root @@ -34,30 +37,24 @@ def package_endpoint(package: Package) -> str: def create_package(pkgname: str, maintainer: User) -> Package: - pkgbase = db.create(PackageBase, - Name=pkgname, - Maintainer=maintainer) + pkgbase = db.create(PackageBase, Name=pkgname, Maintainer=maintainer) return 
db.create(Package, Name=pkgbase.Name, PackageBase=pkgbase) -def create_package_dep(package: Package, depname: str, - dep_type_name: str = "depends") -> PackageDependency: - dep_type = db.query(DependencyType, - DependencyType.Name == dep_type_name).first() - return db.create(PackageDependency, - DependencyType=dep_type, - Package=package, - DepName=depname) +def create_package_dep( + package: Package, depname: str, dep_type_name: str = "depends" +) -> PackageDependency: + dep_type = db.query(DependencyType, DependencyType.Name == dep_type_name).first() + return db.create( + PackageDependency, DependencyType=dep_type, Package=package, DepName=depname + ) -def create_package_rel(package: Package, - relname: str) -> PackageRelation: - rel_type = db.query(RelationType, - RelationType.ID == PROVIDES_ID).first() - return db.create(PackageRelation, - RelationType=rel_type, - Package=package, - RelName=relname) +def create_package_rel(package: Package, relname: str) -> PackageRelation: + rel_type = db.query(RelationType, RelationType.ID == PROVIDES_ID).first() + return db.create( + PackageRelation, RelationType=rel_type, Package=package, RelName=relname + ) @pytest.fixture(autouse=True) @@ -67,64 +64,73 @@ def setup(db_test): @pytest.fixture def client() -> TestClient: - """ Yield a FastAPI TestClient. """ + """Yield a FastAPI TestClient.""" yield TestClient(app=asgi.app) def create_user(username: str) -> User: with db.begin(): - user = db.create(User, Username=username, - Email=f"{username}@example.org", - Passwd="testPassword", - AccountTypeID=USER_ID) + user = db.create( + User, + Username=username, + Email=f"{username}@example.org", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) return user @pytest.fixture def user() -> User: - """ Yield a user. """ + """Yield a user.""" user = create_user("test") yield user @pytest.fixture def maintainer() -> User: - """ Yield a specific User used to maintain packages. 
""" + """Yield a specific User used to maintain packages.""" account_type = db.query(AccountType, AccountType.ID == USER_ID).first() with db.begin(): - maintainer = db.create(User, Username="test_maintainer", - Email="test_maintainer@example.org", - Passwd="testPassword", - AccountType=account_type) + maintainer = db.create( + User, + Username="test_maintainer", + Email="test_maintainer@example.org", + Passwd="testPassword", + AccountType=account_type, + ) yield maintainer @pytest.fixture def tu_user(): - tu_type = db.query(AccountType, - AccountType.AccountType == "Trusted User").first() + tu_type = db.query(AccountType, AccountType.AccountType == "Trusted User").first() with db.begin(): - tu_user = db.create(User, Username="test_tu", - Email="test_tu@example.org", - RealName="Test TU", Passwd="testPassword", - AccountType=tu_type) + tu_user = db.create( + User, + Username="test_tu", + Email="test_tu@example.org", + RealName="Test TU", + Passwd="testPassword", + AccountType=tu_type, + ) yield tu_user @pytest.fixture def package(maintainer: User) -> Package: - """ Yield a Package created by user. """ + """Yield a Package created by user.""" now = time.utcnow() with db.begin(): - pkgbase = db.create(PackageBase, - Name="test-package", - Maintainer=maintainer, - Packager=maintainer, - Submitter=maintainer, - ModifiedTS=now) - package = db.create(Package, - PackageBase=pkgbase, - Name=pkgbase.Name) + pkgbase = db.create( + PackageBase, + Name="test-package", + Maintainer=maintainer, + Packager=maintainer, + Submitter=maintainer, + ModifiedTS=now, + ) + package = db.create(Package, PackageBase=pkgbase, Name=pkgbase.Name) yield package @@ -135,29 +141,34 @@ def pkgbase(package: Package) -> PackageBase: @pytest.fixture def target(maintainer: User) -> PackageBase: - """ Merge target. 
""" + """Merge target.""" now = time.utcnow() with db.begin(): - pkgbase = db.create(PackageBase, Name="target-package", - Maintainer=maintainer, - Packager=maintainer, - Submitter=maintainer, - ModifiedTS=now) + pkgbase = db.create( + PackageBase, + Name="target-package", + Maintainer=maintainer, + Packager=maintainer, + Submitter=maintainer, + ModifiedTS=now, + ) db.create(Package, PackageBase=pkgbase, Name=pkgbase.Name) yield pkgbase @pytest.fixture def pkgreq(user: User, pkgbase: PackageBase) -> PackageRequest: - """ Yield a PackageRequest related to `pkgbase`. """ + """Yield a PackageRequest related to `pkgbase`.""" with db.begin(): - pkgreq = db.create(PackageRequest, - ReqTypeID=DELETION_ID, - User=user, - PackageBase=pkgbase, - PackageBaseName=pkgbase.Name, - Comments=f"Deletion request for {pkgbase.Name}", - ClosureComment=str()) + pkgreq = db.create( + PackageRequest, + ReqTypeID=DELETION_ID, + User=user, + PackageBase=pkgbase, + PackageBaseName=pkgbase.Name, + Comments=f"Deletion request for {pkgbase.Name}", + ClosureComment=str(), + ) yield pkgreq @@ -166,31 +177,33 @@ def comment(user: User, package: Package) -> PackageComment: pkgbase = package.PackageBase now = time.utcnow() with db.begin(): - comment = db.create(PackageComment, - User=user, - PackageBase=pkgbase, - Comments="Test comment.", - RenderedComment=str(), - CommentTS=now) + comment = db.create( + PackageComment, + User=user, + PackageBase=pkgbase, + Comments="Test comment.", + RenderedComment=str(), + CommentTS=now, + ) yield comment @pytest.fixture def packages(maintainer: User) -> list[Package]: - """ Yield 55 packages named pkg_0 .. pkg_54. """ + """Yield 55 packages named pkg_0 .. 
pkg_54.""" packages_ = [] now = time.utcnow() with db.begin(): for i in range(55): - pkgbase = db.create(PackageBase, - Name=f"pkg_{i}", - Maintainer=maintainer, - Packager=maintainer, - Submitter=maintainer, - ModifiedTS=now) - package = db.create(Package, - PackageBase=pkgbase, - Name=f"pkg_{i}") + pkgbase = db.create( + PackageBase, + Name=f"pkg_{i}", + Maintainer=maintainer, + Packager=maintainer, + Submitter=maintainer, + ModifiedTS=now, + ) + package = db.create(Package, PackageBase=pkgbase, Name=f"pkg_{i}") packages_.append(package) yield packages_ @@ -203,40 +216,56 @@ def test_package_not_found(client: TestClient): def test_package(client: TestClient, package: Package): - """ Test a single / packages / {name} route. """ + """Test a single / packages / {name} route.""" with db.begin(): - db.create(PackageRelation, PackageID=package.ID, - RelTypeID=PROVIDES_ID, - RelName="test_provider1") - db.create(PackageRelation, PackageID=package.ID, - RelTypeID=PROVIDES_ID, - RelName="test_provider2") + db.create( + PackageRelation, + PackageID=package.ID, + RelTypeID=PROVIDES_ID, + RelName="test_provider1", + ) + db.create( + PackageRelation, + PackageID=package.ID, + RelTypeID=PROVIDES_ID, + RelName="test_provider2", + ) - db.create(PackageRelation, PackageID=package.ID, - RelTypeID=REPLACES_ID, - RelName="test_replacer1") - db.create(PackageRelation, PackageID=package.ID, - RelTypeID=REPLACES_ID, - RelName="test_replacer2") + db.create( + PackageRelation, + PackageID=package.ID, + RelTypeID=REPLACES_ID, + RelName="test_replacer1", + ) + db.create( + PackageRelation, + PackageID=package.ID, + RelTypeID=REPLACES_ID, + RelName="test_replacer2", + ) - db.create(PackageRelation, PackageID=package.ID, - RelTypeID=CONFLICTS_ID, - RelName="test_conflict1") - db.create(PackageRelation, PackageID=package.ID, - RelTypeID=CONFLICTS_ID, - RelName="test_conflict2") + db.create( + PackageRelation, + PackageID=package.ID, + RelTypeID=CONFLICTS_ID, + RelName="test_conflict1", + ) + 
db.create( + PackageRelation, + PackageID=package.ID, + RelTypeID=CONFLICTS_ID, + RelName="test_conflict2", + ) # Create some licenses. licenses = [ db.create(License, Name="test_license1"), - db.create(License, Name="test_license2") + db.create(License, Name="test_license2"), ] - db.create(PackageLicense, PackageID=package.ID, - License=licenses[0]) - db.create(PackageLicense, PackageID=package.ID, - License=licenses[1]) + db.create(PackageLicense, PackageID=package.ID, License=licenses[0]) + db.create(PackageLicense, PackageID=package.ID, License=licenses[1]) with client as request: resp = request.get(package_endpoint(package)) @@ -311,7 +340,7 @@ def paged_depends_required(client: TestClient, package: Package): params={ "all_deps": True, "all_reqs": True, - } + }, ) assert resp.status_code == int(HTTPStatus.OK) @@ -321,10 +350,15 @@ def paged_depends_required(client: TestClient, package: Package): def test_package_comments(client: TestClient, user: User, package: Package): - now = (time.utcnow()) + now = time.utcnow() with db.begin(): - comment = db.create(PackageComment, PackageBase=package.PackageBase, - User=user, Comments="Test comment", CommentTS=now) + comment = db.create( + PackageComment, + PackageBase=package.PackageBase, + User=user, + Comments="Test comment", + CommentTS=now, + ) cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: @@ -332,17 +366,18 @@ def test_package_comments(client: TestClient, user: User, package: Package): assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) - expected = [ - comment.Comments - ] - comments = root.xpath('.//div[contains(@class, "package-comments")]' - '/div[@class="article-content"]/div/text()') + expected = [comment.Comments] + comments = root.xpath( + './/div[contains(@class, "package-comments")]' + '/div[@class="article-content"]/div/text()' + ) for i, row in enumerate(expected): assert comments[i].strip() == row -def test_package_requests_display(client: 
TestClient, user: User, - package: Package, pkgreq: PackageRequest): +def test_package_requests_display( + client: TestClient, user: User, package: Package, pkgreq: PackageRequest +): # Test that a single request displays "1 pending request". with client as request: resp = request.get(package_endpoint(package)) @@ -355,11 +390,15 @@ def test_package_requests_display(client: TestClient, user: User, type_ = db.query(RequestType, RequestType.ID == DELETION_ID).first() with db.begin(): - db.create(PackageRequest, PackageBase=package.PackageBase, - PackageBaseName=package.PackageBase.Name, - User=user, RequestType=type_, - Comments="Test comment2.", - ClosureComment=str()) + db.create( + PackageRequest, + PackageBase=package.PackageBase, + PackageBaseName=package.PackageBase.Name, + User=user, + RequestType=type_, + Comments="Test comment2.", + ClosureComment=str(), + ) # Test that a two requests display "2 pending requests". with client as request: @@ -372,11 +411,10 @@ def test_package_requests_display(client: TestClient, user: User, assert target.text.strip() == "2 pending requests" -def test_package_authenticated(client: TestClient, user: User, - package: Package): - """ We get the same here for either authenticated or not +def test_package_authenticated(client: TestClient, user: User, package: Package): + """We get the same here for either authenticated or not authenticated. Form inputs are presented to maintainers. - This process also occurs when pkgbase.html is rendered. 
""" + This process also occurs when pkgbase.html is rendered.""" cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: resp = request.get(package_endpoint(package), cookies=cookies) @@ -390,7 +428,7 @@ def test_package_authenticated(client: TestClient, user: User, "Flag package out-of-date", "Vote for this package", "Enable notifications", - "Submit Request" + "Submit Request", ] for expected_text in expected: assert expected_text in resp.text @@ -402,9 +440,9 @@ def test_package_authenticated(client: TestClient, user: User, assert len(target) == 0 -def test_package_authenticated_maintainer(client: TestClient, - maintainer: User, - package: Package): +def test_package_authenticated_maintainer( + client: TestClient, maintainer: User, package: Package +): cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with client as request: resp = request.get(package_endpoint(package), cookies=cookies) @@ -420,15 +458,13 @@ def test_package_authenticated_maintainer(client: TestClient, "Enable notifications", "Manage Co-Maintainers", "Submit Request", - "Disown Package" + "Disown Package", ] for expected_text in expected: assert expected_text in resp.text -def test_package_authenticated_tu(client: TestClient, - tu_user: User, - package: Package): +def test_package_authenticated_tu(client: TestClient, tu_user: User, package: Package): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: resp = request.get(package_endpoint(package), cookies=cookies) @@ -446,14 +482,13 @@ def test_package_authenticated_tu(client: TestClient, "Submit Request", "Delete Package", "Merge Package", - "Disown Package" + "Disown Package", ] for expected_text in expected: assert expected_text in resp.text -def test_package_dependencies(client: TestClient, maintainer: User, - package: Package): +def test_package_dependencies(client: TestClient, maintainer: User, package: Package): # Create a normal dependency of type depends. 
with db.begin(): dep_pkg = create_package("test-dep-1", maintainer) @@ -461,32 +496,32 @@ def test_package_dependencies(client: TestClient, maintainer: User, # Also, create a makedepends. make_dep_pkg = create_package("test-dep-2", maintainer) - make_dep = create_package_dep(package, make_dep_pkg.Name, - dep_type_name="makedepends") + make_dep = create_package_dep( + package, make_dep_pkg.Name, dep_type_name="makedepends" + ) make_dep.DepArch = "x86_64" # And... a checkdepends! check_dep_pkg = create_package("test-dep-3", maintainer) - create_package_dep(package, check_dep_pkg.Name, - dep_type_name="checkdepends") + create_package_dep(package, check_dep_pkg.Name, dep_type_name="checkdepends") # Geez. Just stop. This is optdepends. opt_dep_pkg = create_package("test-dep-4", maintainer) - create_package_dep(package, opt_dep_pkg.Name, - dep_type_name="optdepends") + create_package_dep(package, opt_dep_pkg.Name, dep_type_name="optdepends") # Heh. Another optdepends to test one with a description. opt_desc_dep_pkg = create_package("test-dep-5", maintainer) - opt_desc_dep = create_package_dep(package, opt_desc_dep_pkg.Name, - dep_type_name="optdepends") + opt_desc_dep = create_package_dep( + package, opt_desc_dep_pkg.Name, dep_type_name="optdepends" + ) opt_desc_dep.DepDesc = "Test description." - broken_dep = create_package_dep(package, "test-dep-6", - dep_type_name="depends") + broken_dep = create_package_dep(package, "test-dep-6", dep_type_name="depends") # Create an official provider record. - db.create(OfficialProvider, Name="test-dep-99", - Repo="core", Provides="test-dep-99") + db.create( + OfficialProvider, Name="test-dep-99", Repo="core", Provides="test-dep-99" + ) create_package_dep(package, "test-dep-99") # Also, create a provider who provides our test-dep-99. @@ -498,13 +533,14 @@ def test_package_dependencies(client: TestClient, maintainer: User, assert resp.status_code == int(HTTPStatus.OK) # Let's make sure all the non-broken deps are ordered as we expect. 
- expected = list(filter( - lambda e: e.is_package(), - package.package_dependencies.order_by( - PackageDependency.DepTypeID.asc(), - PackageDependency.DepName.asc() - ).all() - )) + expected = list( + filter( + lambda e: e.is_package(), + package.package_dependencies.order_by( + PackageDependency.DepTypeID.asc(), PackageDependency.DepName.asc() + ).all(), + ) + ) root = parse_root(resp.text) pkgdeps = root.findall('.//ul[@id="pkgdepslist"]/li/a') for i, expectation in enumerate(expected): @@ -512,7 +548,7 @@ def test_package_dependencies(client: TestClient, maintainer: User, # Let's make sure the DepArch was displayed for our target make dep. arch = root.findall('.//ul[@id="pkgdepslist"]/li')[3] - arch = arch.xpath('./em')[0] + arch = arch.xpath("./em")[0] assert arch.text.strip() == "(make, x86_64)" # And let's make sure that the broken package was displayed. @@ -522,16 +558,19 @@ def test_package_dependencies(client: TestClient, maintainer: User, def test_packages(client: TestClient, packages: list[Package]): with client as request: - response = request.get("/packages", params={ - "SeB": "X", # "X" isn't valid, defaults to "nd" - "PP": "1 or 1", - "O": "0 or 0" - }) + response = request.get( + "/packages", + params={ + "SeB": "X", # "X" isn't valid, defaults to "nd" + "PP": "1 or 1", + "O": "0 or 0", + }, + ) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) stats = root.xpath('//div[@class="pkglist-stats"]/p')[0] - pager_text = re.sub(r'\s+', " ", stats.text.replace("\n", "").strip()) + pager_text = re.sub(r"\s+", " ", stats.text.replace("\n", "").strip()) assert pager_text == "55 packages found. Page 1 of 2." 
rows = root.xpath('//table[@class="results"]/tbody/tr') @@ -551,10 +590,7 @@ def test_packages_empty(client: TestClient): def test_packages_search_by_name(client: TestClient, packages: list[Package]): with client as request: - response = request.get("/packages", params={ - "SeB": "n", - "K": "pkg_" - }) + response = request.get("/packages", params={"SeB": "n", "K": "pkg_"}) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -563,13 +599,9 @@ def test_packages_search_by_name(client: TestClient, packages: list[Package]): assert len(rows) == 50 # Default per-page -def test_packages_search_by_exact_name(client: TestClient, - packages: list[Package]): +def test_packages_search_by_exact_name(client: TestClient, packages: list[Package]): with client as request: - response = request.get("/packages", params={ - "SeB": "N", - "K": "pkg_" - }) + response = request.get("/packages", params={"SeB": "N", "K": "pkg_"}) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -579,10 +611,7 @@ def test_packages_search_by_exact_name(client: TestClient, assert len(rows) == 0 with client as request: - response = request.get("/packages", params={ - "SeB": "N", - "K": "pkg_1" - }) + response = request.get("/packages", params={"SeB": "N", "K": "pkg_1"}) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -592,13 +621,9 @@ def test_packages_search_by_exact_name(client: TestClient, assert len(rows) == 1 -def test_packages_search_by_pkgbase(client: TestClient, - packages: list[Package]): +def test_packages_search_by_pkgbase(client: TestClient, packages: list[Package]): with client as request: - response = request.get("/packages", params={ - "SeB": "b", - "K": "pkg_" - }) + response = request.get("/packages", params={"SeB": "b", "K": "pkg_"}) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -607,13 +632,9 @@ def test_packages_search_by_pkgbase(client: 
TestClient, assert len(rows) == 50 -def test_packages_search_by_exact_pkgbase(client: TestClient, - packages: list[Package]): +def test_packages_search_by_exact_pkgbase(client: TestClient, packages: list[Package]): with client as request: - response = request.get("/packages", params={ - "SeB": "B", - "K": "pkg_" - }) + response = request.get("/packages", params={"SeB": "B", "K": "pkg_"}) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -621,10 +642,7 @@ def test_packages_search_by_exact_pkgbase(client: TestClient, assert len(rows) == 0 with client as request: - response = request.get("/packages", params={ - "SeB": "B", - "K": "pkg_1" - }) + response = request.get("/packages", params={"SeB": "B", "K": "pkg_1"}) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -632,14 +650,10 @@ def test_packages_search_by_exact_pkgbase(client: TestClient, assert len(rows) == 1 -def test_packages_search_by_keywords(client: TestClient, - packages: list[Package]): +def test_packages_search_by_keywords(client: TestClient, packages: list[Package]): # None of our packages have keywords, so this query should return nothing. with client as request: - response = request.get("/packages", params={ - "SeB": "k", - "K": "testKeyword" - }) + response = request.get("/packages", params={"SeB": "k", "K": "testKeyword"}) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -649,16 +663,13 @@ def test_packages_search_by_keywords(client: TestClient, # But now, let's create the keyword for the first package. package = packages[0] with db.begin(): - db.create(PackageKeyword, - PackageBase=package.PackageBase, - Keyword="testKeyword") + db.create( + PackageKeyword, PackageBase=package.PackageBase, Keyword="testKeyword" + ) # And request packages with that keyword, we should get 1 result. 
with client as request: - response = request.get("/packages", params={ - "SeB": "k", - "K": "testKeyword" - }) + response = request.get("/packages", params={"SeB": "k", "K": "testKeyword"}) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -666,16 +677,15 @@ def test_packages_search_by_keywords(client: TestClient, assert len(rows) == 1 -def test_packages_search_by_maintainer(client: TestClient, - maintainer: User, - package: Package): +def test_packages_search_by_maintainer( + client: TestClient, maintainer: User, package: Package +): # We should expect that searching by `package`'s maintainer # returns `package` in the results. with client as request: - response = request.get("/packages", params={ - "SeB": "m", - "K": maintainer.Username - }) + response = request.get( + "/packages", params={"SeB": "m", "K": maintainer.Username} + ) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) rows = root.xpath('//table[@class="results"]/tbody/tr') @@ -704,15 +714,14 @@ def test_packages_search_by_maintainer(client: TestClient, assert len(rows) == 1 -def test_packages_search_by_comaintainer(client: TestClient, - maintainer: User, - package: Package): +def test_packages_search_by_comaintainer( + client: TestClient, maintainer: User, package: Package +): # Nobody's a comaintainer yet. with client as request: - response = request.get("/packages", params={ - "SeB": "c", - "K": maintainer.Username - }) + response = request.get( + "/packages", params={"SeB": "c", "K": maintainer.Username} + ) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -721,17 +730,18 @@ def test_packages_search_by_comaintainer(client: TestClient, # Now, we create a comaintainer. 
with db.begin(): - db.create(PackageComaintainer, - PackageBase=package.PackageBase, - User=maintainer, - Priority=1) + db.create( + PackageComaintainer, + PackageBase=package.PackageBase, + User=maintainer, + Priority=1, + ) # Then test that it's returned by our search. with client as request: - response = request.get("/packages", params={ - "SeB": "c", - "K": maintainer.Username - }) + response = request.get( + "/packages", params={"SeB": "c", "K": maintainer.Username} + ) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -739,15 +749,18 @@ def test_packages_search_by_comaintainer(client: TestClient, assert len(rows) == 1 -def test_packages_search_by_co_or_maintainer(client: TestClient, - maintainer: User, - package: Package): +def test_packages_search_by_co_or_maintainer( + client: TestClient, maintainer: User, package: Package +): with client as request: - response = request.get("/packages", params={ - "SeB": "M", - "SB": "BLAH", # Invalid SB; gets reset to default "n". - "K": maintainer.Username - }) + response = request.get( + "/packages", + params={ + "SeB": "M", + "SB": "BLAH", # Invalid SB; gets reset to default "n". 
+ "K": maintainer.Username, + }, + ) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -755,19 +768,18 @@ def test_packages_search_by_co_or_maintainer(client: TestClient, assert len(rows) == 1 with db.begin(): - user = db.create(User, Username="comaintainer", - Email="comaintainer@example.org", - Passwd="testPassword") - db.create(PackageComaintainer, - PackageBase=package.PackageBase, - User=user, - Priority=1) + user = db.create( + User, + Username="comaintainer", + Email="comaintainer@example.org", + Passwd="testPassword", + ) + db.create( + PackageComaintainer, PackageBase=package.PackageBase, User=user, Priority=1 + ) with client as request: - response = request.get("/packages", params={ - "SeB": "M", - "K": user.Username - }) + response = request.get("/packages", params={"SeB": "M", "K": user.Username}) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -775,14 +787,13 @@ def test_packages_search_by_co_or_maintainer(client: TestClient, assert len(rows) == 1 -def test_packages_search_by_submitter(client: TestClient, - maintainer: User, - package: Package): +def test_packages_search_by_submitter( + client: TestClient, maintainer: User, package: Package +): with client as request: - response = request.get("/packages", params={ - "SeB": "s", - "K": maintainer.Username - }) + response = request.get( + "/packages", params={"SeB": "s", "K": maintainer.Username} + ) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -792,184 +803,184 @@ def test_packages_search_by_submitter(client: TestClient, def test_packages_sort_by_name(client: TestClient, packages: list[Package]): with client as request: - response = request.get("/packages", params={ - "SB": "n", # Name - "SO": "a", # Ascending - "PP": "150" - }) + response = request.get( + "/packages", params={"SB": "n", "SO": "a", "PP": "150"} # Name # Ascending + ) assert response.status_code == int(HTTPStatus.OK) root = 
parse_root(response.text) rows = root.xpath('//table[@class="results"]/tbody/tr') - rows = [row.xpath('./td/a')[0].text.strip() for row in rows] + rows = [row.xpath("./td/a")[0].text.strip() for row in rows] with client as request: - response2 = request.get("/packages", params={ - "SB": "n", # Name - "SO": "d", # Ascending - "PP": "150" - }) + response2 = request.get( + "/packages", params={"SB": "n", "SO": "d", "PP": "150"} # Name # Ascending + ) assert response2.status_code == int(HTTPStatus.OK) root = parse_root(response2.text) rows2 = root.xpath('//table[@class="results"]/tbody/tr') - rows2 = [row.xpath('./td/a')[0].text.strip() for row in rows2] + rows2 = [row.xpath("./td/a")[0].text.strip() for row in rows2] assert rows == list(reversed(rows2)) -def test_packages_sort_by_votes(client: TestClient, - maintainer: User, - packages: list[Package]): +def test_packages_sort_by_votes( + client: TestClient, maintainer: User, packages: list[Package] +): # Set the first package's NumVotes to 1. with db.begin(): packages[0].PackageBase.NumVotes = 1 # Test that, by default, the first result is what we just set above. with client as request: - response = request.get("/packages", params={ - "SB": "v" # Votes. - }) + response = request.get("/packages", params={"SB": "v"}) # Votes. assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) rows = root.xpath('//table[@class="results"]/tbody/tr') - votes = rows[0].xpath('./td')[2] # The third column of the first row. + votes = rows[0].xpath("./td")[2] # The third column of the first row. assert votes.text.strip() == "1" # Now, test that with an ascending order, the last result is # the one we set, since the default (above) is descending. with client as request: - response = request.get("/packages", params={ - "SB": "v", # Votes. - "SO": "a", # Ascending. - "O": "50" # Second page. - }) + response = request.get( + "/packages", + params={ + "SB": "v", # Votes. + "SO": "a", # Ascending. 
+ "O": "50", # Second page. + }, + ) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) rows = root.xpath('//table[@class="results"]/tbody/tr') - votes = rows[-1].xpath('./td')[2] # The third column of the last row. + votes = rows[-1].xpath("./td")[2] # The third column of the last row. assert votes.text.strip() == "1" -def test_packages_sort_by_popularity(client: TestClient, - maintainer: User, - packages: list[Package]): +def test_packages_sort_by_popularity( + client: TestClient, maintainer: User, packages: list[Package] +): # Set the first package's Popularity to 0.50. with db.begin(): packages[0].PackageBase.Popularity = "0.50" # Test that, by default, the first result is what we just set above. with client as request: - response = request.get("/packages", params={ - "SB": "p" # Popularity - }) + response = request.get("/packages", params={"SB": "p"}) # Popularity assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) rows = root.xpath('//table[@class="results"]/tbody/tr') - pop = rows[0].xpath('./td')[3] # The fourth column of the first row. + pop = rows[0].xpath("./td")[3] # The fourth column of the first row. assert pop.text.strip() == "0.50" -def test_packages_sort_by_voted(client: TestClient, - maintainer: User, - packages: list[Package]): +def test_packages_sort_by_voted( + client: TestClient, maintainer: User, packages: list[Package] +): now = time.utcnow() with db.begin(): - db.create(PackageVote, PackageBase=packages[0].PackageBase, - User=maintainer, VoteTS=now) + db.create( + PackageVote, + PackageBase=packages[0].PackageBase, + User=maintainer, + VoteTS=now, + ) # Test that, by default, the first result is what we just set above. cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with client as request: - response = request.get("/packages", params={ - "SB": "w", # Voted - "SO": "d" # Descending, Voted first. 
- }, cookies=cookies) + response = request.get( + "/packages", + params={"SB": "w", "SO": "d"}, # Voted # Descending, Voted first. + cookies=cookies, + ) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) rows = root.xpath('//table[@class="results"]/tbody/tr') - voted = rows[0].xpath('./td')[5] # The sixth column of the first row. + voted = rows[0].xpath("./td")[5] # The sixth column of the first row. assert voted.text.strip() == "Yes" # Conversely, everything else was not voted on. - voted = rows[1].xpath('./td')[5] # The sixth column of the second row. + voted = rows[1].xpath("./td")[5] # The sixth column of the second row. assert voted.text.strip() == str() # Empty. -def test_packages_sort_by_notify(client: TestClient, - maintainer: User, - packages: list[Package]): - db.create(PackageNotification, - PackageBase=packages[0].PackageBase, - User=maintainer) +def test_packages_sort_by_notify( + client: TestClient, maintainer: User, packages: list[Package] +): + db.create(PackageNotification, PackageBase=packages[0].PackageBase, User=maintainer) # Test that, by default, the first result is what we just set above. cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with client as request: - response = request.get("/packages", params={ - "SB": "o", # Voted - "SO": "d" # Descending, Voted first. - }, cookies=cookies) + response = request.get( + "/packages", + params={"SB": "o", "SO": "d"}, # Voted # Descending, Voted first. + cookies=cookies, + ) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) rows = root.xpath('//table[@class="results"]/tbody/tr') - notify = rows[0].xpath('./td')[6] # The sixth column of the first row. + notify = rows[0].xpath("./td")[6] # The sixth column of the first row. assert notify.text.strip() == "Yes" # Conversely, everything else was not voted on. - notify = rows[1].xpath('./td')[6] # The sixth column of the second row. 
+ notify = rows[1].xpath("./td")[6] # The sixth column of the second row. assert notify.text.strip() == str() # Empty. -def test_packages_sort_by_maintainer(client: TestClient, - maintainer: User, - package: Package): - """ Sort a package search by the maintainer column. """ +def test_packages_sort_by_maintainer( + client: TestClient, maintainer: User, package: Package +): + """Sort a package search by the maintainer column.""" # Create a second package, so the two can be ordered and checked. with db.begin(): - maintainer2 = db.create(User, Username="maintainer2", - Email="maintainer2@example.org", - Passwd="testPassword") - base2 = db.create(PackageBase, Name="pkg_2", Maintainer=maintainer2, - Submitter=maintainer2, Packager=maintainer2) + maintainer2 = db.create( + User, + Username="maintainer2", + Email="maintainer2@example.org", + Passwd="testPassword", + ) + base2 = db.create( + PackageBase, + Name="pkg_2", + Maintainer=maintainer2, + Submitter=maintainer2, + Packager=maintainer2, + ) db.create(Package, Name="pkg_2", PackageBase=base2) # Check the descending order route. with client as request: - response = request.get("/packages", params={ - "SB": "m", - "SO": "d" - }) + response = request.get("/packages", params={"SB": "m", "SO": "d"}) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) rows = root.xpath('//table[@class="results"]/tbody/tr') - col = rows[0].xpath('./td')[5].xpath('./a')[0] # Last column. + col = rows[0].xpath("./td")[5].xpath("./a")[0] # Last column. assert col.text.strip() == maintainer.Username # On the other hand, with ascending, we should get reverse ordering. 
with client as request: - response = request.get("/packages", params={ - "SB": "m", - "SO": "a" - }) + response = request.get("/packages", params={"SB": "m", "SO": "a"}) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) rows = root.xpath('//table[@class="results"]/tbody/tr') - col = rows[0].xpath('./td')[5].xpath('./a')[0] # Last column. + col = rows[0].xpath("./td")[5].xpath("./a")[0] # Last column. assert col.text.strip() == maintainer2.Username -def test_packages_sort_by_last_modified(client: TestClient, - packages: list[Package]): +def test_packages_sort_by_last_modified(client: TestClient, packages: list[Package]): now = time.utcnow() # Set the first package's ModifiedTS to be 1000 seconds before now. package = packages[0] @@ -977,10 +988,10 @@ def test_packages_sort_by_last_modified(client: TestClient, package.PackageBase.ModifiedTS = now - 1000 with client as request: - response = request.get("/packages", params={ - "SB": "l", - "SO": "a" # Ascending; oldest modification first. - }) + response = request.get( + "/packages", + params={"SB": "l", "SO": "a"}, # Ascending; oldest modification first. + ) assert response.status_code == int(HTTPStatus.OK) # We should have 50 (default per page) results. @@ -990,12 +1001,13 @@ def test_packages_sort_by_last_modified(client: TestClient, # Let's assert that the first item returned was the one we modified above. 
row = rows[0] - col = row.xpath('./td')[0].xpath('./a')[0] + col = row.xpath("./td")[0].xpath("./a")[0] assert col.text.strip() == package.Name -def test_packages_flagged(client: TestClient, maintainer: User, - packages: list[Package]): +def test_packages_flagged( + client: TestClient, maintainer: User, packages: list[Package] +): package = packages[0] now = time.utcnow() @@ -1005,9 +1017,7 @@ def test_packages_flagged(client: TestClient, maintainer: User, package.PackageBase.Flagger = maintainer with client as request: - response = request.get("/packages", params={ - "outdated": "on" - }) + response = request.get("/packages", params={"outdated": "on"}) assert response.status_code == int(HTTPStatus.OK) # We should only get one result from this query; the package we flagged. @@ -1016,9 +1026,7 @@ def test_packages_flagged(client: TestClient, maintainer: User, assert len(rows) == 1 with client as request: - response = request.get("/packages", params={ - "outdated": "off" - }) + response = request.get("/packages", params={"outdated": "off"}) assert response.status_code == int(HTTPStatus.OK) # In this case, we should get 54 results, which means that the first @@ -1044,14 +1052,17 @@ def test_packages_orphans(client: TestClient, packages: list[Package]): def test_packages_per_page(client: TestClient, maintainer: User): - """ Test the ability for /packages to deal with the PP query - argument specifications (50, 100, 250; default: 50). """ + """Test the ability for /packages to deal with the PP query + argument specifications (50, 100, 250; default: 50).""" with db.begin(): for i in range(255): - base = db.create(PackageBase, Name=f"pkg_{i}", - Maintainer=maintainer, - Submitter=maintainer, - Packager=maintainer) + base = db.create( + PackageBase, + Name=f"pkg_{i}", + Maintainer=maintainer, + Submitter=maintainer, + Packager=maintainer, + ) db.create(Package, PackageBase=base, Name=base.Name) # Test default case, PP of 50. 
@@ -1079,18 +1090,20 @@ def test_packages_per_page(client: TestClient, maintainer: User): assert len(rows) == 250 -def test_packages_post_unknown_action(client: TestClient, user: User, - package: Package): +def test_packages_post_unknown_action(client: TestClient, user: User, package: Package): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post("/packages", data={"action": "unknown"}, - cookies=cookies, allow_redirects=False) + resp = request.post( + "/packages", + data={"action": "unknown"}, + cookies=cookies, + allow_redirects=False, + ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) def test_packages_post_error(client: TestClient, user: User, package: Package): - async def stub_action(request: Request, **kwargs): return (False, ["Some error."]) @@ -1098,8 +1111,12 @@ def test_packages_post_error(client: TestClient, user: User, package: Package): with mock.patch.dict("aurweb.routers.packages.PACKAGE_ACTIONS", actions): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post("/packages", data={"action": "stub"}, - cookies=cookies, allow_redirects=False) + resp = request.post( + "/packages", + data={"action": "stub"}, + cookies=cookies, + allow_redirects=False, + ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) @@ -1108,7 +1125,6 @@ def test_packages_post_error(client: TestClient, user: User, package: Package): def test_packages_post(client: TestClient, user: User, package: Package): - async def stub_action(request: Request, **kwargs): return (True, ["Some success."]) @@ -1116,8 +1132,12 @@ def test_packages_post(client: TestClient, user: User, package: Package): with mock.patch.dict("aurweb.routers.packages.PACKAGE_ACTIONS", actions): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post("/packages", data={"action": "stub"}, - cookies=cookies, 
allow_redirects=False) + resp = request.post( + "/packages", + data={"action": "stub"}, + cookies=cookies, + allow_redirects=False, + ) assert resp.status_code == int(HTTPStatus.OK) errors = get_successes(resp.text) @@ -1125,8 +1145,9 @@ def test_packages_post(client: TestClient, user: User, package: Package): assert errors[0].text.strip() == expected -def test_packages_post_unflag(client: TestClient, user: User, - maintainer: User, package: Package): +def test_packages_post_unflag( + client: TestClient, user: User, maintainer: User, package: Package +): # Flag `package` as `user`. now = time.utcnow() with db.begin(): @@ -1181,8 +1202,7 @@ def test_packages_post_notify(client: TestClient, user: User, package: Package): # an error to be rendered. cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post("/packages", data={"action": "notify"}, - cookies=cookies) + resp = request.post("/packages", data={"action": "notify"}, cookies=cookies) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "You did not select any packages to be notified about." @@ -1190,10 +1210,9 @@ def test_packages_post_notify(client: TestClient, user: User, package: Package): # Now let's actually enable notifications on `package`. with client as request: - resp = request.post("/packages", data={ - "action": "notify", - "IDs": [package.ID] - }, cookies=cookies) + resp = request.post( + "/packages", data={"action": "notify", "IDs": [package.ID]}, cookies=cookies + ) assert resp.status_code == int(HTTPStatus.OK) expected = "The selected packages' notifications have been enabled." successes = get_successes(resp.text) @@ -1202,31 +1221,27 @@ def test_packages_post_notify(client: TestClient, user: User, package: Package): # Try to enable notifications when they're already enabled, # causing an error to be rendered. 
with client as request: - resp = request.post("/packages", data={ - "action": "notify", - "IDs": [package.ID] - }, cookies=cookies) + resp = request.post( + "/packages", data={"action": "notify", "IDs": [package.ID]}, cookies=cookies + ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "You did not select any packages to be notified about." assert errors[0].text.strip() == expected -def test_packages_post_unnotify(client: TestClient, user: User, - package: Package): +def test_packages_post_unnotify(client: TestClient, user: User, package: Package): # Create a notification record. with db.begin(): - notif = db.create(PackageNotification, - PackageBase=package.PackageBase, - User=user) + notif = db.create( + PackageNotification, PackageBase=package.PackageBase, User=user + ) assert notif is not None # Request removal of the notification without any IDs. cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post("/packages", data={ - "action": "unnotify" - }, cookies=cookies) + resp = request.post("/packages", data={"action": "unnotify"}, cookies=cookies) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "You did not select any packages for notification removal." @@ -1234,10 +1249,11 @@ def test_packages_post_unnotify(client: TestClient, user: User, # Request removal of the notification; really. with client as request: - resp = request.post("/packages", data={ - "action": "unnotify", - "IDs": [package.ID] - }, cookies=cookies) + resp = request.post( + "/packages", + data={"action": "unnotify", "IDs": [package.ID]}, + cookies=cookies, + ) assert resp.status_code == int(HTTPStatus.OK) successes = get_successes(resp.text) expected = "The selected packages' notifications have been removed." @@ -1251,25 +1267,23 @@ def test_packages_post_unnotify(client: TestClient, user: User, # Try it again. The notif no longer exists. 
with client as request: - resp = request.post("/packages", data={ - "action": "unnotify", - "IDs": [package.ID] - }, cookies=cookies) + resp = request.post( + "/packages", + data={"action": "unnotify", "IDs": [package.ID]}, + cookies=cookies, + ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "A package you selected does not have notifications enabled." assert errors[0].text.strip() == expected -def test_packages_post_adopt(client: TestClient, user: User, - package: Package): +def test_packages_post_adopt(client: TestClient, user: User, package: Package): # Try to adopt an empty list of packages. cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post("/packages", data={ - "action": "adopt" - }, cookies=cookies) + resp = request.post("/packages", data={"action": "adopt"}, cookies=cookies) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "You did not select any packages to adopt." @@ -1277,11 +1291,11 @@ def test_packages_post_adopt(client: TestClient, user: User, # Now, let's try to adopt a package that's already maintained. with client as request: - resp = request.post("/packages", data={ - "action": "adopt", - "IDs": [package.ID], - "confirm": True - }, cookies=cookies) + resp = request.post( + "/packages", + data={"action": "adopt", "IDs": [package.ID], "confirm": True}, + cookies=cookies, + ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "You are not allowed to adopt one of the packages you selected." @@ -1294,33 +1308,34 @@ def test_packages_post_adopt(client: TestClient, user: User, # Now, let's try to adopt without confirming. 
with client as request: - resp = request.post("/packages", data={ - "action": "adopt", - "IDs": [package.ID] - }, cookies=cookies) + resp = request.post( + "/packages", data={"action": "adopt", "IDs": [package.ID]}, cookies=cookies + ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) - expected = ("The selected packages have not been adopted, " - "check the confirmation checkbox.") + expected = ( + "The selected packages have not been adopted, " + "check the confirmation checkbox." + ) assert errors[0].text.strip() == expected # Let's do it again now that there is no maintainer. with client as request: - resp = request.post("/packages", data={ - "action": "adopt", - "IDs": [package.ID], - "confirm": True - }, cookies=cookies) + resp = request.post( + "/packages", + data={"action": "adopt", "IDs": [package.ID], "confirm": True}, + cookies=cookies, + ) assert resp.status_code == int(HTTPStatus.OK) successes = get_successes(resp.text) expected = "The selected packages have been adopted." assert successes[0].text.strip() == expected -def test_packages_post_disown_as_maintainer(client: TestClient, user: User, - maintainer: User, - package: Package): - """ Disown packages as a maintainer. """ +def test_packages_post_disown_as_maintainer( + client: TestClient, user: User, maintainer: User, package: Package +): + """Disown packages as a maintainer.""" # Initially prove that we have a maintainer. assert package.PackageBase.Maintainer is not None assert package.PackageBase.Maintainer == maintainer @@ -1328,9 +1343,7 @@ def test_packages_post_disown_as_maintainer(client: TestClient, user: User, # Try to run the disown action with no IDs; get an error. 
cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with client as request: - resp = request.post("/packages", data={ - "action": "disown" - }, cookies=cookies) + resp = request.post("/packages", data={"action": "disown"}, cookies=cookies) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "You did not select any packages to disown." @@ -1339,25 +1352,26 @@ def test_packages_post_disown_as_maintainer(client: TestClient, user: User, # Try to disown `package` without giving the confirm argument. with client as request: - resp = request.post("/packages", data={ - "action": "disown", - "IDs": [package.ID] - }, cookies=cookies) + resp = request.post( + "/packages", data={"action": "disown", "IDs": [package.ID]}, cookies=cookies + ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) assert package.PackageBase.Maintainer is not None errors = get_errors(resp.text) - expected = ("The selected packages have not been disowned, " - "check the confirmation checkbox.") + expected = ( + "The selected packages have not been disowned, " + "check the confirmation checkbox." + ) assert errors[0].text.strip() == expected # Now, try to disown `package` without credentials (as `user`). user_cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post("/packages", data={ - "action": "disown", - "IDs": [package.ID], - "confirm": True - }, cookies=user_cookies) + resp = request.post( + "/packages", + data={"action": "disown", "IDs": [package.ID], "confirm": True}, + cookies=user_cookies, + ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) assert package.PackageBase.Maintainer is not None errors = get_errors(resp.text) @@ -1366,11 +1380,11 @@ def test_packages_post_disown_as_maintainer(client: TestClient, user: User, # Now, let's really disown `package` as `maintainer`. 
with client as request: - resp = request.post("/packages", data={ - "action": "disown", - "IDs": [package.ID], - "confirm": True - }, cookies=cookies) + resp = request.post( + "/packages", + data={"action": "disown", "IDs": [package.ID], "confirm": True}, + cookies=cookies, + ) assert package.PackageBase.Maintainer is None successes = get_successes(resp.text) @@ -1378,30 +1392,36 @@ def test_packages_post_disown_as_maintainer(client: TestClient, user: User, assert successes[0].text.strip() == expected -def test_packages_post_disown(client: TestClient, tu_user: User, - maintainer: User, package: Package): - """ Disown packages as a Trusted User, which cannot bypass idle time. """ +def test_packages_post_disown( + client: TestClient, tu_user: User, maintainer: User, package: Package +): + """Disown packages as a Trusted User, which cannot bypass idle time.""" cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - resp = request.post("/packages", data={ - "action": "disown", - "IDs": [package.ID], - "confirm": True - }, cookies=cookies) + resp = request.post( + "/packages", + data={"action": "disown", "IDs": [package.ID], "confirm": True}, + cookies=cookies, + ) errors = get_errors(resp.text) expected = r"^No due existing orphan requests to accept for .+\.$" assert re.match(expected, errors[0].text.strip()) -def test_packages_post_delete(caplog: pytest.fixture, client: TestClient, - user: User, tu_user: User, package: Package): +def test_packages_post_delete( + caplog: pytest.fixture, + client: TestClient, + user: User, + tu_user: User, + package: Package, +): # First, let's try to use the delete action with no packages IDs. 
user_cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post("/packages", data={ - "action": "delete" - }, cookies=user_cookies) + resp = request.post( + "/packages", data={"action": "delete"}, cookies=user_cookies + ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "You did not select any packages to delete." @@ -1409,23 +1429,26 @@ def test_packages_post_delete(caplog: pytest.fixture, client: TestClient, # Now, let's try to delete real packages without supplying "confirm". with client as request: - resp = request.post("/packages", data={ - "action": "delete", - "IDs": [package.ID] - }, cookies=user_cookies) + resp = request.post( + "/packages", + data={"action": "delete", "IDs": [package.ID]}, + cookies=user_cookies, + ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) - expected = ("The selected packages have not been deleted, " - "check the confirmation checkbox.") + expected = ( + "The selected packages have not been deleted, " + "check the confirmation checkbox." + ) assert errors[0].text.strip() == expected # And again, with everything, but `user` doesn't have permissions. with client as request: - resp = request.post("/packages", data={ - "action": "delete", - "IDs": [package.ID], - "confirm": True - }, cookies=user_cookies) + resp = request.post( + "/packages", + data={"action": "delete", "IDs": [package.ID], "confirm": True}, + cookies=user_cookies, + ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "You do not have permission to delete packages." @@ -1436,11 +1459,11 @@ def test_packages_post_delete(caplog: pytest.fixture, client: TestClient, # an invalid package ID. 
tu_cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - resp = request.post("/packages", data={ - "action": "delete", - "IDs": [0], - "confirm": True - }, cookies=tu_cookies) + resp = request.post( + "/packages", + data={"action": "delete", "IDs": [0], "confirm": True}, + cookies=tu_cookies, + ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "One of the packages you selected does not exist." @@ -1449,11 +1472,11 @@ def test_packages_post_delete(caplog: pytest.fixture, client: TestClient, # Whoo. Now, let's finally make a valid request as `tu_user` # to delete `package`. with client as request: - resp = request.post("/packages", data={ - "action": "delete", - "IDs": [package.ID], - "confirm": True - }, cookies=tu_cookies) + resp = request.post( + "/packages", + data={"action": "delete", "IDs": [package.ID], "confirm": True}, + cookies=tu_cookies, + ) assert resp.status_code == int(HTTPStatus.OK) successes = get_successes(resp.text) expected = "The selected packages have been deleted." @@ -1461,15 +1484,17 @@ def test_packages_post_delete(caplog: pytest.fixture, client: TestClient, # Expect that the package deletion was logged. pkgbases = [package.PackageBase.Name] - expected = (f"Privileged user '{tu_user.Username}' deleted the " - f"following package bases: {str(pkgbases)}.") + expected = ( + f"Privileged user '{tu_user.Username}' deleted the " + f"following package bases: {str(pkgbases)}." + ) assert expected in caplog.text def test_account_comments_unauthorized(client: TestClient, user: User): - """ This test may seem out of place, but it requires packages, + """This test may seem out of place, but it requires packages, so its being included in the packages routes test suite to - leverage existing fixtures. 
""" + leverage existing fixtures.""" endpoint = f"/account/{user.Username}/comments" with client as request: resp = request.get(endpoint, allow_redirects=False) @@ -1478,22 +1503,28 @@ def test_account_comments_unauthorized(client: TestClient, user: User): def test_account_comments(client: TestClient, user: User, package: Package): - """ This test may seem out of place, but it requires packages, + """This test may seem out of place, but it requires packages, so its being included in the packages routes test suite to - leverage existing fixtures. """ + leverage existing fixtures.""" now = time.utcnow() with db.begin(): # This comment's CommentTS is `now + 1`, so it is found in rendered # HTML before the rendered_comment, which has a CommentTS of `now`. - comment = db.create(PackageComment, - PackageBase=package.PackageBase, - User=user, Comments="Test comment", - CommentTS=now + 1) - rendered_comment = db.create(PackageComment, - PackageBase=package.PackageBase, - User=user, Comments="Test comment", - RenderedComment="

    Test comment

    ", - CommentTS=now) + comment = db.create( + PackageComment, + PackageBase=package.PackageBase, + User=user, + Comments="Test comment", + CommentTS=now + 1, + ) + rendered_comment = db.create( + PackageComment, + PackageBase=package.PackageBase, + User=user, + Comments="Test comment", + RenderedComment="

    Test comment

    ", + CommentTS=now, + ) cookies = {"AURSID": user.login(Request(), "testPassword")} endpoint = f"/account/{user.Username}/comments" @@ -1508,7 +1539,6 @@ def test_account_comments(client: TestClient, user: User, package: Package): assert comments[0].text.strip() == comment.Comments # And from the second, we have rendered content. - rendered = comments[1].xpath('./p') - expected = rendered_comment.RenderedComment.replace( - "

    ", "").replace("

    ", "") + rendered = comments[1].xpath("./p") + expected = rendered_comment.RenderedComment.replace("

    ", "").replace("

    ", "") assert rendered[0].text.strip() == expected diff --git a/test/test_packages_util.py b/test/test_packages_util.py index 02f84601..0042cd71 100644 --- a/test/test_packages_util.py +++ b/test/test_packages_util.py @@ -1,5 +1,4 @@ import pytest - from fastapi.testclient import TestClient from aurweb import asgi, config, db, time @@ -23,18 +22,22 @@ def setup(db_test): @pytest.fixture def maintainer() -> User: with db.begin(): - maintainer = db.create(User, Username="test_maintainer", - Email="test_maintainer@examepl.org", - Passwd="testPassword", - AccountTypeID=USER_ID) + maintainer = db.create( + User, + Username="test_maintainer", + Email="test_maintainer@examepl.org", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield maintainer @pytest.fixture def package(maintainer: User) -> Package: with db.begin(): - pkgbase = db.create(PackageBase, Name="test-pkg", - Packager=maintainer, Maintainer=maintainer) + pkgbase = db.create( + PackageBase, Name="test-pkg", Packager=maintainer, Maintainer=maintainer + ) package = db.create(Package, Name=pkgbase.Name, PackageBase=pkgbase) yield package @@ -51,10 +54,9 @@ def test_package_link(client: TestClient, package: Package): def test_official_package_link(client: TestClient, package: Package): with db.begin(): - provider = db.create(OfficialProvider, - Name=package.Name, - Repo="core", - Provides=package.Name) + provider = db.create( + OfficialProvider, Name=package.Name, Repo="core", Provides=package.Name + ) expected = f"{OFFICIAL_BASE}/packages/?q={package.Name}" assert util.package_link(provider) == expected @@ -63,9 +65,7 @@ def test_updated_packages(maintainer: User, package: Package): expected = { "Name": package.Name, "Version": package.Version, - "PackageBase": { - "ModifiedTS": package.PackageBase.ModifiedTS - } + "PackageBase": {"ModifiedTS": package.PackageBase.ModifiedTS}, } kill_redis() # Kill it here to ensure we're on a fake instance. 
@@ -77,8 +77,9 @@ def test_updated_packages(maintainer: User, package: Package): def test_query_voted(maintainer: User, package: Package): now = time.utcnow() with db.begin(): - db.create(PackageVote, User=maintainer, VoteTS=now, - PackageBase=package.PackageBase) + db.create( + PackageVote, User=maintainer, VoteTS=now, PackageBase=package.PackageBase + ) query = db.query(Package).filter(Package.ID == package.ID).all() query_voted = util.query_voted(query, maintainer) @@ -87,8 +88,7 @@ def test_query_voted(maintainer: User, package: Package): def test_query_notified(maintainer: User, package: Package): with db.begin(): - db.create(PackageNotification, User=maintainer, - PackageBase=package.PackageBase) + db.create(PackageNotification, User=maintainer, PackageBase=package.PackageBase) query = db.query(Package).filter(Package.ID == package.ID).all() query_notified = util.query_notified(query, maintainer) @@ -99,8 +99,9 @@ def test_source_uri_file(package: Package): FILE = "test_file" with db.begin(): - pkgsrc = db.create(PackageSource, Source=FILE, - Package=package, SourceArch="x86_64") + pkgsrc = db.create( + PackageSource, Source=FILE, Package=package, SourceArch="x86_64" + ) source_file_uri = config.get("options", "source_file_uri") file, uri = util.source_uri(pkgsrc) expected = source_file_uri % (pkgsrc.Source, package.PackageBase.Name) @@ -112,8 +113,9 @@ def test_source_uri_named_uri(package: Package): URL = "https://test.xyz" with db.begin(): - pkgsrc = db.create(PackageSource, Source=f"{FILE}::{URL}", - Package=package, SourceArch="x86_64") + pkgsrc = db.create( + PackageSource, Source=f"{FILE}::{URL}", Package=package, SourceArch="x86_64" + ) file, uri = util.source_uri(pkgsrc) assert (file, uri) == (FILE, URL) @@ -122,7 +124,8 @@ def test_source_uri_unnamed_uri(package: Package): URL = "https://test.xyz" with db.begin(): - pkgsrc = db.create(PackageSource, Source=f"{URL}", - Package=package, SourceArch="x86_64") + pkgsrc = db.create( + PackageSource, 
Source=f"{URL}", Package=package, SourceArch="x86_64" + ) file, uri = util.source_uri(pkgsrc) assert (file, uri) == (URL, URL) diff --git a/test/test_pkgbase_routes.py b/test/test_pkgbase_routes.py index 52241b9e..bfdb0c37 100644 --- a/test/test_pkgbase_routes.py +++ b/test/test_pkgbase_routes.py @@ -1,10 +1,8 @@ import re - from http import HTTPStatus from unittest import mock import pytest - from fastapi.testclient import TestClient from sqlalchemy import and_ @@ -33,30 +31,24 @@ def package_endpoint(package: Package) -> str: def create_package(pkgname: str, maintainer: User) -> Package: - pkgbase = db.create(PackageBase, - Name=pkgname, - Maintainer=maintainer) + pkgbase = db.create(PackageBase, Name=pkgname, Maintainer=maintainer) return db.create(Package, Name=pkgbase.Name, PackageBase=pkgbase) -def create_package_dep(package: Package, depname: str, - dep_type_name: str = "depends") -> PackageDependency: - dep_type = db.query(DependencyType, - DependencyType.Name == dep_type_name).first() - return db.create(PackageDependency, - DependencyType=dep_type, - Package=package, - DepName=depname) +def create_package_dep( + package: Package, depname: str, dep_type_name: str = "depends" +) -> PackageDependency: + dep_type = db.query(DependencyType, DependencyType.Name == dep_type_name).first() + return db.create( + PackageDependency, DependencyType=dep_type, Package=package, DepName=depname + ) -def create_package_rel(package: Package, - relname: str) -> PackageRelation: - rel_type = db.query(RelationType, - RelationType.ID == PROVIDES_ID).first() - return db.create(PackageRelation, - RelationType=rel_type, - Package=package, - RelName=relname) +def create_package_rel(package: Package, relname: str) -> PackageRelation: + rel_type = db.query(RelationType, RelationType.ID == PROVIDES_ID).first() + return db.create( + PackageRelation, RelationType=rel_type, Package=package, RelName=relname + ) @pytest.fixture(autouse=True) @@ -66,76 +58,88 @@ def setup(db_test): 
@pytest.fixture def client() -> TestClient: - """ Yield a FastAPI TestClient. """ + """Yield a FastAPI TestClient.""" yield TestClient(app=asgi.app) def create_user(username: str) -> User: with db.begin(): - user = db.create(User, Username=username, - Email=f"{username}@example.org", - Passwd="testPassword", - AccountTypeID=USER_ID) + user = db.create( + User, + Username=username, + Email=f"{username}@example.org", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) return user @pytest.fixture def user() -> User: - """ Yield a user. """ + """Yield a user.""" user = create_user("test") yield user @pytest.fixture def maintainer() -> User: - """ Yield a specific User used to maintain packages. """ + """Yield a specific User used to maintain packages.""" account_type = db.query(AccountType, AccountType.ID == USER_ID).first() with db.begin(): - maintainer = db.create(User, Username="test_maintainer", - Email="test_maintainer@example.org", - Passwd="testPassword", - AccountType=account_type) + maintainer = db.create( + User, + Username="test_maintainer", + Email="test_maintainer@example.org", + Passwd="testPassword", + AccountType=account_type, + ) yield maintainer @pytest.fixture def comaintainer() -> User: - """ Yield a specific User used to maintain packages. 
""" + """Yield a specific User used to maintain packages.""" account_type = db.query(AccountType, AccountType.ID == USER_ID).first() with db.begin(): - comaintainer = db.create(User, Username="test_comaintainer", - Email="test_comaintainer@example.org", - Passwd="testPassword", - AccountType=account_type) + comaintainer = db.create( + User, + Username="test_comaintainer", + Email="test_comaintainer@example.org", + Passwd="testPassword", + AccountType=account_type, + ) yield comaintainer @pytest.fixture def tu_user(): - tu_type = db.query(AccountType, - AccountType.AccountType == "Trusted User").first() + tu_type = db.query(AccountType, AccountType.AccountType == "Trusted User").first() with db.begin(): - tu_user = db.create(User, Username="test_tu", - Email="test_tu@example.org", - RealName="Test TU", Passwd="testPassword", - AccountType=tu_type) + tu_user = db.create( + User, + Username="test_tu", + Email="test_tu@example.org", + RealName="Test TU", + Passwd="testPassword", + AccountType=tu_type, + ) yield tu_user @pytest.fixture def package(maintainer: User) -> Package: - """ Yield a Package created by user. """ + """Yield a Package created by user.""" now = time.utcnow() with db.begin(): - pkgbase = db.create(PackageBase, - Name="test-package", - Maintainer=maintainer, - Packager=maintainer, - Submitter=maintainer, - ModifiedTS=now) - package = db.create(Package, - PackageBase=pkgbase, - Name=pkgbase.Name) + pkgbase = db.create( + PackageBase, + Name="test-package", + Maintainer=maintainer, + Packager=maintainer, + Submitter=maintainer, + ModifiedTS=now, + ) + package = db.create(Package, PackageBase=pkgbase, Name=pkgbase.Name) yield package @@ -146,29 +150,34 @@ def pkgbase(package: Package) -> PackageBase: @pytest.fixture def target(maintainer: User) -> PackageBase: - """ Merge target. 
""" + """Merge target.""" now = time.utcnow() with db.begin(): - pkgbase = db.create(PackageBase, Name="target-package", - Maintainer=maintainer, - Packager=maintainer, - Submitter=maintainer, - ModifiedTS=now) + pkgbase = db.create( + PackageBase, + Name="target-package", + Maintainer=maintainer, + Packager=maintainer, + Submitter=maintainer, + ModifiedTS=now, + ) db.create(Package, PackageBase=pkgbase, Name=pkgbase.Name) yield pkgbase @pytest.fixture def pkgreq(user: User, pkgbase: PackageBase) -> PackageRequest: - """ Yield a PackageRequest related to `pkgbase`. """ + """Yield a PackageRequest related to `pkgbase`.""" with db.begin(): - pkgreq = db.create(PackageRequest, - ReqTypeID=DELETION_ID, - User=user, - PackageBase=pkgbase, - PackageBaseName=pkgbase.Name, - Comments=f"Deletion request for {pkgbase.Name}", - ClosureComment=str()) + pkgreq = db.create( + PackageRequest, + ReqTypeID=DELETION_ID, + User=user, + PackageBase=pkgbase, + PackageBaseName=pkgbase.Name, + Comments=f"Deletion request for {pkgbase.Name}", + ClosureComment=str(), + ) yield pkgreq @@ -177,31 +186,33 @@ def comment(user: User, package: Package) -> PackageComment: pkgbase = package.PackageBase now = time.utcnow() with db.begin(): - comment = db.create(PackageComment, - User=user, - PackageBase=pkgbase, - Comments="Test comment.", - RenderedComment=str(), - CommentTS=now) + comment = db.create( + PackageComment, + User=user, + PackageBase=pkgbase, + Comments="Test comment.", + RenderedComment=str(), + CommentTS=now, + ) yield comment @pytest.fixture def packages(maintainer: User) -> list[Package]: - """ Yield 55 packages named pkg_0 .. pkg_54. """ + """Yield 55 packages named pkg_0 .. 
pkg_54.""" packages_ = [] now = time.utcnow() with db.begin(): for i in range(55): - pkgbase = db.create(PackageBase, - Name=f"pkg_{i}", - Maintainer=maintainer, - Packager=maintainer, - Submitter=maintainer, - ModifiedTS=now) - package = db.create(Package, - PackageBase=pkgbase, - Name=f"pkg_{i}") + pkgbase = db.create( + PackageBase, + Name=f"pkg_{i}", + Maintainer=maintainer, + Packager=maintainer, + Submitter=maintainer, + ModifiedTS=now, + ) + package = db.create(Package, PackageBase=pkgbase, Name=f"pkg_{i}") packages_.append(package) yield packages_ @@ -210,18 +221,18 @@ def packages(maintainer: User) -> list[Package]: @pytest.fixture def requests(user: User, packages: list[Package]) -> list[PackageRequest]: pkgreqs = [] - deletion_type = db.query(RequestType).filter( - RequestType.ID == DELETION_ID - ).first() + deletion_type = db.query(RequestType).filter(RequestType.ID == DELETION_ID).first() with db.begin(): for i in range(55): - pkgreq = db.create(PackageRequest, - RequestType=deletion_type, - User=user, - PackageBase=packages[i].PackageBase, - PackageBaseName=packages[i].Name, - Comments=f"Deletion request for pkg_{i}", - ClosureComment=str()) + pkgreq = db.create( + PackageRequest, + RequestType=deletion_type, + User=user, + PackageBase=packages[i].PackageBase, + PackageBaseName=packages[i].Name, + Comments=f"Deletion request for pkg_{i}", + ClosureComment=str(), + ) pkgreqs.append(pkgreq) yield pkgreqs @@ -234,21 +245,18 @@ def test_pkgbase_not_found(client: TestClient): def test_pkgbase_redirect(client: TestClient, package: Package): with client as request: - resp = request.get(f"/pkgbase/{package.Name}", - allow_redirects=False) + resp = request.get(f"/pkgbase/{package.Name}", allow_redirects=False) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/packages/{package.Name}" def test_pkgbase(client: TestClient, package: Package): with db.begin(): - second = db.create(Package, Name="second-pkg", - 
PackageBase=package.PackageBase) + second = db.create(Package, Name="second-pkg", PackageBase=package.PackageBase) expected = [package.Name, second.Name] with client as request: - resp = request.get(f"/pkgbase/{package.Name}", - allow_redirects=False) + resp = request.get(f"/pkgbase/{package.Name}", allow_redirects=False) assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -264,8 +272,9 @@ def test_pkgbase(client: TestClient, package: Package): assert pkgs[i].text.strip() == name -def test_pkgbase_maintainer(client: TestClient, user: User, maintainer: User, - package: Package): +def test_pkgbase_maintainer( + client: TestClient, user: User, maintainer: User, package: Package +): """ Test that the Maintainer field is beind displayed correctly. @@ -273,9 +282,9 @@ def test_pkgbase_maintainer(client: TestClient, user: User, maintainer: User, the maintainer. """ with db.begin(): - db.create(PackageComaintainer, User=user, - PackageBase=package.PackageBase, - Priority=1) + db.create( + PackageComaintainer, User=user, PackageBase=package.PackageBase, Priority=1 + ) with client as request: resp = request.get(f"/pkgbase/{package.Name}") @@ -286,7 +295,7 @@ def test_pkgbase_maintainer(client: TestClient, user: User, maintainer: User, maint = root.xpath('//table[@id="pkginfo"]/tr[@class="pkgmaint"]/td')[0] maint, comaint = maint.text.strip().split() assert maint == maintainer.Username - assert comaint == f'({user.Username})' + assert comaint == f"({user.Username})" def test_pkgbase_voters(client: TestClient, tu_user: User, package: Package): @@ -309,8 +318,7 @@ def test_pkgbase_voters(client: TestClient, tu_user: User, package: Package): assert rows[0].text.strip() == tu_user.Username -def test_pkgbase_voters_unauthorized(client: TestClient, user: User, - package: Package): +def test_pkgbase_voters_unauthorized(client: TestClient, user: User, package: Package): pkgbase = package.PackageBase endpoint = f"/pkgbase/{pkgbase.Name}/voters" @@ -324,25 
+332,30 @@ def test_pkgbase_voters_unauthorized(client: TestClient, user: User, assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" -def test_pkgbase_comment_not_found(client: TestClient, maintainer: User, - package: Package): +def test_pkgbase_comment_not_found( + client: TestClient, maintainer: User, package: Package +): cookies = {"AURSID": maintainer.login(Request(), "testPassword")} comment_id = 12345 # A non-existing comment. endpoint = f"/pkgbase/{package.PackageBase.Name}/comments/{comment_id}" with client as request: - resp = request.post(endpoint, data={ - "comment": "Failure" - }, cookies=cookies) + resp = request.post(endpoint, data={"comment": "Failure"}, cookies=cookies) assert resp.status_code == int(HTTPStatus.NOT_FOUND) -def test_pkgbase_comment_form_unauthorized(client: TestClient, user: User, - maintainer: User, package: Package): +def test_pkgbase_comment_form_unauthorized( + client: TestClient, user: User, maintainer: User, package: Package +): now = time.utcnow() with db.begin(): - comment = db.create(PackageComment, PackageBase=package.PackageBase, - User=maintainer, Comments="Test", - RenderedComment=str(), CommentTS=now) + comment = db.create( + PackageComment, + PackageBase=package.PackageBase, + User=maintainer, + Comments="Test", + RenderedComment=str(), + CommentTS=now, + ) cookies = {"AURSID": user.login(Request(), "testPassword")} pkgbasename = package.PackageBase.Name @@ -352,8 +365,9 @@ def test_pkgbase_comment_form_unauthorized(client: TestClient, user: User, assert resp.status_code == int(HTTPStatus.UNAUTHORIZED) -def test_pkgbase_comment_form_not_found(client: TestClient, maintainer: User, - package: Package): +def test_pkgbase_comment_form_not_found( + client: TestClient, maintainer: User, package: Package +): cookies = {"AURSID": maintainer.login(Request(), "testPassword")} comment_id = 12345 # A non-existing comment. 
pkgbasename = package.PackageBase.Name @@ -363,8 +377,9 @@ def test_pkgbase_comment_form_not_found(client: TestClient, maintainer: User, assert resp.status_code == int(HTTPStatus.NOT_FOUND) -def test_pkgbase_comments_missing_comment(client: TestClient, maintainer: User, - package: Package): +def test_pkgbase_comments_missing_comment( + client: TestClient, maintainer: User, package: Package +): cookies = {"AURSID": maintainer.login(Request(), "testPassword")} endpoint = f"/pkgbase/{package.PackageBase.Name}/comments" with client as request: @@ -372,9 +387,10 @@ def test_pkgbase_comments_missing_comment(client: TestClient, maintainer: User, assert resp.status_code == int(HTTPStatus.BAD_REQUEST) -def test_pkgbase_comments(client: TestClient, maintainer: User, user: User, - package: Package): - """ This test includes tests against the following routes: +def test_pkgbase_comments( + client: TestClient, maintainer: User, user: User, package: Package +): + """This test includes tests against the following routes: - POST /pkgbase/{name}/comments - GET /pkgbase/{name} (to check comments) - Tested against a comment created with the POST route @@ -383,18 +399,17 @@ def test_pkgbase_comments(client: TestClient, maintainer: User, user: User, """ with db.begin(): user.CommentNotify = 1 - db.create(PackageNotification, - PackageBase=package.PackageBase, - User=user) + db.create(PackageNotification, PackageBase=package.PackageBase, User=user) cookies = {"AURSID": maintainer.login(Request(), "testPassword")} pkgbasename = package.PackageBase.Name endpoint = f"/pkgbase/{pkgbasename}/comments" with client as request: - resp = request.post(endpoint, data={ - "comment": "Test comment.", - "enable_notifications": True - }, cookies=cookies) + resp = request.post( + endpoint, + data={"comment": "Test comment.", "enable_notifications": True}, + cookies=cookies, + ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) # user should've gotten a CommentNotification email. 
@@ -438,10 +453,11 @@ def test_pkgbase_comments(client: TestClient, maintainer: User, user: User, comment_id = int(headers[0].attrib["id"].split("-")[-1]) endpoint = f"/pkgbase/{pkgbasename}/comments/{comment_id}" with client as request: - resp = request.post(endpoint, data={ - "comment": "Edited comment.", - "enable_notifications": True - }, cookies=cookies) + resp = request.post( + endpoint, + data={"comment": "Edited comment.", "enable_notifications": True}, + cookies=cookies, + ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) with client as request: @@ -479,27 +495,33 @@ def test_pkgbase_comments(client: TestClient, maintainer: User, user: User, assert "form" in data -def test_pkgbase_comment_edit_unauthorized(client: TestClient, - user: User, - maintainer: User, - package: Package, - comment: PackageComment): +def test_pkgbase_comment_edit_unauthorized( + client: TestClient, + user: User, + maintainer: User, + package: Package, + comment: PackageComment, +): pkgbase = package.PackageBase cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with client as request: endp = f"/pkgbase/{pkgbase.Name}/comments/{comment.ID}" - response = request.post(endp, data={ - "comment": "abcd im trying to change this comment." - }, cookies=cookies) + response = request.post( + endp, + data={"comment": "abcd im trying to change this comment."}, + cookies=cookies, + ) assert response.status_code == HTTPStatus.UNAUTHORIZED -def test_pkgbase_comment_delete(client: TestClient, - maintainer: User, - user: User, - package: Package, - comment: PackageComment): +def test_pkgbase_comment_delete( + client: TestClient, + maintainer: User, + user: User, + package: Package, + comment: PackageComment, +): # Test the unauthorized case of comment deletion. 
cookies = {"AURSID": user.login(Request(), "testPassword")} pkgbasename = package.PackageBase.Name @@ -524,10 +546,9 @@ def test_pkgbase_comment_delete(client: TestClient, assert resp.status_code == int(HTTPStatus.SEE_OTHER) -def test_pkgbase_comment_delete_unauthorized(client: TestClient, - maintainer: User, - package: Package, - comment: PackageComment): +def test_pkgbase_comment_delete_unauthorized( + client: TestClient, maintainer: User, package: Package, comment: PackageComment +): # Test the unauthorized case of comment deletion. cookies = {"AURSID": maintainer.login(Request(), "testPassword")} pkgbasename = package.PackageBase.Name @@ -537,9 +558,9 @@ def test_pkgbase_comment_delete_unauthorized(client: TestClient, assert resp.status_code == int(HTTPStatus.UNAUTHORIZED) -def test_pkgbase_comment_delete_not_found(client: TestClient, - maintainer: User, - package: Package): +def test_pkgbase_comment_delete_not_found( + client: TestClient, maintainer: User, package: Package +): cookies = {"AURSID": maintainer.login(Request(), "testPassword")} comment_id = 12345 # Non-existing comment. pkgbasename = package.PackageBase.Name @@ -549,9 +570,9 @@ def test_pkgbase_comment_delete_not_found(client: TestClient, assert resp.status_code == int(HTTPStatus.NOT_FOUND) -def test_pkgbase_comment_undelete_not_found(client: TestClient, - maintainer: User, - package: Package): +def test_pkgbase_comment_undelete_not_found( + client: TestClient, maintainer: User, package: Package +): cookies = {"AURSID": maintainer.login(Request(), "testPassword")} comment_id = 12345 # Non-existing comment. 
pkgbasename = package.PackageBase.Name @@ -561,13 +582,18 @@ def test_pkgbase_comment_undelete_not_found(client: TestClient, assert resp.status_code == int(HTTPStatus.NOT_FOUND) -def test_pkgbase_comment_pin_as_co(client: TestClient, package: Package, - comment: PackageComment): +def test_pkgbase_comment_pin_as_co( + client: TestClient, package: Package, comment: PackageComment +): comaint = create_user("comaint1") with db.begin(): - db.create(PackageComaintainer, PackageBase=package.PackageBase, - User=comaint, Priority=1) + db.create( + PackageComaintainer, + PackageBase=package.PackageBase, + User=comaint, + Priority=1, + ) # Pin the comment. pkgbasename = package.PackageBase.Name @@ -590,10 +616,9 @@ def test_pkgbase_comment_pin_as_co(client: TestClient, package: Package, assert comment.PinnedTS == 0 -def test_pkgbase_comment_pin(client: TestClient, - maintainer: User, - package: Package, - comment: PackageComment): +def test_pkgbase_comment_pin( + client: TestClient, maintainer: User, package: Package, comment: PackageComment +): cookies = {"AURSID": maintainer.login(Request(), "testPassword")} comment_id = comment.ID pkgbasename = package.PackageBase.Name @@ -617,10 +642,9 @@ def test_pkgbase_comment_pin(client: TestClient, assert comment.PinnedTS == 0 -def test_pkgbase_comment_pin_unauthorized(client: TestClient, - user: User, - package: Package, - comment: PackageComment): +def test_pkgbase_comment_pin_unauthorized( + client: TestClient, user: User, package: Package, comment: PackageComment +): cookies = {"AURSID": user.login(Request(), "testPassword")} comment_id = comment.ID pkgbasename = package.PackageBase.Name @@ -630,10 +654,9 @@ def test_pkgbase_comment_pin_unauthorized(client: TestClient, assert resp.status_code == int(HTTPStatus.UNAUTHORIZED) -def test_pkgbase_comment_unpin_unauthorized(client: TestClient, - user: User, - package: Package, - comment: PackageComment): +def test_pkgbase_comment_unpin_unauthorized( + client: TestClient, user: User, 
package: Package, comment: PackageComment +): cookies = {"AURSID": user.login(Request(), "testPassword")} comment_id = comment.ID pkgbasename = package.PackageBase.Name @@ -651,8 +674,7 @@ def test_pkgbase_comaintainers_not_found(client: TestClient, maintainer: User): assert resp.status_code == int(HTTPStatus.NOT_FOUND) -def test_pkgbase_comaintainers_post_not_found(client: TestClient, - maintainer: User): +def test_pkgbase_comaintainers_post_not_found(client: TestClient, maintainer: User): cookies = {"AURSID": maintainer.login(Request(), "testPassword")} endpoint = "/pkgbase/fake/comaintainers" with client as request: @@ -660,8 +682,9 @@ def test_pkgbase_comaintainers_post_not_found(client: TestClient, assert resp.status_code == int(HTTPStatus.NOT_FOUND) -def test_pkgbase_comaintainers_unauthorized(client: TestClient, user: User, - package: Package): +def test_pkgbase_comaintainers_unauthorized( + client: TestClient, user: User, package: Package +): pkgbase = package.PackageBase endpoint = f"/pkgbase/{pkgbase.Name}/comaintainers" cookies = {"AURSID": user.login(Request(), "testPassword")} @@ -671,9 +694,9 @@ def test_pkgbase_comaintainers_unauthorized(client: TestClient, user: User, assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" -def test_pkgbase_comaintainers_post_unauthorized(client: TestClient, - user: User, - package: Package): +def test_pkgbase_comaintainers_post_unauthorized( + client: TestClient, user: User, package: Package +): pkgbase = package.PackageBase endpoint = f"/pkgbase/{pkgbase.Name}/comaintainers" cookies = {"AURSID": user.login(Request(), "testPassword")} @@ -683,16 +706,16 @@ def test_pkgbase_comaintainers_post_unauthorized(client: TestClient, assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" -def test_pkgbase_comaintainers_post_invalid_user(client: TestClient, - maintainer: User, - package: Package): +def test_pkgbase_comaintainers_post_invalid_user( + client: TestClient, maintainer: User, package: Package 
+): pkgbase = package.PackageBase endpoint = f"/pkgbase/{pkgbase.Name}/comaintainers" cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with client as request: - resp = request.post(endpoint, data={ - "users": "\nfake\n" - }, cookies=cookies, allow_redirects=False) + resp = request.post( + endpoint, data={"users": "\nfake\n"}, cookies=cookies, allow_redirects=False + ) assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -700,8 +723,9 @@ def test_pkgbase_comaintainers_post_invalid_user(client: TestClient, assert error.text.strip() == "Invalid user name: fake" -def test_pkgbase_comaintainers(client: TestClient, user: User, - maintainer: User, package: Package): +def test_pkgbase_comaintainers( + client: TestClient, user: User, maintainer: User, package: Package +): pkgbase = package.PackageBase endpoint = f"/pkgbase/{pkgbase.Name}/comaintainers" cookies = {"AURSID": maintainer.login(Request(), "testPassword")} @@ -709,17 +733,23 @@ def test_pkgbase_comaintainers(client: TestClient, user: User, # Start off by adding user as a comaintainer to package. # The maintainer username given should be ignored. with client as request: - resp = request.post(endpoint, data={ - "users": f"\n{user.Username}\n{maintainer.Username}\n" - }, cookies=cookies, allow_redirects=False) + resp = request.post( + endpoint, + data={"users": f"\n{user.Username}\n{maintainer.Username}\n"}, + cookies=cookies, + allow_redirects=False, + ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" # Do it again to exercise the last_priority bump path. 
with client as request: - resp = request.post(endpoint, data={ - "users": f"\n{user.Username}\n{maintainer.Username}\n" - }, cookies=cookies, allow_redirects=False) + resp = request.post( + endpoint, + data={"users": f"\n{user.Username}\n{maintainer.Username}\n"}, + cookies=cookies, + allow_redirects=False, + ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" @@ -736,9 +766,9 @@ def test_pkgbase_comaintainers(client: TestClient, user: User, # Finish off by removing all the comaintainers. with client as request: - resp = request.post(endpoint, data={ - "users": str() - }, cookies=cookies, allow_redirects=False) + resp = request.post( + endpoint, data={"users": str()}, cookies=cookies, allow_redirects=False + ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" @@ -774,15 +804,15 @@ def test_pkgbase_request(client: TestClient, user: User, package: Package): def test_pkgbase_request_post_not_found(client: TestClient, user: User): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post("/pkgbase/fake/request", data={ - "type": "fake" - }, cookies=cookies) + resp = request.post( + "/pkgbase/fake/request", data={"type": "fake"}, cookies=cookies + ) assert resp.status_code == int(HTTPStatus.NOT_FOUND) -def test_pkgbase_request_post_invalid_type(client: TestClient, - user: User, - package: Package): +def test_pkgbase_request_post_invalid_type( + client: TestClient, user: User, package: Package +): endpoint = f"/pkgbase/{package.PackageBase.Name}/request" cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: @@ -790,16 +820,20 @@ def test_pkgbase_request_post_invalid_type(client: TestClient, assert resp.status_code == int(HTTPStatus.BAD_REQUEST) -def test_pkgbase_request_post_no_comment_error(client: TestClient, - user: User, - package: Package): +def 
test_pkgbase_request_post_no_comment_error( + client: TestClient, user: User, package: Package +): endpoint = f"/pkgbase/{package.PackageBase.Name}/request" cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post(endpoint, data={ - "type": "deletion", - "comments": "" # An empty comment field causes an error. - }, cookies=cookies) + resp = request.post( + endpoint, + data={ + "type": "deletion", + "comments": "", # An empty comment field causes an error. + }, + cookies=cookies, + ) assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -808,17 +842,22 @@ def test_pkgbase_request_post_no_comment_error(client: TestClient, assert error.text.strip() == expected -def test_pkgbase_request_post_merge_not_found_error(client: TestClient, - user: User, - package: Package): +def test_pkgbase_request_post_merge_not_found_error( + client: TestClient, user: User, package: Package +): endpoint = f"/pkgbase/{package.PackageBase.Name}/request" cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post(endpoint, data={ - "type": "merge", - "merge_into": "fake", # There is no PackageBase.Name "fake" - "comments": "We want to merge this." 
- }, cookies=cookies, allow_redirects=False) + resp = request.post( + endpoint, + data={ + "type": "merge", + "merge_into": "fake", # There is no PackageBase.Name "fake" + "comments": "We want to merge this.", + }, + cookies=cookies, + allow_redirects=False, + ) assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -827,17 +866,22 @@ def test_pkgbase_request_post_merge_not_found_error(client: TestClient, assert error.text.strip() == expected -def test_pkgbase_request_post_merge_no_merge_into_error(client: TestClient, - user: User, - package: Package): +def test_pkgbase_request_post_merge_no_merge_into_error( + client: TestClient, user: User, package: Package +): endpoint = f"/pkgbase/{package.PackageBase.Name}/request" cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post(endpoint, data={ - "type": "merge", - "merge_into": "", # There is no PackageBase.Name "fake" - "comments": "We want to merge this." - }, cookies=cookies, allow_redirects=False) + resp = request.post( + endpoint, + data={ + "type": "merge", + "merge_into": "", # There is no PackageBase.Name "fake" + "comments": "We want to merge this.", + }, + cookies=cookies, + allow_redirects=False, + ) assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -846,16 +890,22 @@ def test_pkgbase_request_post_merge_no_merge_into_error(client: TestClient, assert error.text.strip() == expected -def test_pkgbase_request_post_merge_self_error(client: TestClient, user: User, - package: Package): +def test_pkgbase_request_post_merge_self_error( + client: TestClient, user: User, package: Package +): endpoint = f"/pkgbase/{package.PackageBase.Name}/request" cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post(endpoint, data={ - "type": "merge", - "merge_into": package.PackageBase.Name, - "comments": "We want to merge this." 
- }, cookies=cookies, allow_redirects=False) + resp = request.post( + endpoint, + data={ + "type": "merge", + "merge_into": package.PackageBase.Name, + "comments": "We want to merge this.", + }, + cookies=cookies, + allow_redirects=False, + ) assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -864,8 +914,9 @@ def test_pkgbase_request_post_merge_self_error(client: TestClient, user: User, assert error.text.strip() == expected -def test_pkgbase_flag(client: TestClient, user: User, maintainer: User, - package: Package): +def test_pkgbase_flag( + client: TestClient, user: User, maintainer: User, package: Package +): pkgbase = package.PackageBase # We shouldn't have flagged the package yet; assert so. @@ -882,8 +933,9 @@ def test_pkgbase_flag(client: TestClient, user: User, maintainer: User, # Now, let's check the /pkgbase/{name}/flag-comment route. flag_comment_endpoint = f"/pkgbase/{pkgbase.Name}/flag-comment" with client as request: - resp = request.get(flag_comment_endpoint, cookies=cookies, - allow_redirects=False) + resp = request.get( + flag_comment_endpoint, cookies=cookies, allow_redirects=False + ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" @@ -894,9 +946,7 @@ def test_pkgbase_flag(client: TestClient, user: User, maintainer: User, # Flag it with a valid comment. with client as request: - resp = request.post(endpoint, data={ - "comments": "Test" - }, cookies=cookies) + resp = request.post(endpoint, data={"comments": "Test"}, cookies=cookies) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert pkgbase.Flagger == user assert pkgbase.FlaggerComment == "Test" @@ -907,8 +957,9 @@ def test_pkgbase_flag(client: TestClient, user: User, maintainer: User, # Now, let's check the /pkgbase/{name}/flag-comment route. 
flag_comment_endpoint = f"/pkgbase/{pkgbase.Name}/flag-comment" with client as request: - resp = request.get(flag_comment_endpoint, cookies=cookies, - allow_redirects=False) + resp = request.get( + flag_comment_endpoint, cookies=cookies, allow_redirects=False + ) assert resp.status_code == int(HTTPStatus.OK) # Now try to perform a get; we should be redirected because @@ -918,10 +969,13 @@ def test_pkgbase_flag(client: TestClient, user: User, maintainer: User, assert resp.status_code == int(HTTPStatus.SEE_OTHER) with db.begin(): - user2 = db.create(User, Username="test2", - Email="test2@example.org", - Passwd="testPassword", - AccountType=user.AccountType) + user2 = db.create( + User, + Username="test2", + Email="test2@example.org", + Passwd="testPassword", + AccountType=user.AccountType, + ) # Now, test that the 'user2' user can't unflag it, because they # didn't flag it to begin with. @@ -941,9 +995,9 @@ def test_pkgbase_flag(client: TestClient, user: User, maintainer: User, # Flag it again. with client as request: - resp = request.post(f"/pkgbase/{pkgbase.Name}/flag", data={ - "comments": "Test" - }, cookies=cookies) + resp = request.post( + f"/pkgbase/{pkgbase.Name}/flag", data={"comments": "Test"}, cookies=cookies + ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) # Now, unflag it for real. @@ -961,16 +1015,17 @@ def test_pkgbase_flag_vcs(client: TestClient, user: User, package: Package): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.get(f"/pkgbase/{package.PackageBase.Name}/flag", - cookies=cookies) + resp = request.get(f"/pkgbase/{package.PackageBase.Name}/flag", cookies=cookies) assert resp.status_code == int(HTTPStatus.OK) - expected = ("This seems to be a VCS package. Please do " - "not flag it out-of-date if the package " - "version in the AUR does not match the most recent commit. 
" - "Flagging this package should only be done if the sources " - "moved or changes in the PKGBUILD are required because of " - "recent upstream changes.") + expected = ( + "This seems to be a VCS package. Please do " + "not flag it out-of-date if the package " + "version in the AUR does not match the most recent commit. " + "Flagging this package should only be done if the sources " + "moved or changes in the PKGBUILD are required because of " + "recent upstream changes." + ) assert expected in resp.text @@ -978,9 +1033,7 @@ def test_pkgbase_notify(client: TestClient, user: User, package: Package): pkgbase = package.PackageBase # We have no notif record yet; assert that. - notif = pkgbase.notifications.filter( - PackageNotification.UserID == user.ID - ).first() + notif = pkgbase.notifications.filter(PackageNotification.UserID == user.ID).first() assert notif is None # Enable notifications. @@ -990,9 +1043,7 @@ def test_pkgbase_notify(client: TestClient, user: User, package: Package): resp = request.post(endpoint, cookies=cookies) assert resp.status_code == int(HTTPStatus.SEE_OTHER) - notif = pkgbase.notifications.filter( - PackageNotification.UserID == user.ID - ).first() + notif = pkgbase.notifications.filter(PackageNotification.UserID == user.ID).first() assert notif is not None # Disable notifications. 
@@ -1001,9 +1052,7 @@ def test_pkgbase_notify(client: TestClient, user: User, package: Package): resp = request.post(endpoint, cookies=cookies) assert resp.status_code == int(HTTPStatus.SEE_OTHER) - notif = pkgbase.notifications.filter( - PackageNotification.UserID == user.ID - ).first() + notif = pkgbase.notifications.filter(PackageNotification.UserID == user.ID).first() assert notif is None @@ -1036,9 +1085,9 @@ def test_pkgbase_vote(client: TestClient, user: User, package: Package): assert pkgbase.NumVotes == 0 -def test_pkgbase_disown_as_sole_maintainer(client: TestClient, - maintainer: User, - package: Package): +def test_pkgbase_disown_as_sole_maintainer( + client: TestClient, maintainer: User, package: Package +): cookies = {"AURSID": maintainer.login(Request(), "testPassword")} pkgbase = package.PackageBase endpoint = f"/pkgbase/{pkgbase.Name}/disown" @@ -1049,26 +1098,23 @@ def test_pkgbase_disown_as_sole_maintainer(client: TestClient, assert resp.status_code == int(HTTPStatus.SEE_OTHER) -def test_pkgbase_disown_as_maint_with_comaint(client: TestClient, - user: User, - maintainer: User, - package: Package): - """ When disowning as a maintainer, the lowest priority comaintainer - is promoted to maintainer. 
""" +def test_pkgbase_disown_as_maint_with_comaint( + client: TestClient, user: User, maintainer: User, package: Package +): + """When disowning as a maintainer, the lowest priority comaintainer + is promoted to maintainer.""" pkgbase = package.PackageBase endp = f"/pkgbase/{pkgbase.Name}/disown" post_data = {"confirm": True} with db.begin(): - db.create(PackageComaintainer, - PackageBase=pkgbase, - User=user, - Priority=1) + db.create(PackageComaintainer, PackageBase=pkgbase, User=user, Priority=1) maint_cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with client as request: - resp = request.post(endp, data=post_data, cookies=maint_cookies, - allow_redirects=True) + resp = request.post( + endp, data=post_data, cookies=maint_cookies, allow_redirects=True + ) assert resp.status_code == int(HTTPStatus.OK) package = db.refresh(package) @@ -1078,8 +1124,13 @@ def test_pkgbase_disown_as_maint_with_comaint(client: TestClient, assert pkgbase.comaintainers.count() == 0 -def test_pkgbase_disown(client: TestClient, user: User, maintainer: User, - comaintainer: User, package: Package): +def test_pkgbase_disown( + client: TestClient, + user: User, + maintainer: User, + comaintainer: User, + package: Package, +): maint_cookies = {"AURSID": maintainer.login(Request(), "testPassword")} comaint_cookies = {"AURSID": comaintainer.login(Request(), "testPassword")} user_cookies = {"AURSID": user.login(Request(), "testPassword")} @@ -1088,21 +1139,18 @@ def test_pkgbase_disown(client: TestClient, user: User, maintainer: User, endpoint = f"{pkgbase_endp}/disown" with db.begin(): - db.create(PackageComaintainer, - User=comaintainer, - PackageBase=pkgbase, - Priority=1) + db.create( + PackageComaintainer, User=comaintainer, PackageBase=pkgbase, Priority=1 + ) # GET as a normal user, which is rejected for lack of credentials. 
with client as request: - resp = request.get(endpoint, cookies=user_cookies, - allow_redirects=False) + resp = request.get(endpoint, cookies=user_cookies, allow_redirects=False) assert resp.status_code == int(HTTPStatus.SEE_OTHER) # GET as a comaintainer. with client as request: - resp = request.get(endpoint, cookies=comaint_cookies, - allow_redirects=False) + resp = request.get(endpoint, cookies=comaint_cookies, allow_redirects=False) assert resp.status_code == int(HTTPStatus.OK) # Ensure that the comaintainer can see "Disown Package" link @@ -1146,8 +1194,9 @@ def test_pkgbase_disown(client: TestClient, user: User, maintainer: User, assert resp.status_code == int(HTTPStatus.SEE_OTHER) -def test_pkgbase_adopt(client: TestClient, user: User, tu_user: User, - maintainer: User, package: Package): +def test_pkgbase_adopt( + client: TestClient, user: User, tu_user: User, maintainer: User, package: Package +): # Unset the maintainer as if package is orphaned. with db.begin(): package.PackageBase.Maintainer = None @@ -1165,22 +1214,19 @@ def test_pkgbase_adopt(client: TestClient, user: User, tu_user: User, # Try to adopt it when it already has a maintainer; nothing changes. user_cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post(endpoint, cookies=user_cookies, - allow_redirects=False) + resp = request.post(endpoint, cookies=user_cookies, allow_redirects=False) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert package.PackageBase.Maintainer == maintainer # Steal the package as a TU. 
tu_cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - resp = request.post(endpoint, cookies=tu_cookies, - allow_redirects=False) + resp = request.post(endpoint, cookies=tu_cookies, allow_redirects=False) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert package.PackageBase.Maintainer == tu_user -def test_pkgbase_delete_unauthorized(client: TestClient, user: User, - package: Package): +def test_pkgbase_delete_unauthorized(client: TestClient, user: User, package: Package): pkgbase = package.PackageBase cookies = {"AURSID": user.login(Request(), "testPassword")} endpoint = f"/pkgbase/{pkgbase.Name}/delete" @@ -1219,9 +1265,7 @@ def test_pkgbase_delete(client: TestClient, tu_user: User, package: Package): assert resp.status_code == int(HTTPStatus.SEE_OTHER) # Let's assert that the package base record got removed. - record = db.query(PackageBase).filter( - PackageBase.Name == pkgbase.Name - ).first() + record = db.query(PackageBase).filter(PackageBase.Name == pkgbase.Name).first() assert record is None # Two emails should've been sent out; an autogenerated @@ -1234,9 +1278,9 @@ def test_pkgbase_delete(client: TestClient, tu_user: User, package: Package): assert re.match(expr, subject) -def test_pkgbase_delete_with_request(client: TestClient, tu_user: User, - pkgbase: PackageBase, - pkgreq: PackageRequest): +def test_pkgbase_delete_with_request( + client: TestClient, tu_user: User, pkgbase: PackageBase, pkgreq: PackageRequest +): # TODO: Test that a previously existing request gets Accepted when # a TU deleted the package. 
@@ -1257,12 +1301,15 @@ def test_pkgbase_delete_with_request(client: TestClient, tu_user: User, assert re.match(expr, email.headers.get("Subject")) -def test_packages_post_unknown_action(client: TestClient, user: User, - package: Package): +def test_packages_post_unknown_action(client: TestClient, user: User, package: Package): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post("/packages", data={"action": "unknown"}, - cookies=cookies, allow_redirects=False) + resp = request.post( + "/packages", + data={"action": "unknown"}, + cookies=cookies, + allow_redirects=False, + ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) @@ -1274,8 +1321,12 @@ def test_packages_post_error(client: TestClient, user: User, package: Package): with mock.patch.dict("aurweb.routers.packages.PACKAGE_ACTIONS", actions): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post("/packages", data={"action": "stub"}, - cookies=cookies, allow_redirects=False) + resp = request.post( + "/packages", + data={"action": "stub"}, + cookies=cookies, + allow_redirects=False, + ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) @@ -1291,8 +1342,12 @@ def test_packages_post(client: TestClient, user: User, package: Package): with mock.patch.dict("aurweb.routers.packages.PACKAGE_ACTIONS", actions): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post("/packages", data={"action": "stub"}, - cookies=cookies, allow_redirects=False) + resp = request.post( + "/packages", + data={"action": "stub"}, + cookies=cookies, + allow_redirects=False, + ) assert resp.status_code == int(HTTPStatus.OK) errors = get_successes(resp.text) @@ -1300,8 +1355,7 @@ def test_packages_post(client: TestClient, user: User, package: Package): assert errors[0].text.strip() == expected -def test_pkgbase_merge_unauthorized(client: TestClient, 
user: User, - package: Package): +def test_pkgbase_merge_unauthorized(client: TestClient, user: User, package: Package): cookies = {"AURSID": user.login(Request(), "testPassword")} endpoint = f"/pkgbase/{package.PackageBase.Name}/merge" with client as request: @@ -1318,8 +1372,9 @@ def test_pkgbase_merge(client: TestClient, tu_user: User, package: Package): assert not get_errors(resp.text) -def test_pkgbase_merge_post_unauthorized(client: TestClient, user: User, - package: Package): +def test_pkgbase_merge_post_unauthorized( + client: TestClient, user: User, package: Package +): cookies = {"AURSID": user.login(Request(), "testPassword")} endpoint = f"/pkgbase/{package.PackageBase.Name}/merge" with client as request: @@ -1327,54 +1382,62 @@ def test_pkgbase_merge_post_unauthorized(client: TestClient, user: User, assert resp.status_code == int(HTTPStatus.UNAUTHORIZED) -def test_pkgbase_merge_post_unconfirmed(client: TestClient, tu_user: User, - package: Package): +def test_pkgbase_merge_post_unconfirmed( + client: TestClient, tu_user: User, package: Package +): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} endpoint = f"/pkgbase/{package.PackageBase.Name}/merge" with client as request: resp = request.post(endpoint, cookies=cookies) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) - expected = ("The selected packages have not been deleted, " - "check the confirmation checkbox.") + expected = ( + "The selected packages have not been deleted, " + "check the confirmation checkbox." 
+ ) assert errors[0].text.strip() == expected -def test_pkgbase_merge_post_invalid_into(client: TestClient, tu_user: User, - package: Package): +def test_pkgbase_merge_post_invalid_into( + client: TestClient, tu_user: User, package: Package +): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} endpoint = f"/pkgbase/{package.PackageBase.Name}/merge" with client as request: - resp = request.post(endpoint, data={ - "into": "not_real", - "confirm": True - }, cookies=cookies) + resp = request.post( + endpoint, data={"into": "not_real", "confirm": True}, cookies=cookies + ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "Cannot find package to merge votes and comments into." assert errors[0].text.strip() == expected -def test_pkgbase_merge_post_self_invalid(client: TestClient, tu_user: User, - package: Package): +def test_pkgbase_merge_post_self_invalid( + client: TestClient, tu_user: User, package: Package +): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} endpoint = f"/pkgbase/{package.PackageBase.Name}/merge" with client as request: - resp = request.post(endpoint, data={ - "into": package.PackageBase.Name, - "confirm": True - }, cookies=cookies) + resp = request.post( + endpoint, + data={"into": package.PackageBase.Name, "confirm": True}, + cookies=cookies, + ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "Cannot merge a package base with itself." 
assert errors[0].text.strip() == expected -def test_pkgbase_merge_post(client: TestClient, tu_user: User, - package: Package, - pkgbase: PackageBase, - target: PackageBase, - pkgreq: PackageRequest): +def test_pkgbase_merge_post( + client: TestClient, + tu_user: User, + package: Package, + pkgbase: PackageBase, + target: PackageBase, + pkgreq: PackageRequest, +): pkgname = package.Name pkgbasename = pkgbase.Name @@ -1401,9 +1464,9 @@ def test_pkgbase_merge_post(client: TestClient, tu_user: User, # Comment on the package. endpoint = f"/pkgbase/{package.PackageBase.Name}/comments" with client as request: - resp = request.post(endpoint, data={ - "comment": "Test comment." - }, cookies=cookies) + resp = request.post( + endpoint, data={"comment": "Test comment."}, cookies=cookies + ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) # Save these relationships for later comparison. @@ -1414,10 +1477,9 @@ def test_pkgbase_merge_post(client: TestClient, tu_user: User, # Merge the package into target. endpoint = f"/pkgbase/{package.PackageBase.Name}/merge" with client as request: - resp = request.post(endpoint, data={ - "into": target.Name, - "confirm": True - }, cookies=cookies) + resp = request.post( + endpoint, data={"into": target.Name, "confirm": True}, cookies=cookies + ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) loc = resp.headers.get("location") assert loc == f"/pkgbase/{target.Name}" @@ -1442,11 +1504,17 @@ def test_pkgbase_merge_post(client: TestClient, tu_user: User, assert pkgreq.Closer is not None # A PackageRequest is always created when merging this way. 
- pkgreq = db.query(PackageRequest).filter( - and_(PackageRequest.ReqTypeID == MERGE_ID, - PackageRequest.PackageBaseName == pkgbasename, - PackageRequest.MergeBaseName == target.Name) - ).first() + pkgreq = ( + db.query(PackageRequest) + .filter( + and_( + PackageRequest.ReqTypeID == MERGE_ID, + PackageRequest.PackageBaseName == pkgbasename, + PackageRequest.MergeBaseName == target.Name, + ) + ) + .first() + ) assert pkgreq is not None @@ -1464,9 +1532,9 @@ def test_pkgbase_keywords(client: TestClient, user: User, package: Package): cookies = {"AURSID": maint.login(Request(), "testPassword")} post_endpoint = f"{endpoint}/keywords" with client as request: - resp = request.post(post_endpoint, data={ - "keywords": "abc test" - }, cookies=cookies) + resp = request.post( + post_endpoint, data={"keywords": "abc test"}, cookies=cookies + ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) with client as request: @@ -1495,9 +1563,11 @@ def test_pkgbase_empty_keywords(client: TestClient, user: User, package: Package cookies = {"AURSID": maint.login(Request(), "testPassword")} post_endpoint = f"{endpoint}/keywords" with client as request: - resp = request.post(post_endpoint, data={ - "keywords": "abc test foo bar " - }, cookies=cookies) + resp = request.post( + post_endpoint, + data={"keywords": "abc test foo bar "}, + cookies=cookies, + ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) with client as request: @@ -1514,8 +1584,9 @@ def test_pkgbase_empty_keywords(client: TestClient, user: User, package: Package def test_unauthorized_pkgbase_keywords(client: TestClient, package: Package): with db.begin(): - user = db.create(User, Username="random_user", Email="random_user", - Passwd="testPassword") + user = db.create( + User, Username="random_user", Email="random_user", Passwd="testPassword" + ) cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: @@ -1525,20 +1596,25 @@ def test_unauthorized_pkgbase_keywords(client: TestClient, 
package: Package): assert response.status_code == HTTPStatus.UNAUTHORIZED -def test_independent_user_unflag(client: TestClient, user: User, - package: Package): +def test_independent_user_unflag(client: TestClient, user: User, package: Package): with db.begin(): - flagger = db.create(User, Username="test_flagger", - Email="test_flagger@example.com", - Passwd="testPassword") + flagger = db.create( + User, + Username="test_flagger", + Email="test_flagger@example.com", + Passwd="testPassword", + ) pkgbase = package.PackageBase cookies = {"AURSID": flagger.login(Request(), "testPassword")} with client as request: endp = f"/pkgbase/{pkgbase.Name}/flag" - response = request.post(endp, data={ - "comments": "This thing needs a flag!" - }, cookies=cookies, allow_redirects=True) + response = request.post( + endp, + data={"comments": "This thing needs a flag!"}, + cookies=cookies, + allow_redirects=True, + ) assert response.status_code == HTTPStatus.OK # At this point, we've flagged it as `flagger`. 
diff --git a/test/test_pkgmaint.py b/test/test_pkgmaint.py index da758c22..a0fece78 100644 --- a/test/test_pkgmaint.py +++ b/test/test_pkgmaint.py @@ -14,8 +14,13 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - Passwd="testPassword", AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user @@ -26,11 +31,12 @@ def packages(user: User) -> list[Package]: now = time.utcnow() with db.begin(): for i in range(5): - pkgbase = db.create(PackageBase, Name=f"pkg_{i}", - SubmittedTS=now, - ModifiedTS=now) - pkg = db.create(Package, PackageBase=pkgbase, - Name=f"pkg_{i}", Version=f"{i}.0") + pkgbase = db.create( + PackageBase, Name=f"pkg_{i}", SubmittedTS=now, ModifiedTS=now + ) + pkg = db.create( + Package, PackageBase=pkgbase, Name=f"pkg_{i}", Version=f"{i}.0" + ) output.append(pkg) yield output @@ -48,7 +54,7 @@ def test_pkgmaint(packages: list[Package]): # Modify the first package so it's out of date and gets deleted. with db.begin(): # Reduce SubmittedTS by a day + 10 seconds. - packages[0].PackageBase.SubmittedTS -= (86400 + 10) + packages[0].PackageBase.SubmittedTS -= 86400 + 10 # Run pkgmaint. 
pkgmaint.main() diff --git a/test/test_ratelimit.py b/test/test_ratelimit.py index 859adea9..20528847 100644 --- a/test/test_ratelimit.py +++ b/test/test_ratelimit.py @@ -1,7 +1,6 @@ from unittest import mock import pytest - from redis.client import Pipeline from aurweb import config, db, logging @@ -49,6 +48,7 @@ def mock_config_getboolean(return_value: int = 0): if section == "ratelimit" and key == "cache": return return_value return config_getboolean(section, key) + return fn @@ -60,17 +60,22 @@ def mock_config_get(return_value: str = "none"): if section == "options" and key == "cache": return return_value return config_get(section, key) + return fn @mock.patch("aurweb.config.getint", side_effect=mock_config_getint) @mock.patch("aurweb.config.getboolean", side_effect=mock_config_getboolean(1)) @mock.patch("aurweb.config.get", side_effect=mock_config_get("none")) -def test_ratelimit_redis(get: mock.MagicMock, getboolean: mock.MagicMock, - getint: mock.MagicMock, pipeline: Pipeline): - """ This test will only cover aurweb.ratelimit's Redis +def test_ratelimit_redis( + get: mock.MagicMock, + getboolean: mock.MagicMock, + getint: mock.MagicMock, + pipeline: Pipeline, +): + """This test will only cover aurweb.ratelimit's Redis path if a real Redis server is configured. Otherwise, - it'll use the database. """ + it'll use the database.""" # We'll need a Request for everything here. 
request = Request() @@ -96,8 +101,12 @@ def test_ratelimit_redis(get: mock.MagicMock, getboolean: mock.MagicMock, @mock.patch("aurweb.config.getint", side_effect=mock_config_getint) @mock.patch("aurweb.config.getboolean", side_effect=mock_config_getboolean(0)) @mock.patch("aurweb.config.get", side_effect=mock_config_get("none")) -def test_ratelimit_db(get: mock.MagicMock, getboolean: mock.MagicMock, - getint: mock.MagicMock, pipeline: Pipeline): +def test_ratelimit_db( + get: mock.MagicMock, + getboolean: mock.MagicMock, + getint: mock.MagicMock, + pipeline: Pipeline, +): # We'll need a Request for everything here. request = Request() diff --git a/test/test_redis.py b/test/test_redis.py index 82aebb57..a66cd204 100644 --- a/test/test_redis.py +++ b/test/test_redis.py @@ -3,13 +3,13 @@ from unittest import mock import pytest import aurweb.config - from aurweb.redis import redis_connection @pytest.fixture def rediss(): - """ Create a RedisStub. """ + """Create a RedisStub.""" + def mock_get(section, key): return "none" diff --git a/test/test_rendercomment.py b/test/test_rendercomment.py index bf4009fd..5b7ff5ac 100644 --- a/test/test_rendercomment.py +++ b/test/test_rendercomment.py @@ -31,8 +31,13 @@ def setup(db_test, git: GitRepository): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - Passwd=str(), AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + Passwd=str(), + AccountTypeID=USER_ID, + ) yield user @@ -40,24 +45,32 @@ def user() -> User: def pkgbase(user: User) -> PackageBase: now = time.utcnow() with db.begin(): - pkgbase = db.create(PackageBase, Packager=user, Name="pkgbase_0", - SubmittedTS=now, ModifiedTS=now) + pkgbase = db.create( + PackageBase, + Packager=user, + Name="pkgbase_0", + SubmittedTS=now, + ModifiedTS=now, + ) yield pkgbase @pytest.fixture def package(pkgbase: PackageBase) -> Package: with db.begin(): - package = 
db.create(Package, PackageBase=pkgbase, - Name=pkgbase.Name, Version="1.0") + package = db.create( + Package, PackageBase=pkgbase, Name=pkgbase.Name, Version="1.0" + ) yield package -def create_comment(user: User, pkgbase: PackageBase, comments: str, - render: bool = True): +def create_comment( + user: User, pkgbase: PackageBase, comments: str, render: bool = True +): with db.begin(): - comment = db.create(PackageComment, User=user, - PackageBase=pkgbase, Comments=comments) + comment = db.create( + PackageComment, User=user, PackageBase=pkgbase, Comments=comments + ) if render: update_comment_render(comment) return comment @@ -86,8 +99,7 @@ def test_rendercomment_main(user: User, pkgbase: PackageBase): def test_markdown_conversion(user: User, pkgbase: PackageBase): text = "*Hello* [world](https://aur.archlinux.org)!" comment = create_comment(user, pkgbase, text) - expected = ('

    Hello ' - 'world!

    ') + expected = "

    Hello " 'world!

    ' assert comment.RenderedComment == expected @@ -109,7 +121,7 @@ Visit [Arch Linux][arch]. [arch]: https://www.archlinux.org/\ """ comment = create_comment(user, pkgbase, text) - expected = '''\ + expected = """\

    Visit \ https://www.archlinux.org/#_test_. Visit https://www.archlinux.org/. @@ -117,7 +129,7 @@ Visit https://www.archlinux.org/. Visit https://www.archlinux.org/. Visit Arch Linux. Visit Arch Linux.

    \ -''' +""" assert comment.RenderedComment == expected diff --git a/test/test_requests.py b/test/test_requests.py index b7ab3835..fd831674 100644 --- a/test/test_requests.py +++ b/test/test_requests.py @@ -1,10 +1,8 @@ import re - from http import HTTPStatus from logging import DEBUG import pytest - from fastapi import HTTPException from fastapi.testclient import TestClient @@ -24,13 +22,13 @@ from aurweb.testing.requests import Request @pytest.fixture(autouse=True) def setup(db_test) -> None: - """ Setup the database. """ + """Setup the database.""" return @pytest.fixture def client() -> TestClient: - """ Yield a TestClient. """ + """Yield a TestClient.""" yield TestClient(app=asgi.app) @@ -43,21 +41,26 @@ def create_user(username: str, email: str) -> User: :return: User instance """ with db.begin(): - user = db.create(User, Username=username, Email=email, - Passwd="testPassword", AccountTypeID=USER_ID) + user = db.create( + User, + Username=username, + Email=email, + Passwd="testPassword", + AccountTypeID=USER_ID, + ) return user @pytest.fixture def user() -> User: - """ Yield a User instance. """ + """Yield a User instance.""" user = create_user("test", "test@example.org") yield user @pytest.fixture def auser(user: User) -> User: - """ Yield an authenticated User instance. """ + """Yield an authenticated User instance.""" cookies = {"AURSID": user.login(Request(), "testPassword")} user.cookies = cookies yield user @@ -65,14 +68,14 @@ def auser(user: User) -> User: @pytest.fixture def user2() -> User: - """ Yield a secondary non-maintainer User instance. """ + """Yield a secondary non-maintainer User instance.""" user = create_user("test2", "test2@example.org") yield user @pytest.fixture def auser2(user2: User) -> User: - """ Yield an authenticated secondary non-maintainer User instance. 
""" + """Yield an authenticated secondary non-maintainer User instance.""" cookies = {"AURSID": user2.login(Request(), "testPassword")} user2.cookies = cookies yield user2 @@ -80,31 +83,34 @@ def auser2(user2: User) -> User: @pytest.fixture def maintainer() -> User: - """ Yield a specific User used to maintain packages. """ + """Yield a specific User used to maintain packages.""" with db.begin(): - maintainer = db.create(User, Username="test_maintainer", - Email="test_maintainer@example.org", - Passwd="testPassword", - AccountTypeID=USER_ID) + maintainer = db.create( + User, + Username="test_maintainer", + Email="test_maintainer@example.org", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield maintainer @pytest.fixture def packages(maintainer: User) -> list[Package]: - """ Yield 55 packages named pkg_0 .. pkg_54. """ + """Yield 55 packages named pkg_0 .. pkg_54.""" packages_ = [] now = time.utcnow() with db.begin(): for i in range(55): - pkgbase = db.create(PackageBase, - Name=f"pkg_{i}", - Maintainer=maintainer, - Packager=maintainer, - Submitter=maintainer, - ModifiedTS=now) - package = db.create(Package, - PackageBase=pkgbase, - Name=f"pkg_{i}") + pkgbase = db.create( + PackageBase, + Name=f"pkg_{i}", + Maintainer=maintainer, + Packager=maintainer, + Submitter=maintainer, + ModifiedTS=now, + ) + package = db.create(Package, PackageBase=pkgbase, Name=f"pkg_{i}") packages_.append(package) yield packages_ @@ -115,20 +121,22 @@ def requests(user: User, packages: list[Package]) -> list[PackageRequest]: pkgreqs = [] with db.begin(): for i in range(55): - pkgreq = db.create(PackageRequest, - ReqTypeID=DELETION_ID, - User=user, - PackageBase=packages[i].PackageBase, - PackageBaseName=packages[i].Name, - Comments=f"Deletion request for pkg_{i}", - ClosureComment=str()) + pkgreq = db.create( + PackageRequest, + ReqTypeID=DELETION_ID, + User=user, + PackageBase=packages[i].PackageBase, + PackageBaseName=packages[i].Name, + Comments=f"Deletion request for pkg_{i}", 
+ ClosureComment=str(), + ) pkgreqs.append(pkgreq) yield pkgreqs @pytest.fixture def tu_user() -> User: - """ Yield an authenticated Trusted User instance. """ + """Yield an authenticated Trusted User instance.""" user = create_user("test_tu", "test_tu@example.org") with db.begin(): user.AccountTypeID = TRUSTED_USER_ID @@ -149,31 +157,38 @@ def create_pkgbase(user: User, name: str) -> PackageBase: """ now = time.utcnow() with db.begin(): - pkgbase = db.create(PackageBase, Name=name, - Maintainer=user, Packager=user, - SubmittedTS=now, ModifiedTS=now) + pkgbase = db.create( + PackageBase, + Name=name, + Maintainer=user, + Packager=user, + SubmittedTS=now, + ModifiedTS=now, + ) db.create(Package, Name=pkgbase.Name, PackageBase=pkgbase) return pkgbase @pytest.fixture def pkgbase(user: User) -> PackageBase: - """ Yield a package base. """ + """Yield a package base.""" pkgbase = create_pkgbase(user, "test-package") yield pkgbase @pytest.fixture def target(user: User) -> PackageBase: - """ Yield a merge target (package base). """ + """Yield a merge target (package base).""" with db.begin(): - target = db.create(PackageBase, Name="target-package", - Maintainer=user, Packager=user) + target = db.create( + PackageBase, Name="target-package", Maintainer=user, Packager=user + ) yield target -def create_request(reqtype_id: int, user: User, pkgbase: PackageBase, - comments: str) -> PackageRequest: +def create_request( + reqtype_id: int, user: User, pkgbase: PackageBase, comments: str +) -> PackageRequest: """ Create a package request based on `reqtype_id`, `user`, `pkgbase` and `comments`. 
@@ -186,40 +201,43 @@ def create_request(reqtype_id: int, user: User, pkgbase: PackageBase, """ now = time.utcnow() with db.begin(): - pkgreq = db.create(PackageRequest, ReqTypeID=reqtype_id, - User=user, PackageBase=pkgbase, - PackageBaseName=pkgbase.Name, - RequestTS=now, - Comments=comments, - ClosureComment=str()) + pkgreq = db.create( + PackageRequest, + ReqTypeID=reqtype_id, + User=user, + PackageBase=pkgbase, + PackageBaseName=pkgbase.Name, + RequestTS=now, + Comments=comments, + ClosureComment=str(), + ) return pkgreq @pytest.fixture def pkgreq(user: User, pkgbase: PackageBase): - """ Yield a package request. """ + """Yield a package request.""" pkgreq = create_request(DELETION_ID, user, pkgbase, "Test request.") yield pkgreq def create_notification(user: User, pkgbase: PackageBase): - """ Create a notification for a `user` on `pkgbase`. """ + """Create a notification for a `user` on `pkgbase`.""" with db.begin(): notif = db.create(PackageNotification, User=user, PackageBase=pkgbase) return notif def test_request(client: TestClient, auser: User, pkgbase: PackageBase): - """ Test the standard pkgbase request route GET method. """ + """Test the standard pkgbase request route GET method.""" endpoint = f"/pkgbase/{pkgbase.Name}/request" with client as request: resp = request.get(endpoint, cookies=auser.cookies) assert resp.status_code == int(HTTPStatus.OK) -def test_request_post_deletion(client: TestClient, auser2: User, - pkgbase: PackageBase): - """ Test the POST route for creating a deletion request works. 
""" +def test_request_post_deletion(client: TestClient, auser2: User, pkgbase: PackageBase): + """Test the POST route for creating a deletion request works.""" endpoint = f"/pkgbase/{pkgbase.Name}/request" data = {"comments": "Test request.", "type": "deletion"} with client as request: @@ -238,9 +256,10 @@ def test_request_post_deletion(client: TestClient, auser2: User, assert re.match(expr, email.headers.get("Subject")) -def test_request_post_deletion_as_maintainer(client: TestClient, auser: User, - pkgbase: PackageBase): - """ Test the POST route for creating a deletion request as maint works. """ +def test_request_post_deletion_as_maintainer( + client: TestClient, auser: User, pkgbase: PackageBase +): + """Test the POST route for creating a deletion request as maint works.""" endpoint = f"/pkgbase/{pkgbase.Name}/request" data = {"comments": "Test request.", "type": "deletion"} with client as request: @@ -267,10 +286,13 @@ def test_request_post_deletion_as_maintainer(client: TestClient, auser: User, assert re.match(expr, email.headers.get("Subject")) -def test_request_post_deletion_autoaccept(client: TestClient, auser: User, - pkgbase: PackageBase, - caplog: pytest.LogCaptureFixture): - """ Test the request route for deletion as maintainer. 
""" +def test_request_post_deletion_autoaccept( + client: TestClient, + auser: User, + pkgbase: PackageBase, + caplog: pytest.LogCaptureFixture, +): + """Test the request route for deletion as maintainer.""" caplog.set_level(DEBUG) now = time.utcnow() @@ -284,9 +306,11 @@ def test_request_post_deletion_autoaccept(client: TestClient, auser: User, resp = request.post(endpoint, data=data, cookies=auser.cookies) assert resp.status_code == int(HTTPStatus.SEE_OTHER) - pkgreq = db.query(PackageRequest).filter( - PackageRequest.PackageBaseName == pkgbase.Name - ).first() + pkgreq = ( + db.query(PackageRequest) + .filter(PackageRequest.PackageBaseName == pkgbase.Name) + .first() + ) assert pkgreq is not None assert pkgreq.ReqTypeID == DELETION_ID assert pkgreq.Status == ACCEPTED_ID @@ -310,9 +334,10 @@ def test_request_post_deletion_autoaccept(client: TestClient, auser: User, assert re.search(expr, caplog.text) -def test_request_post_merge(client: TestClient, auser: User, - pkgbase: PackageBase, target: PackageBase): - """ Test the request route for merge as maintainer. """ +def test_request_post_merge( + client: TestClient, auser: User, pkgbase: PackageBase, target: PackageBase +): + """Test the request route for merge as maintainer.""" endpoint = f"/pkgbase/{pkgbase.Name}/request" data = { "type": "merge", @@ -336,9 +361,8 @@ def test_request_post_merge(client: TestClient, auser: User, assert re.match(expr, email.headers.get("Subject")) -def test_request_post_orphan(client: TestClient, auser: User, - pkgbase: PackageBase): - """ Test the POST route for creating an orphan request works. 
""" +def test_request_post_orphan(client: TestClient, auser: User, pkgbase: PackageBase): + """Test the POST route for creating an orphan request works.""" endpoint = f"/pkgbase/{pkgbase.Name}/request" data = { "type": "orphan", @@ -361,9 +385,14 @@ def test_request_post_orphan(client: TestClient, auser: User, assert re.match(expr, email.headers.get("Subject")) -def test_deletion_request(client: TestClient, user: User, tu_user: User, - pkgbase: PackageBase, pkgreq: PackageRequest): - """ Test deleting a package with a preexisting request. """ +def test_deletion_request( + client: TestClient, + user: User, + tu_user: User, + pkgbase: PackageBase, + pkgreq: PackageRequest, +): + """Test deleting a package with a preexisting request.""" # `pkgreq`.ReqTypeID is already DELETION_ID. create_request(DELETION_ID, user, pkgbase, "Other request.") @@ -402,9 +431,8 @@ def test_deletion_request(client: TestClient, user: User, tu_user: User, assert body in email.body -def test_deletion_autorequest(client: TestClient, tu_user: User, - pkgbase: PackageBase): - """ Test deleting a package without a request. """ +def test_deletion_autorequest(client: TestClient, tu_user: User, pkgbase: PackageBase): + """Test deleting a package without a request.""" # `pkgreq`.ReqTypeID is already DELETION_ID. endpoint = f"/pkgbase/{pkgbase.Name}/delete" data = {"confirm": True} @@ -421,10 +449,15 @@ def test_deletion_autorequest(client: TestClient, tu_user: User, assert "[Autogenerated]" in email.body -def test_merge_request(client: TestClient, user: User, tu_user: User, - pkgbase: PackageBase, target: PackageBase, - pkgreq: PackageRequest): - """ Test merging a package with a pre - existing request. 
""" +def test_merge_request( + client: TestClient, + user: User, + tu_user: User, + pkgbase: PackageBase, + target: PackageBase, + pkgreq: PackageRequest, +): + """Test merging a package with a pre - existing request.""" with db.begin(): pkgreq.ReqTypeID = MERGE_ID pkgreq.MergeBaseName = target.Name @@ -473,9 +506,14 @@ def test_merge_request(client: TestClient, user: User, tu_user: User, assert "[Autogenerated]" in rejected.body -def test_merge_autorequest(client: TestClient, user: User, tu_user: User, - pkgbase: PackageBase, target: PackageBase): - """ Test merging a package without a request. """ +def test_merge_autorequest( + client: TestClient, + user: User, + tu_user: User, + pkgbase: PackageBase, + target: PackageBase, +): + """Test merging a package without a request.""" with db.begin(): pkgreq.ReqTypeID = MERGE_ID pkgreq.MergeBaseName = target.Name @@ -498,13 +536,17 @@ def test_merge_autorequest(client: TestClient, user: User, tu_user: User, assert "[Autogenerated]" in email.body -def test_orphan_request(client: TestClient, user: User, tu_user: User, - pkgbase: PackageBase, pkgreq: PackageRequest): - """ Test the standard orphan request route. 
""" +def test_orphan_request( + client: TestClient, + user: User, + tu_user: User, + pkgbase: PackageBase, + pkgreq: PackageRequest, +): + """Test the standard orphan request route.""" user2 = create_user("user2", "user2@example.org") with db.begin(): - db.create(PackageComaintainer, User=user2, - PackageBase=pkgbase, Priority=1) + db.create(PackageComaintainer, User=user2, PackageBase=pkgbase, Priority=1) idle_time = config.getint("options", "request_idle_time") now = time.utcnow() @@ -537,10 +579,9 @@ def test_orphan_request(client: TestClient, user: User, tu_user: User, assert re.match(subj, email.headers.get("Subject")) -def test_request_post_orphan_autogenerated_closure(client: TestClient, - tu_user: User, - pkgbase: PackageBase, - pkgreq: PackageRequest): +def test_request_post_orphan_autogenerated_closure( + client: TestClient, tu_user: User, pkgbase: PackageBase, pkgreq: PackageRequest +): idle_time = config.getint("options", "request_idle_time") now = time.utcnow() with db.begin(): @@ -564,10 +605,13 @@ def test_request_post_orphan_autogenerated_closure(client: TestClient, assert re.search(expr, email.body) -def test_request_post_orphan_autoaccept(client: TestClient, auser: User, - pkgbase: PackageBase, - caplog: pytest.LogCaptureFixture): - """ Test the standard pkgbase request route GET method. 
""" +def test_request_post_orphan_autoaccept( + client: TestClient, + auser: User, + pkgbase: PackageBase, + caplog: pytest.LogCaptureFixture, +): + """Test the standard pkgbase request route GET method.""" caplog.set_level(DEBUG) now = time.utcnow() auto_orphan_age = config.getint("options", "auto_orphan_age") @@ -605,8 +649,7 @@ def test_request_post_orphan_autoaccept(client: TestClient, auser: User, assert re.search(expr, caplog.text) -def test_orphan_as_maintainer(client: TestClient, auser: User, - pkgbase: PackageBase): +def test_orphan_as_maintainer(client: TestClient, auser: User, pkgbase: PackageBase): endpoint = f"/pkgbase/{pkgbase.Name}/disown" data = {"confirm": True} with client as request: @@ -620,9 +663,10 @@ def test_orphan_as_maintainer(client: TestClient, auser: User, assert pkgbase.Maintainer is None -def test_orphan_without_requests(client: TestClient, tu_user: User, - pkgbase: PackageBase): - """ Test orphans are automatically accepted past a certain date. """ +def test_orphan_without_requests( + client: TestClient, tu_user: User, pkgbase: PackageBase +): + """Test orphans are automatically accepted past a certain date.""" endpoint = f"/pkgbase/{pkgbase.Name}/disown" data = {"confirm": True} with client as request: @@ -637,7 +681,7 @@ def test_orphan_without_requests(client: TestClient, tu_user: User, def test_closure_factory_invalid_reqtype_id(): - """ Test providing an invalid reqtype_id raises NotImplementedError. 
""" + """Test providing an invalid reqtype_id raises NotImplementedError.""" automated = ClosureFactory() match = r"^Unsupported '.+' value\.$" with pytest.raises(NotImplementedError, match=match): @@ -657,19 +701,25 @@ def test_requests_unauthorized(client: TestClient): assert resp.status_code == int(HTTPStatus.SEE_OTHER) -def test_requests(client: TestClient, - tu_user: User, - packages: list[Package], - requests: list[PackageRequest]): +def test_requests( + client: TestClient, + tu_user: User, + packages: list[Package], + requests: list[PackageRequest], +): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - resp = request.get("/requests", params={ - # Pass in url query parameters O, SeB and SB to exercise - # their paths inside of the pager_nav used in this request. - "O": 0, # Page 1 - "SeB": "nd", - "SB": "n" - }, cookies=cookies) + resp = request.get( + "/requests", + params={ + # Pass in url query parameters O, SeB and SB to exercise + # their paths inside of the pager_nav used in this request. + "O": 0, # Page 1 + "SeB": "nd", + "SB": "n", + }, + cookies=cookies, + ) assert resp.status_code == int(HTTPStatus.OK) assert "Next ›" in resp.text @@ -682,9 +732,7 @@ def test_requests(client: TestClient, # Request page 2 of the requests page. with client as request: - resp = request.get("/requests", params={ - "O": 50 # Page 2 - }, cookies=cookies) + resp = request.get("/requests", params={"O": 50}, cookies=cookies) # Page 2 assert resp.status_code == int(HTTPStatus.OK) assert "‹ Previous" in resp.text @@ -695,8 +743,9 @@ def test_requests(client: TestClient, assert len(rows) == 5 # There are five records left on the second page. 
-def test_requests_selfmade(client: TestClient, user: User, - requests: list[PackageRequest]): +def test_requests_selfmade( + client: TestClient, user: User, requests: list[PackageRequest] +): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: resp = request.get("/requests", cookies=cookies) @@ -710,46 +759,52 @@ def test_requests_selfmade(client: TestClient, user: User, # Our first and only link in the last row should be "Close". for row in rows: - last_row = row.xpath('./td')[-1].xpath('./a')[0] + last_row = row.xpath("./td")[-1].xpath("./a")[0] assert last_row.text.strip() == "Close" -def test_requests_close(client: TestClient, user: User, - pkgreq: PackageRequest): +def test_requests_close(client: TestClient, user: User, pkgreq: PackageRequest): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.get(f"/requests/{pkgreq.ID}/close", cookies=cookies, - allow_redirects=False) + resp = request.get( + f"/requests/{pkgreq.ID}/close", cookies=cookies, allow_redirects=False + ) assert resp.status_code == int(HTTPStatus.OK) -def test_requests_close_unauthorized(client: TestClient, maintainer: User, - pkgreq: PackageRequest): +def test_requests_close_unauthorized( + client: TestClient, maintainer: User, pkgreq: PackageRequest +): cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with client as request: - resp = request.get(f"/requests/{pkgreq.ID}/close", cookies=cookies, - allow_redirects=False) + resp = request.get( + f"/requests/{pkgreq.ID}/close", cookies=cookies, allow_redirects=False + ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == "/" -def test_requests_close_post_unauthorized(client: TestClient, maintainer: User, - pkgreq: PackageRequest): +def test_requests_close_post_unauthorized( + client: TestClient, maintainer: User, pkgreq: PackageRequest +): cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with 
client as request: - resp = request.post(f"/requests/{pkgreq.ID}/close", data={ - "reason": ACCEPTED_ID - }, cookies=cookies, allow_redirects=False) + resp = request.post( + f"/requests/{pkgreq.ID}/close", + data={"reason": ACCEPTED_ID}, + cookies=cookies, + allow_redirects=False, + ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == "/" -def test_requests_close_post(client: TestClient, user: User, - pkgreq: PackageRequest): +def test_requests_close_post(client: TestClient, user: User, pkgreq: PackageRequest): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post(f"/requests/{pkgreq.ID}/close", - cookies=cookies, allow_redirects=False) + resp = request.post( + f"/requests/{pkgreq.ID}/close", cookies=cookies, allow_redirects=False + ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert pkgreq.Status == REJECTED_ID @@ -757,12 +812,14 @@ def test_requests_close_post(client: TestClient, user: User, assert pkgreq.ClosureComment == str() -def test_requests_close_post_rejected(client: TestClient, user: User, - pkgreq: PackageRequest): +def test_requests_close_post_rejected( + client: TestClient, user: User, pkgreq: PackageRequest +): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post(f"/requests/{pkgreq.ID}/close", - cookies=cookies, allow_redirects=False) + resp = request.post( + f"/requests/{pkgreq.ID}/close", cookies=cookies, allow_redirects=False + ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert pkgreq.Status == REJECTED_ID diff --git a/test/test_routes.py b/test/test_routes.py index 85d30c02..78b0a65b 100644 --- a/test/test_routes.py +++ b/test/test_routes.py @@ -1,11 +1,9 @@ import re import urllib.parse - from http import HTTPStatus import lxml.etree import pytest - from fastapi.testclient import TestClient from aurweb import db @@ -28,21 +26,26 @@ def client() -> TestClient: @pytest.fixture def 
user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - RealName="Test User", Passwd="testPassword", - AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + RealName="Test User", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user def test_index(client: TestClient): - """ Test the index route at '/'. """ + """Test the index route at '/'.""" with client as req: response = req.get("/") assert response.status_code == int(HTTPStatus.OK) def test_index_security_headers(client: TestClient): - """ Check for the existence of CSP, XCTO, XFO and RP security headers. + """Check for the existence of CSP, XCTO, XFO and RP security headers. CSP: Content-Security-Policy XCTO: X-Content-Type-Options @@ -60,7 +63,7 @@ def test_index_security_headers(client: TestClient): def test_favicon(client: TestClient): - """ Test the favicon route at '/favicon.ico'. """ + """Test the favicon route at '/favicon.ico'.""" with client as request: response1 = request.get("/static/images/favicon.ico") response2 = request.get("/favicon.ico") @@ -69,52 +72,38 @@ def test_favicon(client: TestClient): def test_language(client: TestClient): - """ Test the language post route as a guest user. """ - post_data = { - "set_lang": "de", - "next": "/" - } + """Test the language post route as a guest user.""" + post_data = {"set_lang": "de", "next": "/"} with client as req: response = req.post("/language", data=post_data) assert response.status_code == int(HTTPStatus.SEE_OTHER) def test_language_invalid_next(client: TestClient): - """ Test an invalid next route at '/language'. 
""" - post_data = { - "set_lang": "de", - "next": "https://evil.net" - } + """Test an invalid next route at '/language'.""" + post_data = {"set_lang": "de", "next": "https://evil.net"} with client as req: response = req.post("/language", data=post_data) assert response.status_code == int(HTTPStatus.BAD_REQUEST) def test_user_language(client: TestClient, user: User): - """ Test the language post route as an authenticated user. """ - post_data = { - "set_lang": "de", - "next": "/" - } + """Test the language post route as an authenticated user.""" + post_data = {"set_lang": "de", "next": "/"} sid = user.login(Request(), "testPassword") assert sid is not None with client as req: - response = req.post("/language", data=post_data, - cookies={"AURSID": sid}) + response = req.post("/language", data=post_data, cookies={"AURSID": sid}) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert user.LangPreference == "de" def test_language_query_params(client: TestClient): - """ Test the language post route with query params. """ + """Test the language post route with query params.""" next = urllib.parse.quote_plus("/") - post_data = { - "set_lang": "de", - "next": "/", - "q": f"next={next}" - } + post_data = {"set_lang": "de", "next": "/", "q": f"next={next}"} q = post_data.get("q") with client as req: response = req.post("/language", data=post_data) @@ -154,9 +143,13 @@ def test_nonce_csp(client: TestClient): def test_id_redirect(client: TestClient): with client as request: - response = request.get("/", params={ - "id": "test", # This param will be rewritten into Location. - "key": "value", # Test that this param persists. - "key2": "value2" # And this one. - }, allow_redirects=False) + response = request.get( + "/", + params={ + "id": "test", # This param will be rewritten into Location. + "key": "value", # Test that this param persists. + "key2": "value2", # And this one. 
+ }, + allow_redirects=False, + ) assert response.headers.get("location") == "/test?key=value&key2=value2" diff --git a/test/test_rpc.py b/test/test_rpc.py index c0861d3d..ed7e8894 100644 --- a/test/test_rpc.py +++ b/test/test_rpc.py @@ -1,17 +1,14 @@ import re - from http import HTTPStatus from unittest import mock import orjson import pytest - from fastapi.testclient import TestClient from redis.client import Pipeline import aurweb.models.dependency_type as dt import aurweb.models.relation_type as rt - from aurweb import asgi, config, db, rpc, scripts, time from aurweb.models.account_type import USER_ID from aurweb.models.dependency_type import DEPENDS_ID @@ -36,27 +33,42 @@ def client() -> TestClient: @pytest.fixture def user(db_test) -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - RealName="Test User 1", Passwd=str(), - AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + RealName="Test User 1", + Passwd=str(), + AccountTypeID=USER_ID, + ) yield user @pytest.fixture def user2() -> User: with db.begin(): - user = db.create(User, Username="user2", Email="user2@example.org", - RealName="Test User 2", Passwd=str(), - AccountTypeID=USER_ID) + user = db.create( + User, + Username="user2", + Email="user2@example.org", + RealName="Test User 2", + Passwd=str(), + AccountTypeID=USER_ID, + ) yield user @pytest.fixture def user3() -> User: with db.begin(): - user = db.create(User, Username="user3", Email="user3@example.org", - RealName="Test User 3", Passwd=str(), - AccountTypeID=USER_ID) + user = db.create( + User, + Username="user3", + Email="user3@example.org", + RealName="Test User 3", + Passwd=str(), + AccountTypeID=USER_ID, + ) yield user @@ -66,39 +78,64 @@ def packages(user: User, user2: User, user3: User) -> list[Package]: # Create package records used in our tests. 
with db.begin(): - pkgbase = db.create(PackageBase, Name="big-chungus", - Maintainer=user, Packager=user) - pkg = db.create(Package, PackageBase=pkgbase, Name=pkgbase.Name, - Description="Bunny bunny around bunny", - URL="https://example.com/") + pkgbase = db.create( + PackageBase, Name="big-chungus", Maintainer=user, Packager=user + ) + pkg = db.create( + Package, + PackageBase=pkgbase, + Name=pkgbase.Name, + Description="Bunny bunny around bunny", + URL="https://example.com/", + ) output.append(pkg) - pkgbase = db.create(PackageBase, Name="chungy-chungus", - Maintainer=user, Packager=user) - pkg = db.create(Package, PackageBase=pkgbase, Name=pkgbase.Name, - Description="Wubby wubby on wobba wuubu", - URL="https://example.com/") + pkgbase = db.create( + PackageBase, Name="chungy-chungus", Maintainer=user, Packager=user + ) + pkg = db.create( + Package, + PackageBase=pkgbase, + Name=pkgbase.Name, + Description="Wubby wubby on wobba wuubu", + URL="https://example.com/", + ) output.append(pkg) - pkgbase = db.create(PackageBase, Name="gluggly-chungus", - Maintainer=user, Packager=user) - pkg = db.create(Package, PackageBase=pkgbase, Name=pkgbase.Name, - Description="glurrba glurrba gur globba", - URL="https://example.com/") + pkgbase = db.create( + PackageBase, Name="gluggly-chungus", Maintainer=user, Packager=user + ) + pkg = db.create( + Package, + PackageBase=pkgbase, + Name=pkgbase.Name, + Description="glurrba glurrba gur globba", + URL="https://example.com/", + ) output.append(pkg) - pkgbase = db.create(PackageBase, Name="fugly-chungus", - Maintainer=user, Packager=user) + pkgbase = db.create( + PackageBase, Name="fugly-chungus", Maintainer=user, Packager=user + ) desc = "A Package belonging to a PackageBase with another name." 
- pkg = db.create(Package, PackageBase=pkgbase, Name="other-pkg", - Description=desc, URL="https://example.com") + pkg = db.create( + Package, + PackageBase=pkgbase, + Name="other-pkg", + Description=desc, + URL="https://example.com", + ) output.append(pkg) pkgbase = db.create(PackageBase, Name="woogly-chungus") - pkg = db.create(Package, PackageBase=pkgbase, Name=pkgbase.Name, - Description="wuggla woblabeloop shemashmoop", - URL="https://example.com/") + pkg = db.create( + Package, + PackageBase=pkgbase, + Name=pkgbase.Name, + Description="wuggla woblabeloop shemashmoop", + URL="https://example.com/", + ) output.append(pkg) # Setup a few more related records on the first package: @@ -108,14 +145,15 @@ def packages(user: User, user2: User, user3: User) -> list[Package]: db.create(PackageLicense, Package=output[0], License=lic) for keyword in ["big-chungus", "smol-chungus", "sizeable-chungus"]: - db.create(PackageKeyword, - PackageBase=output[0].PackageBase, - Keyword=keyword) + db.create( + PackageKeyword, PackageBase=output[0].PackageBase, Keyword=keyword + ) now = time.utcnow() for user_ in [user, user2, user3]: - db.create(PackageVote, User=user_, - PackageBase=output[0].PackageBase, VoteTS=now) + db.create( + PackageVote, User=user_, PackageBase=output[0].PackageBase, VoteTS=now + ) scripts.popupdate.run_single(output[0].PackageBase) yield output @@ -126,35 +164,45 @@ def depends(packages: list[Package]) -> list[PackageDependency]: output = [] with db.begin(): - dep = db.create(PackageDependency, - Package=packages[0], - DepTypeID=dt.DEPENDS_ID, - DepName="chungus-depends") + dep = db.create( + PackageDependency, + Package=packages[0], + DepTypeID=dt.DEPENDS_ID, + DepName="chungus-depends", + ) output.append(dep) - dep = db.create(PackageDependency, - Package=packages[1], - DepTypeID=dt.DEPENDS_ID, - DepName="chungy-depends") + dep = db.create( + PackageDependency, + Package=packages[1], + DepTypeID=dt.DEPENDS_ID, + DepName="chungy-depends", + ) 
output.append(dep) - dep = db.create(PackageDependency, - Package=packages[0], - DepTypeID=dt.OPTDEPENDS_ID, - DepName="chungus-optdepends", - DepCondition="=50") + dep = db.create( + PackageDependency, + Package=packages[0], + DepTypeID=dt.OPTDEPENDS_ID, + DepName="chungus-optdepends", + DepCondition="=50", + ) output.append(dep) - dep = db.create(PackageDependency, - Package=packages[0], - DepTypeID=dt.MAKEDEPENDS_ID, - DepName="chungus-makedepends") + dep = db.create( + PackageDependency, + Package=packages[0], + DepTypeID=dt.MAKEDEPENDS_ID, + DepName="chungus-makedepends", + ) output.append(dep) - dep = db.create(PackageDependency, - Package=packages[0], - DepTypeID=dt.CHECKDEPENDS_ID, - DepName="chungus-checkdepends") + dep = db.create( + PackageDependency, + Package=packages[0], + DepTypeID=dt.CHECKDEPENDS_ID, + DepName="chungus-checkdepends", + ) output.append(dep) yield output @@ -165,30 +213,38 @@ def relations(user: User, packages: list[Package]) -> list[PackageRelation]: output = [] with db.begin(): - rel = db.create(PackageRelation, - Package=packages[0], - RelTypeID=rt.CONFLICTS_ID, - RelName="chungus-conflicts") + rel = db.create( + PackageRelation, + Package=packages[0], + RelTypeID=rt.CONFLICTS_ID, + RelName="chungus-conflicts", + ) output.append(rel) - rel = db.create(PackageRelation, - Package=packages[1], - RelTypeID=rt.CONFLICTS_ID, - RelName="chungy-conflicts") + rel = db.create( + PackageRelation, + Package=packages[1], + RelTypeID=rt.CONFLICTS_ID, + RelName="chungy-conflicts", + ) output.append(rel) - rel = db.create(PackageRelation, - Package=packages[0], - RelTypeID=rt.PROVIDES_ID, - RelName="chungus-provides", - RelCondition="<=200") + rel = db.create( + PackageRelation, + Package=packages[0], + RelTypeID=rt.PROVIDES_ID, + RelName="chungus-provides", + RelCondition="<=200", + ) output.append(rel) - rel = db.create(PackageRelation, - Package=packages[0], - RelTypeID=rt.REPLACES_ID, - RelName="chungus-replaces", - RelCondition="<=200") + rel 
= db.create( + PackageRelation, + Package=packages[0], + RelTypeID=rt.REPLACES_ID, + RelName="chungus-replaces", + RelCondition="<=200", + ) output.append(rel) # Finally, yield the packages. @@ -238,51 +294,54 @@ def test_rpc_documentation_missing(): config.rehash() -def test_rpc_singular_info(client: TestClient, - user: User, - packages: list[Package], - depends: list[PackageDependency], - relations: list[PackageRelation]): +def test_rpc_singular_info( + client: TestClient, + user: User, + packages: list[Package], + depends: list[PackageDependency], + relations: list[PackageRelation], +): # Define expected response. pkg = packages[0] expected_data = { "version": 5, - "results": [{ - "Name": pkg.Name, - "Version": pkg.Version, - "Description": pkg.Description, - "URL": pkg.URL, - "PackageBase": pkg.PackageBase.Name, - "NumVotes": pkg.PackageBase.NumVotes, - "Popularity": float(pkg.PackageBase.Popularity), - "OutOfDate": None, - "Maintainer": user.Username, - "URLPath": f"/cgit/aur.git/snapshot/{pkg.Name}.tar.gz", - "Depends": ["chungus-depends"], - "OptDepends": ["chungus-optdepends=50"], - "MakeDepends": ["chungus-makedepends"], - "CheckDepends": ["chungus-checkdepends"], - "Conflicts": ["chungus-conflicts"], - "Provides": ["chungus-provides<=200"], - "Replaces": ["chungus-replaces<=200"], - "License": [pkg.package_licenses.first().License.Name], - "Keywords": [ - "big-chungus", - "sizeable-chungus", - "smol-chungus" - ] - }], + "results": [ + { + "Name": pkg.Name, + "Version": pkg.Version, + "Description": pkg.Description, + "URL": pkg.URL, + "PackageBase": pkg.PackageBase.Name, + "NumVotes": pkg.PackageBase.NumVotes, + "Popularity": float(pkg.PackageBase.Popularity), + "OutOfDate": None, + "Maintainer": user.Username, + "URLPath": f"/cgit/aur.git/snapshot/{pkg.Name}.tar.gz", + "Depends": ["chungus-depends"], + "OptDepends": ["chungus-optdepends=50"], + "MakeDepends": ["chungus-makedepends"], + "CheckDepends": ["chungus-checkdepends"], + "Conflicts": 
["chungus-conflicts"], + "Provides": ["chungus-provides<=200"], + "Replaces": ["chungus-replaces<=200"], + "License": [pkg.package_licenses.first().License.Name], + "Keywords": ["big-chungus", "sizeable-chungus", "smol-chungus"], + } + ], "resultcount": 1, - "type": "multiinfo" + "type": "multiinfo", } # Make dummy request. with client as request: - resp = request.get("/rpc", params={ - "v": 5, - "type": "info", - "arg": ["chungy-chungus", "big-chungus"], - }) + resp = request.get( + "/rpc", + params={ + "v": 5, + "type": "info", + "arg": ["chungy-chungus", "big-chungus"], + }, + ) # Load request response into Python dictionary. response_data = orjson.loads(resp.text) @@ -299,19 +358,21 @@ def test_rpc_singular_info(client: TestClient, def test_rpc_split_package_urlpath(client: TestClient, user: User): with db.begin(): - pkgbase = db.create(PackageBase, Name="pkg", - Maintainer=user, Packager=user) + pkgbase = db.create(PackageBase, Name="pkg", Maintainer=user, Packager=user) pkgs = [ db.create(Package, PackageBase=pkgbase, Name="pkg_1"), db.create(Package, PackageBase=pkgbase, Name="pkg_2"), ] with client as request: - response = request.get("/rpc", params={ - "v": 5, - "type": "info", - "arg": [pkgs[0].Name], - }) + response = request.get( + "/rpc", + params={ + "v": 5, + "type": "info", + "arg": [pkgs[0].Name], + }, + ) data = orjson.loads(response.text) snapshot_uri = config.get("options", "snapshot_uri") @@ -335,9 +396,9 @@ def test_rpc_multiinfo(client: TestClient, packages: list[Package]): # Make dummy request. request_packages = ["big-chungus", "chungy-chungus"] with client as request: - response = request.get("/rpc", params={ - "v": 5, "type": "info", "arg[]": request_packages - }) + response = request.get( + "/rpc", params={"v": 5, "type": "info", "arg[]": request_packages} + ) # Load request response into Python dictionary. 
response_data = orjson.loads(response.content.decode()) @@ -357,13 +418,15 @@ def test_rpc_mixedargs(client: TestClient, packages: list[Package]): with client as request: # Supply all of the args in the url to enforce ordering. response1 = request.get( - "/rpc?v=5&arg[]=big-chungus&arg=gluggly-chungus&type=info") + "/rpc?v=5&arg[]=big-chungus&arg=gluggly-chungus&type=info" + ) assert response1.status_code == int(HTTPStatus.OK) with client as request: response2 = request.get( "/rpc?v=5&arg=big-chungus&arg[]=gluggly-chungus" - "&type=info&arg[]=chungy-chungus") + "&type=info&arg[]=chungy-chungus" + ) assert response1.status_code == int(HTTPStatus.OK) # Load request response into Python dictionary. @@ -381,42 +444,47 @@ def test_rpc_mixedargs(client: TestClient, packages: list[Package]): assert i == [] -def test_rpc_no_dependencies_omits_key(client: TestClient, user: User, - packages: list[Package], - depends: list[PackageDependency], - relations: list[PackageRelation]): +def test_rpc_no_dependencies_omits_key( + client: TestClient, + user: User, + packages: list[Package], + depends: list[PackageDependency], + relations: list[PackageRelation], +): """ This makes sure things like 'MakeDepends' get removed from JSON strings when they don't have set values. 
""" pkg = packages[1] expected_response = { - 'version': 5, - 'results': [{ - 'Name': pkg.Name, - 'Version': pkg.Version, - 'Description': pkg.Description, - 'URL': pkg.URL, - 'PackageBase': pkg.PackageBase.Name, - 'NumVotes': pkg.PackageBase.NumVotes, - 'Popularity': int(pkg.PackageBase.Popularity), - 'OutOfDate': None, - 'Maintainer': user.Username, - 'URLPath': '/cgit/aur.git/snapshot/chungy-chungus.tar.gz', - 'Depends': ['chungy-depends'], - 'Conflicts': ['chungy-conflicts'], - 'License': [], - 'Keywords': [] - }], - 'resultcount': 1, - 'type': 'multiinfo' + "version": 5, + "results": [ + { + "Name": pkg.Name, + "Version": pkg.Version, + "Description": pkg.Description, + "URL": pkg.URL, + "PackageBase": pkg.PackageBase.Name, + "NumVotes": pkg.PackageBase.NumVotes, + "Popularity": int(pkg.PackageBase.Popularity), + "OutOfDate": None, + "Maintainer": user.Username, + "URLPath": "/cgit/aur.git/snapshot/chungy-chungus.tar.gz", + "Depends": ["chungy-depends"], + "Conflicts": ["chungy-conflicts"], + "License": [], + "Keywords": [], + } + ], + "resultcount": 1, + "type": "multiinfo", } # Make dummy request. with client as request: - response = request.get("/rpc", params={ - "v": 5, "type": "info", "arg": "chungy-chungus" - }) + response = request.get( + "/rpc", params={"v": 5, "type": "info", "arg": "chungy-chungus"} + ) response_data = orjson.loads(response.content.decode()) # Remove inconsistent keys. @@ -429,18 +497,18 @@ def test_rpc_no_dependencies_omits_key(client: TestClient, user: User, def test_rpc_bad_type(client: TestClient): # Define expected response. expected_data = { - 'version': 5, - 'results': [], - 'resultcount': 0, - 'type': 'error', - 'error': 'Incorrect request type specified.' + "version": 5, + "results": [], + "resultcount": 0, + "type": "error", + "error": "Incorrect request type specified.", } # Make dummy request. 
with client as request: - response = request.get("/rpc", params={ - "v": 5, "type": "invalid-type", "arg": "big-chungus" - }) + response = request.get( + "/rpc", params={"v": 5, "type": "invalid-type", "arg": "big-chungus"} + ) # Load request response into Python dictionary. response_data = orjson.loads(response.content.decode()) @@ -452,18 +520,18 @@ def test_rpc_bad_type(client: TestClient): def test_rpc_bad_version(client: TestClient): # Define expected response. expected_data = { - 'version': 0, - 'resultcount': 0, - 'results': [], - 'type': 'error', - 'error': 'Invalid version specified.' + "version": 0, + "resultcount": 0, + "results": [], + "type": "error", + "error": "Invalid version specified.", } # Make dummy request. with client as request: - response = request.get("/rpc", params={ - "v": 0, "type": "info", "arg": "big-chungus" - }) + response = request.get( + "/rpc", params={"v": 0, "type": "info", "arg": "big-chungus"} + ) # Load request response into Python dictionary. response_data = orjson.loads(response.content.decode()) @@ -475,19 +543,16 @@ def test_rpc_bad_version(client: TestClient): def test_rpc_no_version(client: TestClient): # Define expected response. expected_data = { - 'version': None, - 'resultcount': 0, - 'results': [], - 'type': 'error', - 'error': 'Please specify an API version.' + "version": None, + "resultcount": 0, + "results": [], + "type": "error", + "error": "Please specify an API version.", } # Make dummy request. with client as request: - response = request.get("/rpc", params={ - "type": "info", - "arg": "big-chungus" - }) + response = request.get("/rpc", params={"type": "info", "arg": "big-chungus"}) # Load request response into Python dictionary. response_data = orjson.loads(response.content.decode()) @@ -499,11 +564,11 @@ def test_rpc_no_version(client: TestClient): def test_rpc_no_type(client: TestClient): # Define expected response. 
expected_data = { - 'version': 5, - 'results': [], - 'resultcount': 0, - 'type': 'error', - 'error': 'No request type/data specified.' + "version": 5, + "results": [], + "resultcount": 0, + "type": "error", + "error": "No request type/data specified.", } # Make dummy request. @@ -520,11 +585,11 @@ def test_rpc_no_type(client: TestClient): def test_rpc_no_args(client: TestClient): # Define expected response. expected_data = { - 'version': 5, - 'results': [], - 'resultcount': 0, - 'type': 'error', - 'error': 'No request type/data specified.' + "version": 5, + "results": [], + "resultcount": 0, + "type": "error", + "error": "No request type/data specified.", } # Make dummy request. @@ -541,9 +606,9 @@ def test_rpc_no_args(client: TestClient): def test_rpc_no_maintainer(client: TestClient, packages: list[Package]): # Make dummy request. with client as request: - response = request.get("/rpc", params={ - "v": 5, "type": "info", "arg": "woogly-chungus" - }) + response = request.get( + "/rpc", params={"v": 5, "type": "info", "arg": "woogly-chungus"} + ) # Load request response into Python dictionary. response_data = orjson.loads(response.content.decode()) @@ -620,8 +685,12 @@ def mock_config_getint(section: str, key: str): @mock.patch("aurweb.config.getint", side_effect=mock_config_getint) -def test_rpc_ratelimit(getint: mock.MagicMock, client: TestClient, - pipeline: Pipeline, packages: list[Package]): +def test_rpc_ratelimit( + getint: mock.MagicMock, + client: TestClient, + pipeline: Pipeline, + packages: list[Package], +): params = {"v": 5, "type": "suggest-pkgbase", "arg": "big"} for i in range(4): @@ -685,7 +754,7 @@ def test_rpc_search(client: TestClient, packages: list[Package]): headers = {"If-None-Match": etag} response = request.get("/rpc", params=params, headers=headers) assert response.status_code == int(HTTPStatus.NOT_MODIFIED) - assert response.content == b'' + assert response.content == b"" # No args on non-m by types return an error. 
del params["arg"] @@ -703,12 +772,7 @@ def test_rpc_msearch(client: TestClient, user: User, packages: list[Package]): # user1 maintains 4 packages; assert that we got them all. assert data.get("resultcount") == 4 names = list(sorted(r.get("Name") for r in data.get("results"))) - expected_results = [ - "big-chungus", - "chungy-chungus", - "gluggly-chungus", - "other-pkg" - ] + expected_results = ["big-chungus", "chungy-chungus", "gluggly-chungus", "other-pkg"] assert names == expected_results # Search for a non-existent maintainer, giving us zero packages. @@ -730,11 +794,10 @@ def test_rpc_msearch(client: TestClient, user: User, packages: list[Package]): assert result.get("Name") == "big-chungus" -def test_rpc_search_depends(client: TestClient, packages: list[Package], - depends: list[PackageDependency]): - params = { - "v": 5, "type": "search", "by": "depends", "arg": "chungus-depends" - } +def test_rpc_search_depends( + client: TestClient, packages: list[Package], depends: list[PackageDependency] +): + params = {"v": 5, "type": "search", "by": "depends", "arg": "chungus-depends"} with client as request: response = request.get("/rpc", params=params) data = response.json() @@ -743,13 +806,14 @@ def test_rpc_search_depends(client: TestClient, packages: list[Package], assert result.get("Name") == packages[0].Name -def test_rpc_search_makedepends(client: TestClient, packages: list[Package], - depends: list[PackageDependency]): +def test_rpc_search_makedepends( + client: TestClient, packages: list[Package], depends: list[PackageDependency] +): params = { "v": 5, "type": "search", "by": "makedepends", - "arg": "chungus-makedepends" + "arg": "chungus-makedepends", } with client as request: response = request.get("/rpc", params=params) @@ -759,14 +823,10 @@ def test_rpc_search_makedepends(client: TestClient, packages: list[Package], assert result.get("Name") == packages[0].Name -def test_rpc_search_optdepends(client: TestClient, packages: list[Package], - depends: 
list[PackageDependency]): - params = { - "v": 5, - "type": "search", - "by": "optdepends", - "arg": "chungus-optdepends" - } +def test_rpc_search_optdepends( + client: TestClient, packages: list[Package], depends: list[PackageDependency] +): + params = {"v": 5, "type": "search", "by": "optdepends", "arg": "chungus-optdepends"} with client as request: response = request.get("/rpc", params=params) data = response.json() @@ -775,13 +835,14 @@ def test_rpc_search_optdepends(client: TestClient, packages: list[Package], assert result.get("Name") == packages[0].Name -def test_rpc_search_checkdepends(client: TestClient, packages: list[Package], - depends: list[PackageDependency]): +def test_rpc_search_checkdepends( + client: TestClient, packages: list[Package], depends: list[PackageDependency] +): params = { "v": 5, "type": "search", "by": "checkdepends", - "arg": "chungus-checkdepends" + "arg": "chungus-checkdepends", } with client as request: response = request.get("/rpc", params=params) @@ -799,21 +860,16 @@ def test_rpc_incorrect_by(client: TestClient): def test_rpc_jsonp_callback(client: TestClient): - """ Test the callback parameter. + """Test the callback parameter. For end-to-end verification, the `examples/jsonp.html` file can be used to submit jsonp callback requests to the RPC. """ - params = { - "v": 5, - "type": "search", - "arg": "big", - "callback": "jsonCallback" - } + params = {"v": 5, "type": "search", "arg": "big", "callback": "jsonCallback"} with client as request: response = request.get("/rpc", params=params) assert response.headers.get("content-type") == "text/javascript" - assert re.search(r'^/\*\*/jsonCallback\(.*\)$', response.text) is not None + assert re.search(r"^/\*\*/jsonCallback\(.*\)$", response.text) is not None # Test an invalid callback name; we get an application/json error. params["callback"] = "jsonCallback!" 
@@ -824,20 +880,14 @@ def test_rpc_jsonp_callback(client: TestClient): def test_rpc_post(client: TestClient, packages: list[Package]): - data = { - "v": 5, - "type": "info", - "arg": "big-chungus", - "arg[]": ["chungy-chungus"] - } + data = {"v": 5, "type": "info", "arg": "big-chungus", "arg[]": ["chungy-chungus"]} with client as request: resp = request.post("/rpc", data=data) assert resp.status_code == int(HTTPStatus.OK) assert resp.json().get("resultcount") == 2 -def test_rpc_too_many_search_results(client: TestClient, - packages: list[Package]): +def test_rpc_too_many_search_results(client: TestClient, packages: list[Package]): config_getint = config.getint def mock_config(section: str, key: str): @@ -858,10 +908,18 @@ def test_rpc_too_many_info_results(client: TestClient, packages: list[Package]): # regardless of the number of related records. with db.begin(): for i in range(len(packages) - 1): - db.create(PackageDependency, DepTypeID=DEPENDS_ID, - Package=packages[i], DepName=packages[i + 1].Name) - db.create(PackageRelation, RelTypeID=PROVIDES_ID, - Package=packages[i], RelName=packages[i + 1].Name) + db.create( + PackageDependency, + DepTypeID=DEPENDS_ID, + Package=packages[i], + DepName=packages[i + 1].Name, + ) + db.create( + PackageRelation, + RelTypeID=PROVIDES_ID, + Package=packages[i], + RelName=packages[i + 1].Name, + ) config_getint = config.getint diff --git a/test/test_rss.py b/test/test_rss.py index cef6a46f..8526caa1 100644 --- a/test/test_rss.py +++ b/test/test_rss.py @@ -2,7 +2,6 @@ from http import HTTPStatus import lxml.etree import pytest - from fastapi.testclient import TestClient from aurweb import db, logging, time @@ -27,13 +26,15 @@ def client(): @pytest.fixture def user(): - account_type = db.query(AccountType, - AccountType.AccountType == "User").first() - yield db.create(User, Username="test", - Email="test@example.org", - RealName="Test User", - Passwd="testPassword", - AccountType=account_type) + account_type = 
db.query(AccountType, AccountType.AccountType == "User").first() + yield db.create( + User, + Username="test", + Email="test@example.org", + RealName="Test User", + Passwd="testPassword", + AccountType=account_type, + ) @pytest.fixture @@ -45,8 +46,12 @@ def packages(user): with db.begin(): for i in range(101): pkgbase = db.create( - PackageBase, Maintainer=user, Name=f"test-package-{i}", - SubmittedTS=(now + i), ModifiedTS=(now + i)) + PackageBase, + Maintainer=user, + Name=f"test-package-{i}", + SubmittedTS=(now + i), + ModifiedTS=(now + i), + ) pkg = db.create(Package, Name=pkgbase.Name, PackageBase=pkgbase) pkgs.append(pkg) yield pkgs @@ -64,6 +69,7 @@ def test_rss(client, user, packages): # Test that the RSS we got is sorted by descending SubmittedTS. def key_(pkg): return pkg.PackageBase.SubmittedTS + packages = list(reversed(sorted(packages, key=key_))) # Just take the first 100. @@ -74,7 +80,7 @@ def test_rss(client, user, packages): assert len(items) == 100 for i, item in enumerate(items): - title = next(iter(item.xpath('./title'))) + title = next(iter(item.xpath("./title"))) logger.debug(f"title: '{title.text}' vs name: '{packages[i].Name}'") assert title.text == packages[i].Name @@ -87,6 +93,7 @@ def test_rss_modified(client, user, packages): # Test that the RSS we got is sorted by descending SubmittedTS. def key_(pkg): return pkg.PackageBase.ModifiedTS + packages = list(reversed(sorted(packages, key=key_))) # Just take the first 100. 
@@ -97,6 +104,6 @@ def test_rss_modified(client, user, packages): assert len(items) == 100 for i, item in enumerate(items): - title = next(iter(item.xpath('./title'))) + title = next(iter(item.xpath("./title"))) logger.debug(f"title: '{title.text}' vs name: '{packages[i].Name}'") assert title.text == packages[i].Name diff --git a/test/test_session.py b/test/test_session.py index edae57f9..db792b33 100644 --- a/test/test_session.py +++ b/test/test_session.py @@ -2,7 +2,6 @@ from unittest import mock import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db, time @@ -19,17 +18,23 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - ResetKey="testReset", Passwd="testPassword", - AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + ResetKey="testReset", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user @pytest.fixture def session(user: User) -> Session: with db.begin(): - session = db.create(Session, User=user, SessionID="testSession", - LastUpdateTS=time.utcnow()) + session = db.create( + Session, User=user, SessionID="testSession", LastUpdateTS=time.utcnow() + ) yield session @@ -39,15 +44,21 @@ def test_session(user: User, session: Session): def test_session_cs(): - """ Test case sensitivity of the database table. 
""" + """Test case sensitivity of the database table.""" with db.begin(): - user2 = db.create(User, Username="test2", Email="test2@example.org", - ResetKey="testReset2", Passwd="testPassword", - AccountTypeID=USER_ID) + user2 = db.create( + User, + Username="test2", + Email="test2@example.org", + ResetKey="testReset2", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) with db.begin(): - session_cs = db.create(Session, User=user2, SessionID="TESTSESSION", - LastUpdateTS=time.utcnow()) + session_cs = db.create( + Session, User=user2, SessionID="TESTSESSION", LastUpdateTS=time.utcnow() + ) assert session_cs.SessionID == "TESTSESSION" assert session_cs.SessionID != "testSession" diff --git a/test/test_spawn.py b/test/test_spawn.py index 195eb897..be1c5e7c 100644 --- a/test/test_spawn.py +++ b/test/test_spawn.py @@ -1,6 +1,5 @@ import os import tempfile - from typing import Tuple from unittest import mock @@ -8,26 +7,21 @@ import pytest import aurweb.config import aurweb.spawn - from aurweb.exceptions import AurwebException # Some os.environ overrides we use in this suite. -TEST_ENVIRONMENT = { - "PHP_NGINX_PORT": "8001", - "FASTAPI_NGINX_PORT": "8002" -} +TEST_ENVIRONMENT = {"PHP_NGINX_PORT": "8001", "FASTAPI_NGINX_PORT": "8002"} class FakeProcess: - """ Fake a subprocess.Popen return object. """ + """Fake a subprocess.Popen return object.""" returncode = 0 - stdout = b'' - stderr = b'' + stdout = b"" + stderr = b"" def __init__(self, *args, **kwargs): - """ We need this constructor to remain compatible with Popen. """ - pass + """We need this constructor to remain compatible with Popen.""" def communicate(self) -> Tuple[bytes, bytes]: return (self.stdout, self.stderr) @@ -40,10 +34,9 @@ class FakeProcess: class MockFakeProcess: - """ FakeProcess construction helper to be used in mocks. 
""" + """FakeProcess construction helper to be used in mocks.""" - def __init__(self, return_code: int = 0, stdout: bytes = b'', - stderr: bytes = b''): + def __init__(self, return_code: int = 0, stdout: bytes = b"", stderr: bytes = b""): self.returncode = return_code self.stdout = stdout self.stderr = stderr @@ -101,7 +94,7 @@ def test_spawn_generate_nginx_config(): f'listen {php_host}:{TEST_ENVIRONMENT.get("PHP_NGINX_PORT")}', f"proxy_pass http://{php_address}", f'listen {fastapi_host}:{TEST_ENVIRONMENT.get("FASTAPI_NGINX_PORT")}', - f"proxy_pass http://{fastapi_address}" + f"proxy_pass http://{fastapi_address}", ] for expected in expected_content: assert expected in nginx_config diff --git a/test/test_ssh_pub_key.py b/test/test_ssh_pub_key.py index 93298a11..1a586800 100644 --- a/test/test_ssh_pub_key.py +++ b/test/test_ssh_pub_key.py @@ -27,18 +27,23 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - RealName="Test User", Passwd="testPassword", - AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + RealName="Test User", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user @pytest.fixture def pubkey(user: User) -> SSHPubKey: with db.begin(): - pubkey = db.create(SSHPubKey, User=user, - Fingerprint="testFingerprint", - PubKey="testPubKey") + pubkey = db.create( + SSHPubKey, User=user, Fingerprint="testFingerprint", PubKey="testPubKey" + ) yield pubkey @@ -50,11 +55,11 @@ def test_pubkey(user: User, pubkey: SSHPubKey): def test_pubkey_cs(user: User): - """ Test case sensitivity of the database table. 
""" + """Test case sensitivity of the database table.""" with db.begin(): - pubkey_cs = db.create(SSHPubKey, User=user, - Fingerprint="TESTFINGERPRINT", - PubKey="TESTPUBKEY") + pubkey_cs = db.create( + SSHPubKey, User=user, Fingerprint="TESTFINGERPRINT", PubKey="TESTPUBKEY" + ) assert pubkey_cs.Fingerprint == "TESTFINGERPRINT" assert pubkey_cs.Fingerprint != "testFingerprint" diff --git a/test/test_templates.py b/test/test_templates.py index 4b138567..383f45d1 100644 --- a/test/test_templates.py +++ b/test/test_templates.py @@ -1,21 +1,23 @@ import re - from typing import Any import pytest import aurweb.filters # noqa: F401 - from aurweb import config, db, templates, time -from aurweb.filters import as_timezone, number_format -from aurweb.filters import timestamp_to_datetime as to_dt +from aurweb.filters import as_timezone, number_format, timestamp_to_datetime as to_dt from aurweb.models import Package, PackageBase, User from aurweb.models.account_type import USER_ID from aurweb.models.license import License from aurweb.models.package_license import PackageLicense from aurweb.models.package_relation import PackageRelation from aurweb.models.relation_type import PROVIDES_ID, REPLACES_ID -from aurweb.templates import base_template, make_context, register_filter, register_function +from aurweb.templates import ( + base_template, + make_context, + register_filter, + register_function, +) from aurweb.testing.html import parse_root from aurweb.testing.requests import Request @@ -35,19 +37,20 @@ def function(): def create_user(username: str) -> User: with db.begin(): - user = db.create(User, Username=username, - Email=f"{username}@example.org", - Passwd="testPassword", - AccountTypeID=USER_ID) + user = db.create( + User, + Username=username, + Email=f"{username}@example.org", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) return user -def create_pkgrel(package: Package, reltype_id: int, relname: str) \ - -> PackageRelation: - return db.create(PackageRelation, - 
Package=package, - RelTypeID=reltype_id, - RelName=relname) +def create_pkgrel(package: Package, reltype_id: int, relname: str) -> PackageRelation: + return db.create( + PackageRelation, Package=package, RelTypeID=reltype_id, RelName=relname + ) @pytest.fixture @@ -60,8 +63,13 @@ def user(db_test) -> User: def pkgbase(user: User) -> PackageBase: now = time.utcnow() with db.begin(): - pkgbase = db.create(PackageBase, Name="test-pkg", Maintainer=user, - SubmittedTS=now, ModifiedTS=now) + pkgbase = db.create( + PackageBase, + Name="test-pkg", + Maintainer=user, + SubmittedTS=now, + ModifiedTS=now, + ) yield pkgbase @@ -79,9 +87,10 @@ def create_license(pkg: Package, license_name: str) -> PackageLicense: def test_register_function_exists_key_error(): - """ Most instances of register_filter are tested through module - imports or template renders, so we only test failures here. """ + """Most instances of register_filter are tested through module + imports or template renders, so we only test failures here.""" with pytest.raises(KeyError): + @register_function("function") def some_func(): pass @@ -93,8 +102,9 @@ def test_commit_hash(): commit_hash = "abcdefg" long_commit_hash = commit_hash + "1234567" - def config_get_with_fallback(section: str, option: str, - fallback: str = None) -> str: + def config_get_with_fallback( + section: str, option: str, fallback: str = None + ) -> str: if section == "devel" and option == "commit_hash": return long_commit_hash return config.original_get_with_fallback(section, option, fallback) @@ -134,12 +144,12 @@ def pager_context(num_packages: int) -> dict[str, Any]: "prefix": "/packages", "total": num_packages, "O": 0, - "PP": 50 + "PP": 50, } def test_pager_no_results(): - """ Test the pager partial with no results. 
""" + """Test the pager partial with no results.""" num_packages = 0 context = pager_context(num_packages) body = base_template("partials/pager.html").render(context) @@ -151,7 +161,7 @@ def test_pager_no_results(): def test_pager(): - """ Test the pager partial with two pages of results. """ + """Test the pager partial with two pages of results.""" num_packages = 100 context = pager_context(num_packages) body = base_template("partials/pager.html").render(context) @@ -274,17 +284,19 @@ def check_package_details(content: str, pkg: Package) -> None: def test_package_details(user: User, package: Package): - """ Test package details with most fields populated, but not all. """ + """Test package details with most fields populated, but not all.""" request = Request(user=user, authenticated=True) context = make_context(request, "Test Details") - context.update({ - "request": request, - "git_clone_uri_anon": GIT_CLONE_URI_ANON, - "git_clone_uri_priv": GIT_CLONE_URI_PRIV, - "pkgbase": package.PackageBase, - "pkg": package, - "comaintainers": [], - }) + context.update( + { + "request": request, + "git_clone_uri_anon": GIT_CLONE_URI_ANON, + "git_clone_uri_priv": GIT_CLONE_URI_PRIV, + "pkgbase": package.PackageBase, + "pkg": package, + "comaintainers": [], + } + ) base = base_template("partials/packages/details.html") body = base.render(context, show_package_details=True) @@ -292,7 +304,7 @@ def test_package_details(user: User, package: Package): def test_package_details_filled(user: User, package: Package): - """ Test package details with all fields populated. 
""" + """Test package details with all fields populated.""" pkgbase = package.PackageBase with db.begin(): @@ -311,19 +323,23 @@ def test_package_details_filled(user: User, package: Package): request = Request(user=user, authenticated=True) context = make_context(request, "Test Details") - context.update({ - "request": request, - "git_clone_uri_anon": GIT_CLONE_URI_ANON, - "git_clone_uri_priv": GIT_CLONE_URI_PRIV, - "pkgbase": package.PackageBase, - "pkg": package, - "comaintainers": [], - "licenses": package.package_licenses, - "provides": package.package_relations.filter( - PackageRelation.RelTypeID == PROVIDES_ID), - "replaces": package.package_relations.filter( - PackageRelation.RelTypeID == REPLACES_ID), - }) + context.update( + { + "request": request, + "git_clone_uri_anon": GIT_CLONE_URI_ANON, + "git_clone_uri_priv": GIT_CLONE_URI_PRIV, + "pkgbase": package.PackageBase, + "pkg": package, + "comaintainers": [], + "licenses": package.package_licenses, + "provides": package.package_relations.filter( + PackageRelation.RelTypeID == PROVIDES_ID + ), + "replaces": package.package_relations.filter( + PackageRelation.RelTypeID == REPLACES_ID + ), + } + ) base = base_template("partials/packages/details.html") body = base.render(context, show_package_details=True) diff --git a/test/test_term.py b/test/test_term.py index bfa73a76..4b608a9a 100644 --- a/test/test_term.py +++ b/test/test_term.py @@ -1,5 +1,4 @@ import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db @@ -13,8 +12,9 @@ def setup(db_test): def test_term_creation(): with db.begin(): - term = db.create(Term, Description="Term description", - URL="https://fake_url.io") + term = db.create( + Term, Description="Term description", URL="https://fake_url.io" + ) assert bool(term.ID) assert term.Description == "Term description" assert term.URL == "https://fake_url.io" diff --git a/test/test_time.py b/test/test_time.py index 2134d217..db7b30bf 100644 --- a/test/test_time.py +++ 
b/test/test_time.py @@ -1,5 +1,4 @@ import aurweb.config - from aurweb.testing.requests import Request from aurweb.time import get_request_timezone, tz_offset diff --git a/test/test_trusted_user_routes.py b/test/test_trusted_user_routes.py index 2e7dc193..203008e3 100644 --- a/test/test_trusted_user_routes.py +++ b/test/test_trusted_user_routes.py @@ -1,12 +1,10 @@ import re - from http import HTTPStatus from io import StringIO from typing import Tuple import lxml.etree import pytest - from fastapi.testclient import TestClient from aurweb import config, db, filters, time @@ -16,8 +14,8 @@ from aurweb.models.tu_voteinfo import TUVoteInfo from aurweb.models.user import User from aurweb.testing.requests import Request -DATETIME_REGEX = r'^[0-9]{4}-[0-9]{2}-[0-9]{2} \(.+\)$' -PARTICIPATION_REGEX = r'^1?[0-9]{2}[%]$' # 0% - 100% +DATETIME_REGEX = r"^[0-9]{4}-[0-9]{2}-[0-9]{2} \(.+\)$" +PARTICIPATION_REGEX = r"^1?[0-9]{2}[%]$" # 0% - 100% def parse_root(html): @@ -43,11 +41,11 @@ def get_pkglist_directions(table): def get_a(node): - return node.xpath('./a')[0].text.strip() + return node.xpath("./a")[0].text.strip() def get_span(node): - return node.xpath('./span')[0].text.strip() + return node.xpath("./span")[0].text.strip() def assert_current_vote_html(row, expected): @@ -82,39 +80,51 @@ def setup(db_test): @pytest.fixture def client(): from aurweb.asgi import app + yield TestClient(app=app) @pytest.fixture def tu_user(): - tu_type = db.query(AccountType, - AccountType.AccountType == "Trusted User").first() + tu_type = db.query(AccountType, AccountType.AccountType == "Trusted User").first() with db.begin(): - tu_user = db.create(User, Username="test_tu", - Email="test_tu@example.org", - RealName="Test TU", Passwd="testPassword", - AccountType=tu_type) + tu_user = db.create( + User, + Username="test_tu", + Email="test_tu@example.org", + RealName="Test TU", + Passwd="testPassword", + AccountType=tu_type, + ) yield tu_user @pytest.fixture def tu_user2(): with db.begin(): - 
tu_user2 = db.create(User, Username="test_tu2", - Email="test_tu2@example.org", - RealName="Test TU 2", Passwd="testPassword", - AccountTypeID=TRUSTED_USER_ID) + tu_user2 = db.create( + User, + Username="test_tu2", + Email="test_tu2@example.org", + RealName="Test TU 2", + Passwd="testPassword", + AccountTypeID=TRUSTED_USER_ID, + ) yield tu_user2 @pytest.fixture def user(): - user_type = db.query(AccountType, - AccountType.AccountType == "User").first() + user_type = db.query(AccountType, AccountType.AccountType == "User").first() with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - RealName="Test User", Passwd="testPassword", - AccountType=user_type) + user = db.create( + User, + Username="test", + Email="test@example.org", + RealName="Test User", + Passwd="testPassword", + AccountType=user_type, + ) yield user @@ -126,10 +136,15 @@ def proposal(user, tu_user): end = ts + 1000 with db.begin(): - voteinfo = db.create(TUVoteInfo, - Agenda=agenda, Quorum=0.0, - User=user.Username, Submitter=tu_user, - Submitted=start, End=end) + voteinfo = db.create( + TUVoteInfo, + Agenda=agenda, + Quorum=0.0, + User=user.Username, + Submitter=tu_user, + Submitted=start, + End=end, + ) yield (tu_user, user, voteinfo) @@ -153,7 +168,7 @@ def test_tu_index_unauthorized(client: TestClient, user: User): def test_tu_empty_index(client, tu_user): - """ Check an empty index when we don't create any records. """ + """Check an empty index when we don't create any records.""" # Make a default get request to /tu. cookies = {"AURSID": tu_user.login(Request(), "testPassword")} @@ -179,18 +194,23 @@ def test_tu_index(client, tu_user): # Create some test votes: (Agenda, Start, End). votes = [ ("Test agenda 1", ts - 5, ts + 1000), # Still running. - ("Test agenda 2", ts - 1000, ts - 5) # Not running anymore. + ("Test agenda 2", ts - 1000, ts - 5), # Not running anymore. 
] vote_records = [] with db.begin(): for vote in votes: agenda, start, end = vote vote_records.append( - db.create(TUVoteInfo, Agenda=agenda, - User=tu_user.Username, - Submitted=start, End=end, - Quorum=0.0, - Submitter=tu_user)) + db.create( + TUVoteInfo, + Agenda=agenda, + User=tu_user.Username, + Submitted=start, + End=end, + Quorum=0.0, + Submitter=tu_user, + ) + ) with db.begin(): # Vote on an ended proposal. @@ -202,21 +222,23 @@ def test_tu_index(client, tu_user): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: # Pass an invalid cby and pby; let them default to "desc". - response = request.get("/tu", cookies=cookies, params={ - "cby": "BAD!", - "pby": "blah" - }, allow_redirects=False) + response = request.get( + "/tu", + cookies=cookies, + params={"cby": "BAD!", "pby": "blah"}, + allow_redirects=False, + ) assert response.status_code == int(HTTPStatus.OK) # Rows we expect to exist in HTML produced by /tu for current votes. expected_rows = [ ( - r'Test agenda 1', + r"Test agenda 1", DATETIME_REGEX, DATETIME_REGEX, tu_user.Username, - r'^(Yes|No)$' + r"^(Yes|No)$", ) ] @@ -239,13 +261,13 @@ def test_tu_index(client, tu_user): # Rows we expect to exist in HTML produced by /tu for past votes. expected_rows = [ ( - r'Test agenda 2', + r"Test agenda 2", DATETIME_REGEX, DATETIME_REGEX, tu_user.Username, - r'^\d+$', - r'^\d+$', - r'^(Yes|No)$' + r"^\d+$", + r"^\d+$", + r"^(Yes|No)$", ) ] @@ -315,19 +337,27 @@ def test_tu_index_table_paging(client, tu_user): with db.begin(): for i in range(25): # Create 25 current votes. - db.create(TUVoteInfo, Agenda=f"Agenda #{i}", - User=tu_user.Username, - Submitted=(ts - 5), End=(ts + 1000), - Quorum=0.0, - Submitter=tu_user) + db.create( + TUVoteInfo, + Agenda=f"Agenda #{i}", + User=tu_user.Username, + Submitted=(ts - 5), + End=(ts + 1000), + Quorum=0.0, + Submitter=tu_user, + ) for i in range(25): # Create 25 past votes. 
- db.create(TUVoteInfo, Agenda=f"Agenda #{25 + i}", - User=tu_user.Username, - Submitted=(ts - 1000), End=(ts - 5), - Quorum=0.0, - Submitter=tu_user) + db.create( + TUVoteInfo, + Agenda=f"Agenda #{25 + i}", + User=tu_user.Username, + Submitted=(ts - 1000), + End=(ts - 5), + Quorum=0.0, + Submitter=tu_user, + ) cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: @@ -347,7 +377,7 @@ def test_tu_index_table_paging(client, tu_user): DATETIME_REGEX, DATETIME_REGEX, tu_user.Username, - r'^(Yes|No)$' + r"^(Yes|No)$", ] for i, row in enumerate(rows): @@ -361,9 +391,9 @@ def test_tu_index_table_paging(client, tu_user): # Now, get the next page of current votes. offset = 10 # Specify coff=10 with client as request: - response = request.get("/tu", cookies=cookies, params={ - "coff": offset - }, allow_redirects=False) + response = request.get( + "/tu", cookies=cookies, params={"coff": offset}, allow_redirects=False + ) assert response.status_code == int(HTTPStatus.OK) old_rows = rows @@ -390,9 +420,9 @@ def test_tu_index_table_paging(client, tu_user): offset = 20 # Specify coff=10 with client as request: - response = request.get("/tu", cookies=cookies, params={ - "coff": offset - }, allow_redirects=False) + response = request.get( + "/tu", cookies=cookies, params={"coff": offset}, allow_redirects=False + ) assert response.status_code == int(HTTPStatus.OK) # Do it again, we only have five left. @@ -423,11 +453,15 @@ def test_tu_index_sorting(client, tu_user): with db.begin(): for i in range(2): # Create 'Agenda #1' and 'Agenda #2'. - db.create(TUVoteInfo, Agenda=f"Agenda #{i + 1}", - User=tu_user.Username, - Submitted=(ts + 5), End=(ts + 1000), - Quorum=0.0, - Submitter=tu_user) + db.create( + TUVoteInfo, + Agenda=f"Agenda #{i + 1}", + User=tu_user.Username, + Submitted=(ts + 5), + End=(ts + 1000), + Quorum=0.0, + Submitter=tu_user, + ) # Let's order each vote one day after the other. 
# This will allow us to test the sorting nature @@ -446,27 +480,27 @@ def test_tu_index_sorting(client, tu_user): rows = get_table_rows(table) # The latest Agenda is at the top by default. - expected = [ - "Agenda #2", - "Agenda #1" - ] + expected = ["Agenda #2", "Agenda #1"] assert len(rows) == len(expected) for i, row in enumerate(rows): - assert_current_vote_html(row, [ - expected[i], - DATETIME_REGEX, - DATETIME_REGEX, - tu_user.Username, - r'^(Yes|No)$' - ]) + assert_current_vote_html( + row, + [ + expected[i], + DATETIME_REGEX, + DATETIME_REGEX, + tu_user.Username, + r"^(Yes|No)$", + ], + ) # Make another request; one that sorts the current votes # in ascending order instead of the default descending order. with client as request: - response = request.get("/tu", cookies=cookies, params={ - "cby": "asc" - }, allow_redirects=False) + response = request.get( + "/tu", cookies=cookies, params={"cby": "asc"}, allow_redirects=False + ) assert response.status_code == int(HTTPStatus.OK) # Get lxml handles of the document. @@ -478,30 +512,37 @@ def test_tu_index_sorting(client, tu_user): rev_expected = list(reversed(expected)) assert len(rows) == len(rev_expected) for i, row in enumerate(rows): - assert_current_vote_html(row, [ - rev_expected[i], - DATETIME_REGEX, - DATETIME_REGEX, - tu_user.Username, - r'^(Yes|No)$' - ]) + assert_current_vote_html( + row, + [ + rev_expected[i], + DATETIME_REGEX, + DATETIME_REGEX, + tu_user.Username, + r"^(Yes|No)$", + ], + ) -def test_tu_index_last_votes(client: TestClient, tu_user: User, tu_user2: User, - user: User): +def test_tu_index_last_votes( + client: TestClient, tu_user: User, tu_user2: User, user: User +): ts = time.utcnow() with db.begin(): # Create a proposal which has ended. 
- voteinfo = db.create(TUVoteInfo, Agenda="Test agenda", - User=user.Username, - Submitted=(ts - 1000), - End=(ts - 5), - Yes=1, - No=1, - ActiveTUs=1, - Quorum=0.0, - Submitter=tu_user) + voteinfo = db.create( + TUVoteInfo, + Agenda="Test agenda", + User=user.Username, + Submitted=(ts - 1000), + End=(ts - 5), + Yes=1, + No=1, + ActiveTUs=1, + Quorum=0.0, + Submitter=tu_user, + ) # Create a vote on it from tu_user. db.create(TUVote, VoteInfo=voteinfo, User=tu_user) @@ -536,26 +577,27 @@ def test_tu_proposal_not_found(client, tu_user): assert response.status_code == int(HTTPStatus.NOT_FOUND) -def test_tu_proposal_unauthorized(client: TestClient, user: User, - proposal: Tuple[User, User, TUVoteInfo]): +def test_tu_proposal_unauthorized( + client: TestClient, user: User, proposal: Tuple[User, User, TUVoteInfo] +): cookies = {"AURSID": user.login(Request(), "testPassword")} endpoint = f"/tu/{proposal[2].ID}" with client as request: - response = request.get(endpoint, cookies=cookies, - allow_redirects=False) + response = request.get(endpoint, cookies=cookies, allow_redirects=False) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/tu" with client as request: - response = request.post(endpoint, cookies=cookies, - data={"decision": False}, - allow_redirects=False) + response = request.post( + endpoint, cookies=cookies, data={"decision": False}, allow_redirects=False + ) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/tu" -def test_tu_running_proposal(client: TestClient, - proposal: Tuple[User, User, TUVoteInfo]): +def test_tu_running_proposal( + client: TestClient, proposal: Tuple[User, User, TUVoteInfo] +): tu_user, user, voteinfo = proposal with db.begin(): voteinfo.ActiveTUs = 1 @@ -576,8 +618,7 @@ def test_tu_running_proposal(client: TestClient, assert vote_running.text.strip() == "This vote is still running." # Verify User field. 
- username = details.xpath( - './div[contains(@class, "user")]/strong/a/text()')[0] + username = details.xpath('./div[contains(@class, "user")]/strong/a/text()')[0] assert username.strip() == user.Username active = details.xpath('./div[contains(@class, "field")]')[1] @@ -585,10 +626,13 @@ def test_tu_running_proposal(client: TestClient, assert "Active Trusted Users assigned:" in content assert "1" in content - submitted = details.xpath( - './div[contains(@class, "submitted")]/text()')[0] - assert re.match(r'^Submitted: \d{4}-\d{2}-\d{2} \d{2}:\d{2} \(.+\) by$', - submitted.strip()) is not None + submitted = details.xpath('./div[contains(@class, "submitted")]/text()')[0] + assert ( + re.match( + r"^Submitted: \d{4}-\d{2}-\d{2} \d{2}:\d{2} \(.+\) by$", submitted.strip() + ) + is not None + ) submitter = details.xpath('./div[contains(@class, "submitted")]/a')[0] assert submitter.text.strip() == tu_user.Username assert submitter.attrib["href"] == f"/account/{tu_user.Username}" @@ -598,8 +642,10 @@ def test_tu_running_proposal(client: TestClient, assert end_label.strip() == "End:" end_datetime = end.xpath("./strong/text()")[0] - assert re.match(r'^\d{4}-\d{2}-\d{2} \d{2}:\d{2} \(.+\)$', - end_datetime.strip()) is not None + assert ( + re.match(r"^\d{4}-\d{2}-\d{2} \d{2}:\d{2} \(.+\)$", end_datetime.strip()) + is not None + ) # We have not voted yet. Assert that our voting form is shown. form = root.xpath('//form[contains(@class, "action-form")]')[0] @@ -630,8 +676,7 @@ def test_tu_running_proposal(client: TestClient, # Make another request now that we've voted. with client as request: - response = request.get( - "/tu", params={"id": voteinfo.ID}, cookies=cookies) + response = request.get("/tu", params={"id": voteinfo.ID}, cookies=cookies) assert response.status_code == int(HTTPStatus.OK) # Parse our new root. 
@@ -685,12 +730,13 @@ def test_tu_ended_proposal(client, proposal): def test_tu_proposal_vote_not_found(client, tu_user): - """ Test POST request to a missing vote. """ + """Test POST request to a missing vote.""" cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: data = {"decision": "Yes"} - response = request.post("/tu/1", cookies=cookies, - data=data, allow_redirects=False) + response = request.post( + "/tu/1", cookies=cookies, data=data, allow_redirects=False + ) assert response.status_code == int(HTTPStatus.NOT_FOUND) @@ -703,16 +749,14 @@ def test_tu_proposal_vote(client, proposal): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: data = {"decision": "Yes"} - response = request.post(f"/tu/{voteinfo.ID}", cookies=cookies, - data=data) + response = request.post(f"/tu/{voteinfo.ID}", cookies=cookies, data=data) assert response.status_code == int(HTTPStatus.OK) # Check that the proposal record got updated. assert voteinfo.Yes == yes + 1 # Check that the new TUVote exists. 
- vote = db.query(TUVote, TUVote.VoteInfo == voteinfo, - TUVote.User == tu_user).first() + vote = db.query(TUVote, TUVote.VoteInfo == voteinfo, TUVote.User == tu_user).first() assert vote is not None root = parse_root(response.text) @@ -723,7 +767,8 @@ def test_tu_proposal_vote(client, proposal): def test_tu_proposal_vote_unauthorized( - client: TestClient, proposal: Tuple[User, User, TUVoteInfo]): + client: TestClient, proposal: Tuple[User, User, TUVoteInfo] +): tu_user, user, voteinfo = proposal with db.begin(): @@ -732,8 +777,9 @@ def test_tu_proposal_vote_unauthorized( cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: data = {"decision": "Yes"} - response = request.post(f"/tu/{voteinfo.ID}", cookies=cookies, - data=data, allow_redirects=False) + response = request.post( + f"/tu/{voteinfo.ID}", cookies=cookies, data=data, allow_redirects=False + ) assert response.status_code == int(HTTPStatus.UNAUTHORIZED) root = parse_root(response.text) @@ -742,8 +788,9 @@ def test_tu_proposal_vote_unauthorized( with client as request: data = {"decision": "Yes"} - response = request.get(f"/tu/{voteinfo.ID}", cookies=cookies, - data=data, allow_redirects=False) + response = request.get( + f"/tu/{voteinfo.ID}", cookies=cookies, data=data, allow_redirects=False + ) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -761,8 +808,9 @@ def test_tu_proposal_vote_cant_self_vote(client, proposal): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: data = {"decision": "Yes"} - response = request.post(f"/tu/{voteinfo.ID}", cookies=cookies, - data=data, allow_redirects=False) + response = request.post( + f"/tu/{voteinfo.ID}", cookies=cookies, data=data, allow_redirects=False + ) assert response.status_code == int(HTTPStatus.BAD_REQUEST) root = parse_root(response.text) @@ -771,8 +819,9 @@ def test_tu_proposal_vote_cant_self_vote(client, proposal): with client as request: data = 
{"decision": "Yes"} - response = request.get(f"/tu/{voteinfo.ID}", cookies=cookies, - data=data, allow_redirects=False) + response = request.get( + f"/tu/{voteinfo.ID}", cookies=cookies, data=data, allow_redirects=False + ) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -791,8 +840,9 @@ def test_tu_proposal_vote_already_voted(client, proposal): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: data = {"decision": "Yes"} - response = request.post(f"/tu/{voteinfo.ID}", cookies=cookies, - data=data, allow_redirects=False) + response = request.post( + f"/tu/{voteinfo.ID}", cookies=cookies, data=data, allow_redirects=False + ) assert response.status_code == int(HTTPStatus.BAD_REQUEST) root = parse_root(response.text) @@ -801,8 +851,9 @@ def test_tu_proposal_vote_already_voted(client, proposal): with client as request: data = {"decision": "Yes"} - response = request.get(f"/tu/{voteinfo.ID}", cookies=cookies, - data=data, allow_redirects=False) + response = request.get( + f"/tu/{voteinfo.ID}", cookies=cookies, data=data, allow_redirects=False + ) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -816,8 +867,7 @@ def test_tu_proposal_vote_invalid_decision(client, proposal): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: data = {"decision": "EVIL"} - response = request.post(f"/tu/{voteinfo.ID}", cookies=cookies, - data=data) + response = request.post(f"/tu/{voteinfo.ID}", cookies=cookies, data=data) assert response.status_code == int(HTTPStatus.BAD_REQUEST) assert response.text == "Invalid 'decision' value." 
@@ -829,18 +879,17 @@ def test_tu_addvote(client: TestClient, tu_user: User): assert response.status_code == int(HTTPStatus.OK) -def test_tu_addvote_unauthorized(client: TestClient, user: User, - proposal: Tuple[User, User, TUVoteInfo]): +def test_tu_addvote_unauthorized( + client: TestClient, user: User, proposal: Tuple[User, User, TUVoteInfo] +): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - response = request.get("/addvote", cookies=cookies, - allow_redirects=False) + response = request.get("/addvote", cookies=cookies, allow_redirects=False) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/tu" with client as request: - response = request.post("/addvote", cookies=cookies, - allow_redirects=False) + response = request.post("/addvote", cookies=cookies, allow_redirects=False) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/tu" @@ -848,8 +897,7 @@ def test_tu_addvote_unauthorized(client: TestClient, user: User, def test_tu_addvote_invalid_type(client: TestClient, tu_user: User): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - response = request.get("/addvote", params={"type": "faketype"}, - cookies=cookies) + response = request.get("/addvote", params={"type": "faketype"}, cookies=cookies) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -860,11 +908,7 @@ def test_tu_addvote_invalid_type(client: TestClient, tu_user: User): def test_tu_addvote_post(client: TestClient, tu_user: User, user: User): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} - data = { - "user": user.Username, - "type": "add_tu", - "agenda": "Blah" - } + data = {"user": user.Username, "type": "add_tu", "agenda": "Blah"} with client as request: response = request.post("/addvote", cookies=cookies, data=data) @@ -874,15 +918,12 @@ def test_tu_addvote_post(client: 
TestClient, tu_user: User, user: User): assert voteinfo is not None -def test_tu_addvote_post_cant_duplicate_username(client: TestClient, - tu_user: User, user: User): +def test_tu_addvote_post_cant_duplicate_username( + client: TestClient, tu_user: User, user: User +): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} - data = { - "user": user.Username, - "type": "add_tu", - "agenda": "Blah" - } + data = {"user": user.Username, "type": "add_tu", "agenda": "Blah"} with client as request: response = request.post("/addvote", cookies=cookies, data=data) @@ -904,8 +945,7 @@ def test_tu_addvote_post_invalid_username(client: TestClient, tu_user: User): assert response.status_code == int(HTTPStatus.NOT_FOUND) -def test_tu_addvote_post_invalid_type(client: TestClient, tu_user: User, - user: User): +def test_tu_addvote_post_invalid_type(client: TestClient, tu_user: User, user: User): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} data = {"user": user.Username} with client as request: @@ -913,8 +953,7 @@ def test_tu_addvote_post_invalid_type(client: TestClient, tu_user: User, assert response.status_code == int(HTTPStatus.BAD_REQUEST) -def test_tu_addvote_post_invalid_agenda(client: TestClient, - tu_user: User, user: User): +def test_tu_addvote_post_invalid_agenda(client: TestClient, tu_user: User, user: User): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} data = {"user": user.Username, "type": "add_tu"} with client as request: diff --git a/test/test_tu_vote.py b/test/test_tu_vote.py index 91d73ecb..8c1c08de 100644 --- a/test/test_tu_vote.py +++ b/test/test_tu_vote.py @@ -1,5 +1,4 @@ import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db, time @@ -17,9 +16,14 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - RealName="Test User", Passwd="testPassword", - AccountTypeID=TRUSTED_USER_ID) + user = db.create( + User, 
+ Username="test", + Email="test@example.org", + RealName="Test User", + Passwd="testPassword", + AccountTypeID=TRUSTED_USER_ID, + ) yield user @@ -27,10 +31,15 @@ def user() -> User: def tu_voteinfo(user: User) -> TUVoteInfo: ts = time.utcnow() with db.begin(): - tu_voteinfo = db.create(TUVoteInfo, Agenda="Blah blah.", - User=user.Username, - Submitted=ts, End=ts + 5, - Quorum=0.5, Submitter=user) + tu_voteinfo = db.create( + TUVoteInfo, + Agenda="Blah blah.", + User=user.Username, + Submitted=ts, + End=ts + 5, + Quorum=0.5, + Submitter=user, + ) yield tu_voteinfo diff --git a/test/test_tu_voteinfo.py b/test/test_tu_voteinfo.py index 17226048..34845b86 100644 --- a/test/test_tu_voteinfo.py +++ b/test/test_tu_voteinfo.py @@ -1,5 +1,4 @@ import pytest - from sqlalchemy.exc import IntegrityError from aurweb import db, time @@ -17,21 +16,29 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = create(User, Username="test", Email="test@example.org", - RealName="Test User", Passwd="testPassword", - AccountTypeID=TRUSTED_USER_ID) + user = create( + User, + Username="test", + Email="test@example.org", + RealName="Test User", + Passwd="testPassword", + AccountTypeID=TRUSTED_USER_ID, + ) yield user def test_tu_voteinfo_creation(user: User): ts = time.utcnow() with db.begin(): - tu_voteinfo = create(TUVoteInfo, - Agenda="Blah blah.", - User=user.Username, - Submitted=ts, End=ts + 5, - Quorum=0.5, - Submitter=user) + tu_voteinfo = create( + TUVoteInfo, + Agenda="Blah blah.", + User=user.Username, + Submitted=ts, + End=ts + 5, + Quorum=0.5, + Submitter=user, + ) assert bool(tu_voteinfo.ID) assert tu_voteinfo.Agenda == "Blah blah." 
assert tu_voteinfo.User == user.Username @@ -50,12 +57,15 @@ def test_tu_voteinfo_creation(user: User): def test_tu_voteinfo_is_running(user: User): ts = time.utcnow() with db.begin(): - tu_voteinfo = create(TUVoteInfo, - Agenda="Blah blah.", - User=user.Username, - Submitted=ts, End=ts + 1000, - Quorum=0.5, - Submitter=user) + tu_voteinfo = create( + TUVoteInfo, + Agenda="Blah blah.", + User=user.Username, + Submitted=ts, + End=ts + 1000, + Quorum=0.5, + Submitter=user, + ) assert tu_voteinfo.is_running() is True with db.begin(): @@ -66,12 +76,15 @@ def test_tu_voteinfo_is_running(user: User): def test_tu_voteinfo_total_votes(user: User): ts = time.utcnow() with db.begin(): - tu_voteinfo = create(TUVoteInfo, - Agenda="Blah blah.", - User=user.Username, - Submitted=ts, End=ts + 1000, - Quorum=0.5, - Submitter=user) + tu_voteinfo = create( + TUVoteInfo, + Agenda="Blah blah.", + User=user.Username, + Submitted=ts, + End=ts + 1000, + Quorum=0.5, + Submitter=user, + ) tu_voteinfo.Yes = 1 tu_voteinfo.No = 3 @@ -84,65 +97,81 @@ def test_tu_voteinfo_total_votes(user: User): def test_tu_voteinfo_null_submitter_raises(user: User): with pytest.raises(IntegrityError): with db.begin(): - create(TUVoteInfo, - Agenda="Blah blah.", - User=user.Username, - Submitted=0, End=0, - Quorum=0.50) + create( + TUVoteInfo, + Agenda="Blah blah.", + User=user.Username, + Submitted=0, + End=0, + Quorum=0.50, + ) rollback() def test_tu_voteinfo_null_agenda_raises(user: User): with pytest.raises(IntegrityError): with db.begin(): - create(TUVoteInfo, - User=user.Username, - Submitted=0, End=0, - Quorum=0.50, - Submitter=user) + create( + TUVoteInfo, + User=user.Username, + Submitted=0, + End=0, + Quorum=0.50, + Submitter=user, + ) rollback() def test_tu_voteinfo_null_user_raises(user: User): with pytest.raises(IntegrityError): with db.begin(): - create(TUVoteInfo, - Agenda="Blah blah.", - Submitted=0, End=0, - Quorum=0.50, - Submitter=user) + create( + TUVoteInfo, + Agenda="Blah blah.", + 
Submitted=0, + End=0, + Quorum=0.50, + Submitter=user, + ) rollback() def test_tu_voteinfo_null_submitted_raises(user: User): with pytest.raises(IntegrityError): with db.begin(): - create(TUVoteInfo, - Agenda="Blah blah.", - User=user.Username, - End=0, - Quorum=0.50, - Submitter=user) + create( + TUVoteInfo, + Agenda="Blah blah.", + User=user.Username, + End=0, + Quorum=0.50, + Submitter=user, + ) rollback() def test_tu_voteinfo_null_end_raises(user: User): with pytest.raises(IntegrityError): with db.begin(): - create(TUVoteInfo, - Agenda="Blah blah.", - User=user.Username, - Submitted=0, - Quorum=0.50, - Submitter=user) + create( + TUVoteInfo, + Agenda="Blah blah.", + User=user.Username, + Submitted=0, + Quorum=0.50, + Submitter=user, + ) rollback() def test_tu_voteinfo_null_quorum_default(user: User): with db.begin(): - vi = create(TUVoteInfo, - Agenda="Blah blah.", - User=user.Username, - Submitted=0, End=0, - Submitter=user) + vi = create( + TUVoteInfo, + Agenda="Blah blah.", + User=user.Username, + Submitted=0, + End=0, + Submitter=user, + ) assert vi.Quorum == 0 diff --git a/test/test_tuvotereminder.py b/test/test_tuvotereminder.py index a54c52a4..0233c8b2 100644 --- a/test/test_tuvotereminder.py +++ b/test/test_tuvotereminder.py @@ -19,8 +19,13 @@ def create_vote(user: User, voteinfo: TUVoteInfo) -> TUVote: def create_user(username: str, type_id: int): with db.begin(): - user = db.create(User, AccountTypeID=type_id, Username=username, - Email=f"{username}@example.org", Passwd=str()) + user = db.create( + User, + AccountTypeID=type_id, + Username=username, + Email=f"{username}@example.org", + Passwd=str(), + ) return user @@ -32,9 +37,11 @@ def email_pieces(voteinfo: TUVoteInfo) -> Tuple[str, str]: :return: tuple(subject, content) """ subject = f"TU Vote Reminder: Proposal {voteinfo.ID}" - content = (f"Please remember to cast your vote on proposal {voteinfo.ID} " - f"[1]. 
The voting period\nends in less than 48 hours.\n\n" - f"[1] {aur_location}/tu/?id={voteinfo.ID}") + content = ( + f"Please remember to cast your vote on proposal {voteinfo.ID} " + f"[1]. The voting period\nends in less than 48 hours.\n\n" + f"[1] {aur_location}/tu/?id={voteinfo.ID}" + ) return (subject, content) @@ -58,14 +65,19 @@ def voteinfo(user: User) -> TUVoteInfo: now = time.utcnow() start = config.getint("tuvotereminder", "range_start") with db.begin(): - voteinfo = db.create(TUVoteInfo, Agenda="Lorem ipsum.", - User=user.Username, End=(now + start + 1), - Quorum=0.00, Submitter=user, Submitted=0) + voteinfo = db.create( + TUVoteInfo, + Agenda="Lorem ipsum.", + User=user.Username, + End=(now + start + 1), + Quorum=0.00, + Submitter=user, + Submitted=0, + ) yield voteinfo -def test_tu_vote_reminders(user: User, user2: User, user3: User, - voteinfo: TUVoteInfo): +def test_tu_vote_reminders(user: User, user2: User, user3: User, voteinfo: TUVoteInfo): reminder.main() assert Email.count() == 3 @@ -75,7 +87,7 @@ def test_tu_vote_reminders(user: User, user2: User, user3: User, # (to, content) (user.Email, subject, content), (user2.Email, subject, content), - (user3.Email, subject, content) + (user3.Email, subject, content), ] for i, element in enumerate(expectations): email, subject, content = element @@ -84,8 +96,9 @@ def test_tu_vote_reminders(user: User, user2: User, user3: User, assert emails[i].body == content -def test_tu_vote_reminders_only_unvoted(user: User, user2: User, user3: User, - voteinfo: TUVoteInfo): +def test_tu_vote_reminders_only_unvoted( + user: User, user2: User, user3: User, voteinfo: TUVoteInfo +): # Vote with user2 and user3; leaving only user to be notified. 
create_vote(user2, voteinfo) create_vote(user3, voteinfo) diff --git a/test/test_user.py b/test/test_user.py index 5f25f3c9..17fd0c0e 100644 --- a/test/test_user.py +++ b/test/test_user.py @@ -1,6 +1,5 @@ import hashlib import json - from datetime import datetime, timedelta import bcrypt @@ -9,10 +8,14 @@ import pytest import aurweb.auth import aurweb.config import aurweb.models.account_type as at - from aurweb import db from aurweb.auth import creds -from aurweb.models.account_type import DEVELOPER_ID, TRUSTED_USER_AND_DEV_ID, TRUSTED_USER_ID, USER_ID +from aurweb.models.account_type import ( + DEVELOPER_ID, + TRUSTED_USER_AND_DEV_ID, + TRUSTED_USER_ID, + USER_ID, +) from aurweb.models.ban import Ban from aurweb.models.package import Package from aurweb.models.package_base import PackageBase @@ -31,10 +34,14 @@ def setup(db_test): def create_user(username: str, account_type_id: int): with db.begin(): - user = db.create(User, Username=username, - Email=f"{username}@example.org", - RealName=username.title(), Passwd="testPassword", - AccountTypeID=account_type_id) + user = db.create( + User, + Username=username, + Email=f"{username}@example.org", + RealName=username.title(), + Passwd="testPassword", + AccountTypeID=account_type_id, + ) return user @@ -71,7 +78,7 @@ def package(user: User) -> Package: def test_user_login_logout(user: User): - """ Test creating a user and reading its columns. """ + """Test creating a user and reading its columns.""" # Assert that make_user created a valid user. assert bool(user.ID) @@ -89,8 +96,7 @@ def test_user_login_logout(user: User): assert user.is_authenticated() # Expect that User session relationships work right. 
- user_session = db.query(Session, - Session.UsersID == user.ID).first() + user_session = db.query(Session, Session.UsersID == user.ID).first() assert user_session == user.session assert user.session.SessionID == sid assert user.session.User == user @@ -111,8 +117,10 @@ def test_user_login_logout(user: User): assert result.is_authenticated() # Test out user string functions. - assert repr(user) == f"" + assert ( + repr(user) + == f"" + ) # Test logout. user.logout(request) @@ -145,9 +153,7 @@ def test_user_login_suspended(user: User): def test_legacy_user_authentication(user: User): with db.begin(): user.Salt = bcrypt.gensalt().decode() - user.Passwd = hashlib.md5( - f"{user.Salt}testPassword".encode() - ).hexdigest() + user.Passwd = hashlib.md5(f"{user.Salt}testPassword".encode()).hexdigest() assert not user.valid_password("badPassword") assert user.valid_password("testPassword") @@ -160,8 +166,12 @@ def test_user_login_with_outdated_sid(user: User): # Make a session with a LastUpdateTS 5 seconds ago, causing # user.login to update it with a new sid. 
with db.begin(): - db.create(Session, UsersID=user.ID, SessionID="stub", - LastUpdateTS=datetime.utcnow().timestamp() - 5) + db.create( + Session, + UsersID=user.ID, + SessionID="stub", + LastUpdateTS=datetime.utcnow().timestamp() - 5, + ) sid = user.login(Request(), "testPassword") assert sid and user.is_authenticated() assert sid != "stub" @@ -186,9 +196,12 @@ def test_user_ssh_pub_key(user: User): assert user.ssh_pub_keys.first() is None with db.begin(): - ssh_pub_key = db.create(SSHPubKey, UserID=user.ID, - Fingerprint="testFingerprint", - PubKey="testPubKey") + ssh_pub_key = db.create( + SSHPubKey, + UserID=user.ID, + Fingerprint="testFingerprint", + PubKey="testPubKey", + ) assert user.ssh_pub_keys.first() == ssh_pub_key @@ -283,8 +296,9 @@ def test_user_packages(user: User, package: Package): assert package in user.packages() -def test_can_edit_user(user: User, tu_user: User, dev_user: User, - tu_and_dev_user: User): +def test_can_edit_user( + user: User, tu_user: User, dev_user: User, tu_and_dev_user: User +): # User can edit. assert user.can_edit_user(user) diff --git a/test/test_usermaint.py b/test/test_usermaint.py index e572569a..7d7bd135 100644 --- a/test/test_usermaint.py +++ b/test/test_usermaint.py @@ -14,13 +14,18 @@ def setup(db_test): @pytest.fixture def user() -> User: with db.begin(): - user = db.create(User, Username="test", Email="test@example.org", - Passwd="testPassword", AccountTypeID=USER_ID) + user = db.create( + User, + Username="test", + Email="test@example.org", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) yield user def test_usermaint_noop(user: User): - """ Last[SSH]Login isn't expired in this test: usermaint is noop. 
""" + """Last[SSH]Login isn't expired in this test: usermaint is noop.""" now = time.utcnow() with db.begin(): diff --git a/test/test_util.py b/test/test_util.py index ae1de81b..686e35b4 100644 --- a/test/test_util.py +++ b/test/test_util.py @@ -1,10 +1,8 @@ import json - from http import HTTPStatus import fastapi import pytest - from fastapi.responses import JSONResponse from aurweb import filters, util @@ -18,7 +16,7 @@ def test_round(): def test_git_search(): - """ Test that git_search matches the full commit if necessary. """ + """Test that git_search matches the full commit if necessary.""" commit_hash = "0123456789abcdef" repo = {commit_hash} prefixlen = util.git_search(repo, commit_hash) @@ -26,7 +24,7 @@ def test_git_search(): def test_git_search_double_commit(): - """ Test that git_search matches a shorter prefix length. """ + """Test that git_search matches a shorter prefix length.""" commit_hash = "0123456789abcdef" repo = {commit_hash[:13]} # Locate the shortest prefix length that matches commit_hash. @@ -36,7 +34,6 @@ def test_git_search_double_commit(): @pytest.mark.asyncio async def test_error_or_result(): - async def route(request: fastapi.Request): raise RuntimeError("No response returned.") diff --git a/util/fix-coverage b/util/fix-coverage index 3446c4af..77cf29c1 100755 --- a/util/fix-coverage +++ b/util/fix-coverage @@ -48,9 +48,8 @@ def main(): files[i] = path for _, i in enumerate(files.keys()): - new_path = re.sub(r'^/aurweb', aurwebdir, files[i]) - cursor.execute("UPDATE file SET path = ? WHERE id = ?", ( - new_path, i)) + new_path = re.sub(r"^/aurweb", aurwebdir, files[i]) + cursor.execute("UPDATE file SET path = ? WHERE id = ?", (new_path, i)) db.commit() db.close() diff --git a/web/html/503.php b/web/html/503.php index 80eb4369..23e7014e 100644 --- a/web/html/503.php +++ b/web/html/503.php @@ -12,4 +12,3 @@ html_header( __("Service Unavailable") );
    - diff --git a/web/template/flag_comment.php b/web/template/flag_comment.php index 05eeacb2..dc285a97 100644 --- a/web/template/flag_comment.php +++ b/web/template/flag_comment.php @@ -24,4 +24,3 @@

    - diff --git a/web/template/header.php b/web/template/header.php index afe7a9b6..9631be91 100644 --- a/web/template/header.php +++ b/web/template/header.php @@ -80,4 +80,3 @@ - diff --git a/web/template/pkgreq_close_form.php b/web/template/pkgreq_close_form.php index 6077b325..6228f6ab 100644 --- a/web/template/pkgreq_close_form.php +++ b/web/template/pkgreq_close_form.php @@ -29,4 +29,3 @@ - diff --git a/web/template/template.phps b/web/template/template.phps index 4f8117c8..f1a0bb0d 100644 --- a/web/template/template.phps +++ b/web/template/template.phps @@ -17,4 +17,3 @@ print __("Hi, this is worth reading!")."
    \n"; html_footer(AURWEB_VERSION); - From 505eb90479df1d14c3c2e64a90a40a1ef5815765 Mon Sep 17 00:00:00 2001 From: Joakim Saario Date: Sat, 20 Aug 2022 19:29:25 +0200 Subject: [PATCH 079/415] chore: Add .git-blame-ignore-revs file The idea is to exclude commits that only contains formatting so that it's easier to backtrack actual code changes with `git blame`. --- .git-blame-ignore-revs | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 .git-blame-ignore-revs diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 00000000..d3c9887b --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,2 @@ +# style: Run pre-commit +9c6c13b78a30cb9d800043410799e29631f803d2 From de5538a40f5d706a1f7dee7a2361be32ff2760c1 Mon Sep 17 00:00:00 2001 From: Joakim Saario Date: Sun, 21 Aug 2022 22:16:52 +0200 Subject: [PATCH 080/415] ci(lint): Use pre-commit --- .gitlab-ci.yml | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 98f99ae3..7134673c 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -13,18 +13,16 @@ variables: LOG_CONFIG: logging.test.conf lint: - variables: - # Space-separated list of directories that should be linted. - REQUIRES_LINT: "aurweb test migrations" stage: .pre before_script: - pacman -Sy --noconfirm --noprogressbar --cachedir .pkg-cache archlinux-keyring - pacman -Syu --noconfirm --noprogressbar --cachedir .pkg-cache - python python-isort flake8 + git python python-pre-commit script: - - bash -c 'flake8 --count $(echo "$REQUIRES_LINT" | xargs); exit $?' - - bash -c 'isort --check-only $(echo "$REQUIRES_LINT" | xargs); exit $?' 
+ # https://github.com/pre-commit/pre-commit/issues/2178#issuecomment-1002163763 + - export SETUPTOOLS_USE_DISTUTILS=stdlib + - pre-commit run -a test: stage: test From ce5dbf0eebb58a5f9d39736a42a1558ff0ee8b64 Mon Sep 17 00:00:00 2001 From: Joakim Saario Date: Mon, 22 Aug 2022 22:30:25 +0200 Subject: [PATCH 081/415] docs(contributing): Update Coding Style --- CONTRIBUTING.md | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 52e182c7..58612a36 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -31,21 +31,27 @@ Test patches that increase coverage in the codebase are always welcome. ### Coding Style -We use the `flake8` and `isort` tools to manage PEP-8 coherence and -import ordering in this project. +We use `autoflake`, `isort`, `black` and `flake8` to enforce coding style in a +PEP-8 compliant way. These tools run in GitLab CI using `pre-commit` to verify +that any pushed code changes comply with this. + +To enable the `pre-commit` git hook, install the `pre-commit` package either +with `pacman` or `pip` and then run `pre-commit install --install-hooks`. This +will ensure formatting is done before any code is commited to the git +repository. There are plugins for editors or IDEs which automate this process. Some example plugins: -- [tell-k/vim-autopep8](https://github.com/tell-k/vim-autopep8) +- [tenfyzhong/autoflake.vim](https://github.com/tenfyzhong/autoflake.vim) - [fisadev/vim-isort](https://github.com/fisadev/vim-isort) +- [psf/black](https://github.com/psf/black) +- [nvie/vim-flake8](https://github.com/nvie/vim-flake8) - [prabirshrestha/vim-lsp](https://github.com/prabirshrestha/vim-lsp) +- [dense-analysis/ale](https://github.com/dense-analysis/ale) -See `setup.cfg` for flake8 and isort specific rules. - -Note: We are planning on switching to [psf/black](https://github.com/psf/black). 
-For now, developers should ensure that flake8 and isort passes when submitting -merge requests or patch sets. +See `setup.cfg`, `pyproject.toml` and `.pre-commit-config.yaml` for tool +specific configurations. ### Development Environment From 57c040995820e08e4af9aadfdc5c946551d899ec Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 22 Aug 2022 23:44:56 -0700 Subject: [PATCH 082/415] style: set flake8's max-line-length=88 In accordance with black's defined style, we now expect a maximum of 88 columns for any one particular line. This change fixes remaining violations of 88 columns in the codebase (not many), and introduces the modified flake8 configuration. Signed-off-by: Kevin Morris --- schema/gendummydata.py | 13 ++++++++++--- setup.cfg | 2 +- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/schema/gendummydata.py b/schema/gendummydata.py index fa59855f..dfc8eee5 100755 --- a/schema/gendummydata.py +++ b/schema/gendummydata.py @@ -233,7 +233,8 @@ for p in list(seen_pkgs.keys()): s = ( "INSERT INTO PackageBases (ID, Name, FlaggerComment, SubmittedTS, ModifiedTS, " - "SubmitterUID, MaintainerUID, PackagerUID) VALUES (%d, '%s', '', %d, %d, %d, %s, %s);\n" + "SubmitterUID, MaintainerUID, PackagerUID) VALUES " + "(%d, '%s', '', %d, %d, %d, %s, %s);\n" ) s = s % (seen_pkgs[p], p, NOW, NOW, uuid, muid, puid) out.write(s) @@ -303,7 +304,10 @@ for p in seen_pkgs_keys: deptype = random.randrange(1, 5) if deptype == 4: dep += ": for " + random.choice(seen_pkgs_keys) - s = "INSERT INTO PackageDepends(PackageID, DepTypeID, DepName) VALUES (%d, %d, '%s');\n" + s = ( + "INSERT INTO PackageDepends(PackageID, DepTypeID, DepName) " + "VALUES (%d, %d, '%s');\n" + ) s = s % (seen_pkgs[p], deptype, dep) out.write(s) @@ -311,7 +315,10 @@ for p in seen_pkgs_keys: for i in range(0, num_deps): rel = random.choice(seen_pkgs_keys) reltype = random.randrange(1, 4) - s = "INSERT INTO PackageRelations(PackageID, RelTypeID, RelName) VALUES (%d, %d, '%s');\n" + s = ( + 
"INSERT INTO PackageRelations(PackageID, RelTypeID, RelName) " + "VALUES (%d, %d, '%s');\n" + ) s = s % (seen_pkgs[p], reltype, rel) out.write(s) diff --git a/setup.cfg b/setup.cfg index 3c9bf777..41978dae 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,5 +1,5 @@ [flake8] -max-line-length = 127 +max-line-length = 88 max-complexity = 10 # Ignore some unavoidable flake8 warnings; we know this is against From fbb3e052fed5a82e334bb795c58f6e0a16f55890 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 23 Aug 2022 00:07:40 -0700 Subject: [PATCH 083/415] ci: use cache/virtualenv for test dependencies Signed-off-by: Kevin Morris --- .gitlab-ci.yml | 3 +++ Dockerfile | 3 ++- docker/scripts/install-deps.sh | 3 ++- docker/scripts/install-python-deps.sh | 7 +++---- 4 files changed, 10 insertions(+), 6 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 7134673c..4d082582 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -4,6 +4,7 @@ cache: paths: # For some reason Gitlab CI only supports storing cache/artifacts in a path relative to the build directory - .pkg-cache + - .venv variables: AUR_CONFIG: conf/config # Default MySQL config setup in before_script. @@ -31,6 +32,8 @@ test: before_script: - export PATH="$HOME/.poetry/bin:${PATH}" - ./docker/scripts/install-deps.sh + - virtualenv -p python3 .venv + - source .venv/bin/activate # Enable our virtualenv cache - ./docker/scripts/install-python-deps.sh - useradd -U -d /aurweb -c 'AUR User' aur - ./docker/mariadb-entrypoint.sh diff --git a/Dockerfile b/Dockerfile index 16e6514e..28bca0e4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -6,6 +6,7 @@ VOLUME /root/.cache/pypoetry/artifacts ENV PATH="/root/.poetry/bin:${PATH}" ENV PYTHONPATH=/aurweb ENV AUR_CONFIG=conf/config +ENV COMPOSE=1 # Install system-wide dependencies. COPY ./docker/scripts/install-deps.sh /install-deps.sh @@ -27,7 +28,7 @@ RUN cp -vf conf/config.dev conf/config RUN sed -i "s;YOUR_AUR_ROOT;/aurweb;g" conf/config # Install Python dependencies. 
-RUN /docker/scripts/install-python-deps.sh +RUN /docker/scripts/install-python-deps.sh compose # Compile asciidocs. RUN make -C doc diff --git a/docker/scripts/install-deps.sh b/docker/scripts/install-deps.sh index ced18c81..82496a2b 100755 --- a/docker/scripts/install-deps.sh +++ b/docker/scripts/install-deps.sh @@ -17,6 +17,7 @@ pacman -Syu --noconfirm --noprogressbar \ mariadb mariadb-libs cgit-aurweb uwsgi uwsgi-plugin-cgi \ php php-fpm memcached php-memcached python-pip pyalpm \ python-srcinfo curl libeatmydata cronie python-poetry \ - python-poetry-core step-cli step-ca asciidoc + python-poetry-core step-cli step-ca asciidoc \ + python-virtualenv exec "$@" diff --git a/docker/scripts/install-python-deps.sh b/docker/scripts/install-python-deps.sh index 3d5f28f0..01a6eaa7 100755 --- a/docker/scripts/install-python-deps.sh +++ b/docker/scripts/install-python-deps.sh @@ -4,8 +4,7 @@ set -eou pipefail # Upgrade PIP; Arch Linux's version of pip is outdated for Poetry. pip install --upgrade pip -# Install the aurweb package and deps system-wide via poetry. -poetry config virtualenvs.create false +if [ ! -z "${COMPOSE+x}" ]; then + poetry config virtualenvs.create false +fi poetry install --no-interaction --no-ansi - -exec "$@" From 929bb756a8845fea4652d1b67cae515df872e98c Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 23 Aug 2022 02:32:35 -0700 Subject: [PATCH 084/415] ci(lint): add .pre-commit cache for pre-commit Signed-off-by: Kevin Morris --- .gitlab-ci.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 4d082582..23ed18f3 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -5,6 +5,7 @@ cache: # For some reason Gitlab CI only supports storing cache/artifacts in a path relative to the build directory - .pkg-cache - .venv + - .pre-commit variables: AUR_CONFIG: conf/config # Default MySQL config setup in before_script. 
@@ -23,6 +24,7 @@ lint: script: # https://github.com/pre-commit/pre-commit/issues/2178#issuecomment-1002163763 - export SETUPTOOLS_USE_DISTUTILS=stdlib + - export XDG_CACHE_HOME=.pre-commit - pre-commit run -a test: From 8a3a7e31aca556c3a4b07f1ce717d7a0d6682f68 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Wed, 31 Aug 2022 22:01:54 -0700 Subject: [PATCH 085/415] upgrade: bump version to v6.1.1 Signed-off-by: Kevin Morris --- aurweb/config.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aurweb/config.py b/aurweb/config.py index 4f97020c..ee14f61e 100644 --- a/aurweb/config.py +++ b/aurweb/config.py @@ -5,7 +5,7 @@ from typing import Any # Publicly visible version of aurweb. This is used to display # aurweb versioning in the footer and must be maintained. # Todo: Make this dynamic/automated. -AURWEB_VERSION = "v6.0.28" +AURWEB_VERSION = "v6.1.1" _parser = None diff --git a/pyproject.toml b/pyproject.toml index 3a6dbe4d..f980ded9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ combine_as_imports = true # [tool.poetry] name = "aurweb" -version = "v6.0.28" +version = "v6.1.1" license = "GPL-2.0-only" description = "Source code for the Arch User Repository's website" homepage = "https://aur.archlinux.org" From b8a4ce4ceb085d70f7c33f7f884efb5433e65e47 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Fri, 2 Sep 2022 15:04:43 -0700 Subject: [PATCH 086/415] fix: include maint/comaint state in pkgbase post's error context Closes #386 Signed-off-by: Kevin Morris --- aurweb/routers/pkgbase.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/aurweb/routers/pkgbase.py b/aurweb/routers/pkgbase.py index 913e3955..076aec1e 100644 --- a/aurweb/routers/pkgbase.py +++ b/aurweb/routers/pkgbase.py @@ -587,6 +587,9 @@ async def pkgbase_disown_post( context = templates.make_context(request, "Disown Package") context["pkgbase"] = pkgbase + context["is_maint"] = request.user == pkgbase.Maintainer + 
context["is_comaint"] = request.user in comaints + if not confirm: context["errors"] = [ ( @@ -610,9 +613,7 @@ async def pkgbase_disown_post( request, "pkgbase/disown.html", context, status_code=HTTPStatus.BAD_REQUEST ) - if not next: - next = f"/pkgbase/{name}" - + next = next or f"/pkgbase/{name}" return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER) From 6435c2b1f1f324bc717f0c12afbdc42c88e7e66b Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Fri, 2 Sep 2022 15:28:02 -0700 Subject: [PATCH 087/415] upgrade: bump to version v6.1.2 Signed-off-by: Kevin Morris --- aurweb/config.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aurweb/config.py b/aurweb/config.py index ee14f61e..df129c39 100644 --- a/aurweb/config.py +++ b/aurweb/config.py @@ -5,7 +5,7 @@ from typing import Any # Publicly visible version of aurweb. This is used to display # aurweb versioning in the footer and must be maintained. # Todo: Make this dynamic/automated. -AURWEB_VERSION = "v6.1.1" +AURWEB_VERSION = "v6.1.2" _parser = None diff --git a/pyproject.toml b/pyproject.toml index f980ded9..f249c80c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ combine_as_imports = true # [tool.poetry] name = "aurweb" -version = "v6.1.1" +version = "v6.1.2" license = "GPL-2.0-only" description = "Source code for the Arch User Repository's website" homepage = "https://aur.archlinux.org" From 7fed5742b8e2267f7ce4f4a2db15087742d781e0 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 5 Sep 2022 02:33:48 -0700 Subject: [PATCH 088/415] fix: display requests for TUs which no longer have an associated User Closes #387 Signed-off-by: Kevin Morris --- aurweb/routers/requests.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/aurweb/routers/requests.py b/aurweb/routers/requests.py index c7935575..51be6d2c 100644 --- a/aurweb/routers/requests.py +++ b/aurweb/routers/requests.py @@ -7,7 +7,7 @@ from sqlalchemy import case 
from aurweb import db, defaults, time, util from aurweb.auth import creds, requires_auth from aurweb.exceptions import handle_form_exceptions -from aurweb.models import PackageRequest, User +from aurweb.models import PackageRequest from aurweb.models.package_request import PENDING_ID, REJECTED_ID from aurweb.requests.util import get_pkgreq_by_id from aurweb.scripts import notify @@ -31,8 +31,8 @@ async def requests( context["O"] = O context["PP"] = PP - # A PackageRequest query, with left inner joined User and RequestType. - query = db.query(PackageRequest).join(User, User.ID == PackageRequest.UsersID) + # A PackageRequest query + query = db.query(PackageRequest) # If the request user is not elevated (TU or Dev), then # filter PackageRequests which are owned by the request user. From a629098b9299adc67a89589ee70924ee9cf4d464 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 5 Sep 2022 02:55:20 -0700 Subject: [PATCH 089/415] fix: conditional display on Request's 'Filed by' field Since we support requests which have no associated user, we must support the case where we are displaying such a request. Signed-off-by: Kevin Morris --- templates/requests.html | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/templates/requests.html b/templates/requests.html index ff265de1..ed8f31fb 100644 --- a/templates/requests.html +++ b/templates/requests.html @@ -46,9 +46,13 @@ {{ result.Comments }} {# Filed by #} - - {{ result.User.Username }} - + {# If the record has an associated User, display a link to that user. #} + {# Otherwise, display nothing (an empty column). 
#} + {% if result.User %} + + {{ result.User.Username }} + + {% endif %} {% set idle_time = config_getint("options", "request_idle_time") %} {% set time_delta = (utcnow - result.RequestTS) | int %} From 83ddbd220fe7b00ef66eb5a9c8269fd1e0bf322a Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 5 Sep 2022 02:56:48 -0700 Subject: [PATCH 090/415] test: get /requests displays all requests, including those without a User Signed-off-by: Kevin Morris --- test/test_requests.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/test/test_requests.py b/test/test_requests.py index fd831674..83cdb402 100644 --- a/test/test_requests.py +++ b/test/test_requests.py @@ -743,6 +743,22 @@ def test_requests( assert len(rows) == 5 # There are five records left on the second page. +def test_requests_by_deleted_users( + client: TestClient, user: User, tu_user: User, pkgreq: PackageRequest +): + with db.begin(): + db.delete(user) + + cookies = {"AURSID": tu_user.login(Request(), "testPassword")} + with client as request: + resp = request.get("/requests", cookies=cookies) + assert resp.status_code == HTTPStatus.OK + + root = parse_root(resp.text) + rows = root.xpath('//table[@class="results"]/tbody/tr') + assert len(rows) == 1 + + def test_requests_selfmade( client: TestClient, user: User, requests: list[PackageRequest] ): From 0388b12896e31bf7d4a5b0feeeb207ce6c0231dc Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 5 Sep 2022 19:25:32 -0700 Subject: [PATCH 091/415] fix: package description on /packages/{name} view ...What in the world happened here. We were literally just populating `pkg` based on `pkgbase.packages.first()`. We should have been focusing on the package passed by the context, which is always available when `show_package_details` is true. 
Closes #384 Signed-off-by: Kevin Morris --- templates/partials/packages/details.html | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/templates/partials/packages/details.html b/templates/partials/packages/details.html index ca7159be..cdb62128 100644 --- a/templates/partials/packages/details.html +++ b/templates/partials/packages/details.html @@ -1,4 +1,3 @@ -{% set pkg = pkgbase.packages.first() %} @@ -20,13 +19,13 @@ - + - + diff --git a/test/test_packages_routes.py b/test/test_packages_routes.py index a707bbac..6e92eeff 100644 --- a/test/test_packages_routes.py +++ b/test/test_packages_routes.py @@ -304,6 +304,50 @@ def test_package(client: TestClient, package: Package): assert conflicts[0].text.strip() == ", ".join(expected) +def test_package_split_description(client: TestClient, user: User): + + with db.begin(): + pkgbase = db.create( + PackageBase, + Name="pkgbase", + Maintainer=user, + Packager=user, + ) + + pkg_a = db.create( + Package, + PackageBase=pkgbase, + Name="pkg_a", + Description="pkg_a desc", + ) + pkg_b = db.create( + Package, + PackageBase=pkgbase, + Name="pkg_b", + Description="pkg_b desc", + ) + + # Check pkg_a + with client as request: + endp = f"/packages/{pkg_a.Name}" + resp = request.get(endp) + assert resp.status_code == HTTPStatus.OK + + root = parse_root(resp.text) + row = root.xpath('//tr[@id="pkg-description"]/td')[0] + assert row.text == pkg_a.Description + + # Check pkg_b + with client as request: + endp = f"/packages/{pkg_b.Name}" + resp = request.get(endp) + assert resp.status_code == HTTPStatus.OK + + root = parse_root(resp.text) + row = root.xpath('//tr[@id="pkg-description"]/td')[0] + assert row.text == pkg_b.Description + + def paged_depends_required(client: TestClient, package: Package): maint = package.PackageBase.Maintainer new_pkgs = [] diff --git a/test/test_templates.py b/test/test_templates.py index 383f45d1..f80e68eb 100644 --- a/test/test_templates.py +++ b/test/test_templates.py @@ -293,7 
+293,7 @@ def test_package_details(user: User, package: Package): "git_clone_uri_anon": GIT_CLONE_URI_ANON, "git_clone_uri_priv": GIT_CLONE_URI_PRIV, "pkgbase": package.PackageBase, - "pkg": package, + "package": package, "comaintainers": [], } ) @@ -329,7 +329,7 @@ def test_package_details_filled(user: User, package: Package): "git_clone_uri_anon": GIT_CLONE_URI_ANON, "git_clone_uri_priv": GIT_CLONE_URI_PRIV, "pkgbase": package.PackageBase, - "pkg": package, + "package": package, "comaintainers": [], "licenses": package.package_licenses, "provides": package.package_relations.filter( From 310c469ba8d7831495d6cc2e24dba7224a705d5f Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Mon, 5 Sep 2022 17:08:55 +0100 Subject: [PATCH 093/415] fix: run pre-commit checks instead of flake8 and isort Signed-off-by: Leonidas Spyropoulos --- docker/scripts/install-deps.sh | 2 +- docker/scripts/run-tests.sh | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/docker/scripts/install-deps.sh b/docker/scripts/install-deps.sh index 82496a2b..85403969 100755 --- a/docker/scripts/install-deps.sh +++ b/docker/scripts/install-deps.sh @@ -18,6 +18,6 @@ pacman -Syu --noconfirm --noprogressbar \ php php-fpm memcached php-memcached python-pip pyalpm \ python-srcinfo curl libeatmydata cronie python-poetry \ python-poetry-core step-cli step-ca asciidoc \ - python-virtualenv + python-virtualenv python-pre-commit exec "$@" diff --git a/docker/scripts/run-tests.sh b/docker/scripts/run-tests.sh index a726c957..5d454ecb 100755 --- a/docker/scripts/run-tests.sh +++ b/docker/scripts/run-tests.sh @@ -21,8 +21,7 @@ rm -f /data/.coverage cp -v .coverage /data/.coverage chmod 666 /data/.coverage -# Run flake8 and isort checks. 
+# Run pre-commit checks for dir in aurweb test migrations; do - flake8 --count $dir - isort --check-only $dir + pre-commit run -a done From a84d115fa1715c19f66540066e021ac3d4c44a3d Mon Sep 17 00:00:00 2001 From: renovate Date: Tue, 6 Sep 2022 08:24:03 +0000 Subject: [PATCH 094/415] chore(deps): add renovate.json --- renovate.json | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 renovate.json diff --git a/renovate.json b/renovate.json new file mode 100644 index 00000000..39a2b6e9 --- /dev/null +++ b/renovate.json @@ -0,0 +1,6 @@ +{ + "$schema": "https://docs.renovatebot.com/renovate-schema.json", + "extends": [ + "config:base" + ] +} From 655402a50931693b3ac376dd5dea4b0c05d893e9 Mon Sep 17 00:00:00 2001 From: renovate Date: Tue, 6 Sep 2022 10:25:02 +0000 Subject: [PATCH 095/415] chore(deps): update dependency pytest-asyncio to ^0.19.0 --- poetry.lock | 124 ++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 2 files changed, 63 insertions(+), 63 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0395db3b..eddb0f95 100644 --- a/poetry.lock +++ b/poetry.lock @@ -34,9 +34,9 @@ idna = ">=2.8" sniffio = ">=1.1" [package.extras] +doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] trio = ["trio (>=0.16)"] -test = ["uvloop (>=0.15)", "mock (>=4)", "uvloop (<0.15)", "contextlib2", "trustme", "pytest-mock (>=3.6.1)", "pytest (>=7.0)", "hypothesis (>=4.0)", "coverage[toml] (>=4.5)"] -doc = ["sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme", "packaging"] [[package]] name = "asgiref" @@ -47,7 +47,7 @@ optional = false python-versions = ">=3.7" [package.extras] -tests = ["mypy (>=0.800)", "pytest-asyncio", "pytest"] +tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] [[package]] name = "atomicwrites" @@ -66,10 +66,10 @@ optional 
= false python-versions = ">=3.5" [package.extras] -tests_no_zope = ["cloudpickle", "pytest-mypy-plugins", "mypy (>=0.900,!=0.940)", "pytest (>=4.3.0)", "pympler", "hypothesis", "coverage[toml] (>=5.0.2)"] -tests = ["cloudpickle", "zope.interface", "pytest-mypy-plugins", "mypy (>=0.900,!=0.940)", "pytest (>=4.3.0)", "pympler", "hypothesis", "coverage[toml] (>=5.0.2)"] -docs = ["sphinx-notfound-page", "zope.interface", "sphinx", "furo"] -dev = ["cloudpickle", "pre-commit", "sphinx-notfound-page", "sphinx", "furo", "zope.interface", "pytest-mypy-plugins", "mypy (>=0.900,!=0.940)", "pytest (>=4.3.0)", "pympler", "hypothesis", "coverage[toml] (>=5.0.2)"] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] +tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] [[package]] name = "authlib" @@ -189,11 +189,11 @@ cffi = ">=1.12" [package.extras] docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] -docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] sdist = ["setuptools_rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] +test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", 
"pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] [[package]] name = "dnspython" @@ -204,12 +204,12 @@ optional = false python-versions = ">=3.6,<4.0" [package.extras] -wmi = ["wmi (>=1.5.1,<2.0.0)"] -trio = ["trio (>=0.14,<0.20)"] -curio = ["sniffio (>=1.1,<2.0)", "curio (>=1.2,<2.0)"] -doh = ["requests-toolbelt (>=0.9.1,<0.10.0)", "requests (>=2.23.0,<3.0.0)", "httpx (>=0.21.1)", "h2 (>=4.1.0)"] -idna = ["idna (>=2.1,<4.0)"] +curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"] dnssec = ["cryptography (>=2.6,<37.0)"] +doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.10.0)"] +idna = ["idna (>=2.1,<4.0)"] +trio = ["trio (>=0.14,<0.20)"] +wmi = ["wmi (>=1.5.1,<2.0.0)"] [[package]] name = "email-validator" @@ -248,8 +248,8 @@ six = ">=1.16.0,<2.0.0" sortedcontainers = ">=2.4.0,<3.0.0" [package.extras] -lua = ["lupa (>=1.13,<2.0)"] aioredis = ["aioredis (>=2.0.1,<3.0.0)"] +lua = ["lupa (>=1.13,<2.0)"] [[package]] name = "fastapi" @@ -264,10 +264,10 @@ pydantic = ">=1.6.2,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1. 
starlette = "0.17.1" [package.extras] -test = ["types-dataclasses (==0.1.7)", "types-orjson (==3.6.0)", "types-ujson (==0.1.1)", "anyio[trio] (>=3.2.1,<4.0.0)", "flask (>=1.1.2,<3.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "ujson (>=4.0.1,<5.0.0)", "orjson (>=3.2.1,<4.0.0)", "databases[sqlite] (>=0.3.2,<0.6.0)", "peewee (>=3.13.3,<4.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "email_validator (>=1.1.1,<2.0.0)", "httpx (>=0.14.0,<0.19.0)", "requests (>=2.24.0,<3.0.0)", "isort (>=5.0.6,<6.0.0)", "black (==21.9b0)", "flake8 (>=3.8.3,<4.0.0)", "mypy (==0.910)", "pytest-cov (>=2.12.0,<4.0.0)", "pytest (>=6.2.4,<7.0.0)"] -doc = ["pyyaml (>=5.3.1,<6.0.0)", "typer-cli (>=0.0.12,<0.0.13)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "mkdocs (>=1.1.2,<2.0.0)"] -dev = ["uvicorn[standard] (>=0.12.0,<0.16.0)", "flake8 (>=3.8.3,<4.0.0)", "autoflake (>=1.4.0,<2.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)"] -all = ["uvicorn[standard] (>=0.12.0,<0.16.0)", "email_validator (>=1.1.1,<2.0.0)", "orjson (>=3.2.1,<4.0.0)", "ujson (>=4.0.1,<5.0.0)", "pyyaml (>=5.3.1,<6.0.0)", "itsdangerous (>=1.1.0,<3.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "jinja2 (>=2.11.2,<4.0.0)", "requests (>=2.24.0,<3.0.0)"] +all = ["email_validator (>=1.1.1,<2.0.0)", "itsdangerous (>=1.1.0,<3.0.0)", "jinja2 (>=2.11.2,<4.0.0)", "orjson (>=3.2.1,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<6.0.0)", "requests (>=2.24.0,<3.0.0)", "ujson (>=4.0.1,<5.0.0)", "uvicorn[standard] (>=0.12.0,<0.16.0)"] +dev = ["autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "uvicorn[standard] (>=0.12.0,<0.16.0)"] +doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<6.0.0)", "typer-cli 
(>=0.0.12,<0.0.13)"] +test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==21.9b0)", "databases[sqlite] (>=0.3.2,<0.6.0)", "email_validator (>=1.1.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.14.0,<0.19.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "orjson (>=3.2.1,<4.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=6.2.4,<7.0.0)", "pytest-cov (>=2.12.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "requests (>=2.24.0,<3.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "types-dataclasses (==0.1.7)", "types-orjson (==3.6.0)", "types-ujson (==0.1.1)", "ujson (>=4.0.1,<5.0.0)"] [[package]] name = "feedgen" @@ -290,8 +290,8 @@ optional = false python-versions = ">=3.7" [package.extras] -testing = ["pytest-timeout (>=2.1)", "pytest-cov (>=3)", "pytest (>=7.1.2)", "coverage (>=6.4.2)", "covdefaults (>=2.2)"] -docs = ["sphinx-autodoc-typehints (>=1.19.1)", "sphinx (>=5.1.1)", "furo (>=2022.6.21)"] +docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] +testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] [[package]] name = "greenlet" @@ -378,9 +378,9 @@ rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} sniffio = "*" [package.extras] -http2 = ["h2 (>=3,<5)"] -cli = ["pygments (>=2.0.0,<3.0.0)", "rich (>=10.0.0,<11.0.0)", "click (>=8.0.0,<9.0.0)"] brotli = ["brotli", "brotlicffi"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10.0.0,<11.0.0)"] +http2 = ["h2 (>=3,<5)"] [[package]] name = "hypercorn" @@ -398,10 +398,10 @@ toml = "*" wsproto = ">=0.14.0" [package.extras] -uvloop = ["uvloop"] -trio = ["trio (>=0.11.0)"] -tests = ["trio", "pytest-trio", "pytest-cov", "pytest-asyncio", "pytest", "mock", "hypothesis"] h3 = ["aioquic (>=0.9.0,<1.0)"] +tests = ["hypothesis", "mock", "pytest", "pytest-asyncio", "pytest-cov", "pytest-trio", "trio"] +trio = ["trio (>=0.11.0)"] +uvloop = ["uvloop"] [[package]] name = "hyperframe" @@ 
-431,9 +431,9 @@ python-versions = ">=3.7" zipp = ">=0.5" [package.extras] -testing = ["importlib-resources (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-black (>=0.3.7)", "pytest-perf (>=0.9.2)", "flufl.flake8", "pyfakefs", "packaging", "pytest-enabler (>=1.3)", "pytest-cov", "pytest-flake8", "pytest-checkdocs (>=2.4)", "pytest (>=6)"] +docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"] perf = ["ipython"] -docs = ["rst.linker (>=1.9)", "jaraco.packaging (>=9)", "sphinx"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "iniconfig" @@ -474,10 +474,10 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" [package.extras] -source = ["Cython (>=0.29.7)"] -htmlsoup = ["beautifulsoup4"] -html5 = ["html5lib"] cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["beautifulsoup4"] +source = ["Cython (>=0.29.7)"] [[package]] name = "mako" @@ -507,7 +507,7 @@ python-versions = ">=3.7" importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} [package.extras] -testing = ["pyyaml", "coverage"] +testing = ["coverage", "pyyaml"] [[package]] name = "markupsafe" @@ -569,8 +569,8 @@ optional = false python-versions = ">=3.6" [package.extras] -testing = ["pytest-benchmark", "pytest"] -dev = ["tox", "pre-commit"] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] [[package]] name = "posix-ipc" @@ -655,8 +655,8 @@ python-versions = ">=3.6.1" typing-extensions = ">=3.7.4.3" [package.extras] -email = ["email-validator (>=1.0.3)"] dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] [[package]] name = "pygit2" @@ -699,21 +699,21 @@ py = ">=1.8.2" toml = "*" [package.extras] -testing = ["xmlschema", "requests", "nose", "mock", 
"hypothesis (>=3.56)", "argcomplete"] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] [[package]] name = "pytest-asyncio" -version = "0.16.0" -description = "Pytest support for asyncio." +version = "0.19.0" +description = "Pytest support for asyncio" category = "dev" optional = false -python-versions = ">= 3.6" +python-versions = ">=3.7" [package.dependencies] -pytest = ">=5.4.0" +pytest = ">=6.1.0" [package.extras] -testing = ["hypothesis (>=5.7.1)", "coverage"] +testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] [[package]] name = "pytest-cov" @@ -728,7 +728,7 @@ coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] -testing = ["virtualenv", "pytest-xdist", "six", "process-tests", "hunter", "fields"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] [[package]] name = "pytest-forked" @@ -768,9 +768,9 @@ pytest = ">=6.2.0" pytest-forked = "*" [package.extras] -testing = ["filelock"] -setproctitle = ["setproctitle"] psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] [[package]] name = "python-dateutil" @@ -820,8 +820,8 @@ idna = ">=2.5,<4" urllib3 = ">=1.21.1,<1.27" [package.extras] -use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rfc3986" @@ -873,24 +873,24 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} [package.extras] -aiomysql = ["greenlet (!=0.4.17)", "aiomysql"] -aiosqlite = ["typing_extensions (!=3.10.0.1)", 
"greenlet (!=0.4.17)", "aiosqlite"] +aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["greenlet (!=0.4.17)", "asyncmy (>=0.2.3,!=0.2.4)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] mariadb_connector = ["mariadb (>=1.0.1,!=1.1.2)"] mssql = ["pyodbc"] mssql_pymssql = ["pymssql"] mssql_pyodbc = ["pyodbc"] -mypy = ["sqlalchemy2-stubs", "mypy (>=0.910)"] -mysql = ["mysqlclient (>=1.4.0,<2)", "mysqlclient (>=1.4.0)"] +mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] +mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] mysql_connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=7,<8)", "cx_oracle (>=7)"] +oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql_asyncpg = ["greenlet (!=0.4.17)", "asyncpg"] +postgresql_asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] postgresql_pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] postgresql_psycopg2binary = ["psycopg2-binary"] postgresql_psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql (<1)", "pymysql"] +pymysql = ["pymysql", "pymysql (<1)"] sqlcipher = ["sqlcipher3-binary"] [[package]] @@ -916,7 +916,7 @@ python-versions = ">=3.6" anyio = ">=3.0.0,<4" [package.extras] -full = ["requests", "pyyaml", "python-multipart", "jinja2", "itsdangerous"] +full = ["itsdangerous", "jinja2", "python-multipart", "pyyaml", "requests"] [[package]] name = "tap.py" @@ -962,9 +962,9 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" [package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] -secure = ["ipaddress", "certifi", "idna (>=2.0.0)", "cryptography (>=1.3.4)", "pyOpenSSL (>=0.14)"] -brotli = ["brotlipy (>=0.6.0)", 
"brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] [[package]] name = "uvicorn" @@ -980,7 +980,7 @@ click = ">=7.0" h11 = ">=0.8" [package.extras] -standard = ["colorama (>=0.4)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "PyYAML (>=5.1)", "python-dotenv (>=0.13)", "watchgod (>=0.6)", "httptools (>=0.2.0,<0.3.0)", "websockets (>=9.1)"] +standard = ["PyYAML (>=5.1)", "colorama (>=0.4)", "httptools (>=0.2.0,<0.3.0)", "python-dotenv (>=0.13)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchgod (>=0.6)", "websockets (>=9.1)"] [[package]] name = "webencodings" @@ -1024,13 +1024,13 @@ optional = false python-versions = ">=3.7" [package.extras] -testing = ["pytest-mypy (>=0.9.1)", "pytest-black (>=0.3.7)", "func-timeout", "jaraco.itertools", "pytest-enabler (>=1.3)", "pytest-cov", "pytest-flake8", "pytest-checkdocs (>=2.4)", "pytest (>=6)"] -docs = ["jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "jaraco.packaging (>=9)", "sphinx"] +docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] +testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.11" -content-hash = "7630feca99b54b3d08fd947d5c5857590ca8af8b6c3a9f0bed7eecf03385597e" +content-hash = "5326e59079df0c0520a8654e8e92e936a50df127e2e5eb6c81f465e0a3dfd339" [metadata.files] aiofiles = [ @@ -1698,8 +1698,8 @@ pytest = [ {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, ] pytest-asyncio = [ - {file = "pytest-asyncio-0.16.0.tar.gz", hash = "sha256:7496c5977ce88c34379df64a66459fe395cd05543f0a2f837016e7144391fcfb"}, - {file = "pytest_asyncio-0.16.0-py3-none-any.whl", hash = "sha256:5f2a21273c47b331ae6aa5b36087047b4899e40f03f18397c0e65fa5cca54e9b"}, + {file = "pytest-asyncio-0.19.0.tar.gz", hash = 
"sha256:ac4ebf3b6207259750bc32f4c1d8fcd7e79739edbc67ad0c58dd150b1d072fed"}, + {file = "pytest_asyncio-0.19.0-py3-none-any.whl", hash = "sha256:7a97e37cfe1ed296e2e84941384bdd37c376453912d397ed39293e0916f521fa"}, ] pytest-cov = [ {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, diff --git a/pyproject.toml b/pyproject.toml index f249c80c..283b8101 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -99,7 +99,7 @@ srcinfo = "^0.0.8" [tool.poetry.dev-dependencies] coverage = "^6.0.2" pytest = "^6.2.5" -pytest-asyncio = "^0.16.0" +pytest-asyncio = "^0.19.0" pytest-cov = "^3.0.0" pytest-tap = "^3.2" From b38e765dfe552d68a9fdcf14116e06efcc3b4b61 Mon Sep 17 00:00:00 2001 From: renovate Date: Tue, 6 Sep 2022 22:24:52 +0000 Subject: [PATCH 096/415] fix(deps): update dependency aiofiles to ^0.8.0 --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index eddb0f95..61782e65 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,6 +1,6 @@ [[package]] name = "aiofiles" -version = "0.7.0" +version = "0.8.0" description = "File support for asyncio." 
category = "main" optional = false @@ -1030,12 +1030,12 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>= [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.11" -content-hash = "5326e59079df0c0520a8654e8e92e936a50df127e2e5eb6c81f465e0a3dfd339" +content-hash = "cf2d693b3a53f8c1d47b46c9787d710cb39ffdba5c3285e7d5cd0c02ec191154" [metadata.files] aiofiles = [ - {file = "aiofiles-0.7.0-py3-none-any.whl", hash = "sha256:c67a6823b5f23fcab0a2595a289cec7d8c863ffcb4322fb8cd6b90400aedfdbc"}, - {file = "aiofiles-0.7.0.tar.gz", hash = "sha256:a1c4fc9b2ff81568c83e21392a82f344ea9d23da906e4f6a52662764545e19d4"}, + {file = "aiofiles-0.8.0-py3-none-any.whl", hash = "sha256:7a973fc22b29e9962d0897805ace5856e6a566ab1f0c8e5c91ff6c866519c937"}, + {file = "aiofiles-0.8.0.tar.gz", hash = "sha256:8334f23235248a3b2e83b2c3a78a22674f39969b96397126cc93664d9a901e59"}, ] alembic = [ {file = "alembic-1.8.1-py3-none-any.whl", hash = "sha256:0a024d7f2de88d738d7395ff866997314c837be6104e90c5724350313dee4da4"}, diff --git a/pyproject.toml b/pyproject.toml index 283b8101..a1112d35 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,7 +57,7 @@ python = ">=3.9,<3.11" # based on git tags. # General -aiofiles = "^0.7.0" +aiofiles = "^0.8.0" asgiref = "^3.4.1" bcrypt = "^3.2.0" bleach = "^4.1.0" From cdc7bd618c8ce06b52da87d2a6efe81a3dcb896e Mon Sep 17 00:00:00 2001 From: renovate Date: Tue, 6 Sep 2022 23:24:49 +0000 Subject: [PATCH 097/415] fix(deps): update dependency email-validator to v1.2.1 --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 61782e65..691ae494 100644 --- a/poetry.lock +++ b/poetry.lock @@ -213,8 +213,8 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"] [[package]] name = "email-validator" -version = "1.1.3" -description = "A robust email syntax and deliverability validation library for Python 2.x/3.x." 
+version = "1.2.1" +description = "A robust email syntax and deliverability validation library." category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" @@ -1030,7 +1030,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>= [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.11" -content-hash = "cf2d693b3a53f8c1d47b46c9787d710cb39ffdba5c3285e7d5cd0c02ec191154" +content-hash = "5c5c0ec98e190669e257f4d717162794dc5841dca9168d7a941f5e72ea85f03f" [metadata.files] aiofiles = [ @@ -1240,8 +1240,8 @@ dnspython = [ {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"}, ] email-validator = [ - {file = "email_validator-1.1.3-py2.py3-none-any.whl", hash = "sha256:5675c8ceb7106a37e40e2698a57c056756bf3f272cfa8682a4f87ebd95d8440b"}, - {file = "email_validator-1.1.3.tar.gz", hash = "sha256:aa237a65f6f4da067119b7df3f13e89c25c051327b2b5b66dc075f33d62480d7"}, + {file = "email_validator-1.2.1-py2.py3-none-any.whl", hash = "sha256:c8589e691cf73eb99eed8d10ce0e9cbb05a0886ba920c8bcb7c82873f4c5789c"}, + {file = "email_validator-1.2.1.tar.gz", hash = "sha256:6757aea012d40516357c0ac2b1a4c31219ab2f899d26831334c5d069e8b6c3d8"}, ] execnet = [ {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, diff --git a/pyproject.toml b/pyproject.toml index a1112d35..27369c45 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,7 +61,7 @@ aiofiles = "^0.8.0" asgiref = "^3.4.1" bcrypt = "^3.2.0" bleach = "^4.1.0" -email-validator = "1.1.3" +email-validator = "1.2.1" fakeredis = "^1.6.1" feedgen = "^0.9.0" httpx = "^0.20.0" From a981ae4052fd064e0ea23fb91fcd8a0d16f36c58 Mon Sep 17 00:00:00 2001 From: renovate Date: Wed, 7 Sep 2022 00:25:32 +0000 Subject: [PATCH 098/415] fix(deps): update dependency httpx to ^0.23.0 --- poetry.lock | 26 ++++++++++++++------------ pyproject.toml | 2 +- 2 
files changed, 15 insertions(+), 13 deletions(-) diff --git a/poetry.lock b/poetry.lock index 691ae494..1a1a8c62 100644 --- a/poetry.lock +++ b/poetry.lock @@ -348,39 +348,41 @@ python-versions = ">=3.6.1" [[package]] name = "httpcore" -version = "0.13.7" +version = "0.15.0" description = "A minimal low-level HTTP client." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] anyio = ">=3.0.0,<4.0.0" +certifi = "*" h11 = ">=0.11,<0.13" sniffio = ">=1.0.0,<2.0.0" [package.extras] http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "httpx" -version = "0.20.0" +version = "0.23.0" description = "The next generation HTTP client." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] certifi = "*" -charset-normalizer = "*" -httpcore = ">=0.13.3,<0.14.0" +httpcore = ">=0.15.0,<0.16.0" rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10.0.0,<11.0.0)"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"] http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "hypercorn" @@ -1030,7 +1032,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>= [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.11" -content-hash = "5c5c0ec98e190669e257f4d717162794dc5841dca9168d7a941f5e72ea85f03f" +content-hash = "879b45a5c84c40462afe971096ff654f7ef6981bbdcea5d5e6107e7f68355802" [metadata.files] aiofiles = [ @@ -1336,12 +1338,12 @@ hpack = [ {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, ] httpcore = [ - {file = "httpcore-0.13.7-py3-none-any.whl", hash = "sha256:369aa481b014cf046f7067fddd67d00560f2f00426e79569d99cb11245134af0"}, - {file = 
"httpcore-0.13.7.tar.gz", hash = "sha256:036f960468759e633574d7c121afba48af6419615d36ab8ede979f1ad6276fa3"}, + {file = "httpcore-0.15.0-py3-none-any.whl", hash = "sha256:1105b8b73c025f23ff7c36468e4432226cbb959176eab66864b8e31c4ee27fa6"}, + {file = "httpcore-0.15.0.tar.gz", hash = "sha256:18b68ab86a3ccf3e7dc0f43598eaddcf472b602aba29f9aa6ab85fe2ada3980b"}, ] httpx = [ - {file = "httpx-0.20.0-py3-none-any.whl", hash = "sha256:33af5aad9bdc82ef1fc89219c1e36f5693bf9cd0ebe330884df563445682c0f8"}, - {file = "httpx-0.20.0.tar.gz", hash = "sha256:09606d630f070d07f9ff28104fbcea429ea0014c1e89ac90b4d8de8286c40e7b"}, + {file = "httpx-0.23.0-py3-none-any.whl", hash = "sha256:42974f577483e1e932c3cdc3cd2303e883cbfba17fe228b0f63589764d7b9c4b"}, + {file = "httpx-0.23.0.tar.gz", hash = "sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef"}, ] hypercorn = [ {file = "Hypercorn-0.11.2-py3-none-any.whl", hash = "sha256:8007c10f81566920f8ae12c0e26e146f94ca70506da964b5a727ad610aa1d821"}, diff --git a/pyproject.toml b/pyproject.toml index 27369c45..e0c7ba00 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,7 +64,7 @@ bleach = "^4.1.0" email-validator = "1.2.1" fakeredis = "^1.6.1" feedgen = "^0.9.0" -httpx = "^0.20.0" +httpx = "^0.23.0" itsdangerous = "^2.0.1" lxml = "^4.6.3" orjson = "^3.6.4" From a73af3e76d3fd4a89cba2cf23ee91f431ad2a990 Mon Sep 17 00:00:00 2001 From: renovate Date: Wed, 7 Sep 2022 01:25:03 +0000 Subject: [PATCH 099/415] fix(deps): update dependency hypercorn to ^0.14.0 --- poetry.lock | 12 ++++++------ pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1a1a8c62..801d9e95 100644 --- a/poetry.lock +++ b/poetry.lock @@ -386,8 +386,8 @@ socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "hypercorn" -version = "0.11.2" -description = "A ASGI Server based on Hyper libraries and inspired by Gunicorn." 
+version = "0.14.3" +description = "A ASGI Server based on Hyper libraries and inspired by Gunicorn" category = "main" optional = false python-versions = ">=3.7" @@ -400,8 +400,8 @@ toml = "*" wsproto = ">=0.14.0" [package.extras] +docs = ["pydata-sphinx-theme"] h3 = ["aioquic (>=0.9.0,<1.0)"] -tests = ["hypothesis", "mock", "pytest", "pytest-asyncio", "pytest-cov", "pytest-trio", "trio"] trio = ["trio (>=0.11.0)"] uvloop = ["uvloop"] @@ -1032,7 +1032,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>= [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.11" -content-hash = "879b45a5c84c40462afe971096ff654f7ef6981bbdcea5d5e6107e7f68355802" +content-hash = "0afd4b5faa1d291565d5a1a90d6d916ffc37537913d4037660a99c86bb3b3ed1" [metadata.files] aiofiles = [ @@ -1346,8 +1346,8 @@ httpx = [ {file = "httpx-0.23.0.tar.gz", hash = "sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef"}, ] hypercorn = [ - {file = "Hypercorn-0.11.2-py3-none-any.whl", hash = "sha256:8007c10f81566920f8ae12c0e26e146f94ca70506da964b5a727ad610aa1d821"}, - {file = "Hypercorn-0.11.2.tar.gz", hash = "sha256:5ba1e719c521080abd698ff5781a2331e34ef50fc1c89a50960538115a896a9a"}, + {file = "Hypercorn-0.14.3-py3-none-any.whl", hash = "sha256:7c491d5184f28ee960dcdc14ab45d14633ca79d72ddd13cf4fcb4cb854d679ab"}, + {file = "Hypercorn-0.14.3.tar.gz", hash = "sha256:4a87a0b7bbe9dc75fab06dbe4b301b9b90416e9866c23a377df21a969d6ab8dd"}, ] hyperframe = [ {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, diff --git a/pyproject.toml b/pyproject.toml index e0c7ba00..e6cb2c83 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -87,7 +87,7 @@ SQLAlchemy = "^1.4.26" # ASGI uvicorn = "^0.15.0" gunicorn = "^20.1.0" -Hypercorn = "^0.11.2" +Hypercorn = "^0.14.0" prometheus-fastapi-instrumentator = "^5.7.1" pytest-xdist = "^2.4.0" filelock = "^3.3.2" From bb310bdf65add1559c8b459b1be71cf70864dbb1 
Mon Sep 17 00:00:00 2001 From: renovate Date: Wed, 7 Sep 2022 02:24:55 +0000 Subject: [PATCH 100/415] fix(deps): update dependency uvicorn to ^0.18.0 --- poetry.lock | 13 ++++++------- pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/poetry.lock b/poetry.lock index 801d9e95..4d51292d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -970,19 +970,18 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "uvicorn" -version = "0.15.0" +version = "0.18.3" description = "The lightning-fast ASGI server." category = "main" optional = false -python-versions = "*" +python-versions = ">=3.7" [package.dependencies] -asgiref = ">=3.4.0" click = ">=7.0" h11 = ">=0.8" [package.extras] -standard = ["PyYAML (>=5.1)", "colorama (>=0.4)", "httptools (>=0.2.0,<0.3.0)", "python-dotenv (>=0.13)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchgod (>=0.6)", "websockets (>=9.1)"] +standard = ["colorama (>=0.4)", "httptools (>=0.4.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.0)"] [[package]] name = "webencodings" @@ -1032,7 +1031,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>= [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.11" -content-hash = "0afd4b5faa1d291565d5a1a90d6d916ffc37537913d4037660a99c86bb3b3ed1" +content-hash = "17c8d99957aa94e4b9b0a8fa14098122d402ef52da860c13049a690e5dd18792" [metadata.files] aiofiles = [ @@ -1817,8 +1816,8 @@ urllib3 = [ {file = "urllib3-1.26.11.tar.gz", hash = "sha256:ea6e8fb210b19d950fab93b60c9009226c63a28808bc8386e05301e25883ac0a"}, ] uvicorn = [ - {file = "uvicorn-0.15.0-py3-none-any.whl", hash = "sha256:17f898c64c71a2640514d4089da2689e5db1ce5d4086c2d53699bf99513421c1"}, - {file = "uvicorn-0.15.0.tar.gz", hash = "sha256:d9a3c0dd1ca86728d3e235182683b4cf94cd53a867c288eaeca80ee781b2caff"}, + {file = "uvicorn-0.18.3-py3-none-any.whl", hash = 
"sha256:0abd429ebb41e604ed8d2be6c60530de3408f250e8d2d84967d85ba9e86fe3af"}, + {file = "uvicorn-0.18.3.tar.gz", hash = "sha256:9a66e7c42a2a95222f76ec24a4b754c158261c4696e683b9dadc72b590e0311b"}, ] webencodings = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, diff --git a/pyproject.toml b/pyproject.toml index e6cb2c83..4122241d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -85,7 +85,7 @@ Werkzeug = "^2.0.2" SQLAlchemy = "^1.4.26" # ASGI -uvicorn = "^0.15.0" +uvicorn = "^0.18.0" gunicorn = "^20.1.0" Hypercorn = "^0.14.0" prometheus-fastapi-instrumentator = "^5.7.1" From a39f34d695ae8f191957b38d43b2d04a7aaf1c38 Mon Sep 17 00:00:00 2001 From: renovate Date: Wed, 7 Sep 2022 03:25:30 +0000 Subject: [PATCH 101/415] chore(deps): update dependency pytest to v7 --- poetry.lock | 28 ++++++++-------------------- pyproject.toml | 2 +- 2 files changed, 9 insertions(+), 21 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4d51292d..bea03647 100644 --- a/poetry.lock +++ b/poetry.lock @@ -49,14 +49,6 @@ python-versions = ">=3.7" [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." 
-category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - [[package]] name = "attrs" version = "22.1.0" @@ -684,24 +676,23 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "6.2.5" +version = "7.1.3" description = "pytest: simple powerful testing with Python" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" py = ">=1.8.2" -toml = "*" +tomli = ">=1.0.0" [package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] [[package]] name = "pytest-asyncio" @@ -943,7 +934,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -1031,7 +1022,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>= [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.11" -content-hash = "17c8d99957aa94e4b9b0a8fa14098122d402ef52da860c13049a690e5dd18792" +content-hash = "f6a259093ff2796b5a3f579fcd00cc1c6a841d769abd9c898a91a8a6a2eec76f" [metadata.files] aiofiles = [ @@ -1050,9 +1041,6 @@ asgiref = [ {file = "asgiref-3.5.2-py3-none-any.whl", hash = "sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4"}, {file = "asgiref-3.5.2.tar.gz", hash = "sha256:4a29362a6acebe09bf1d6640db38c1dc3d9217c68e6f9f6204d72667fc19a424"}, ] -atomicwrites = [ - {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, -] attrs = [ {file = 
"attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, @@ -1695,8 +1683,8 @@ pyparsing = [ {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] pytest = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, + {file = "pytest-7.1.3-py3-none-any.whl", hash = "sha256:1377bda3466d70b55e3f5cecfa55bb7cfcf219c7964629b967c37cf0bda818b7"}, + {file = "pytest-7.1.3.tar.gz", hash = "sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39"}, ] pytest-asyncio = [ {file = "pytest-asyncio-0.19.0.tar.gz", hash = "sha256:ac4ebf3b6207259750bc32f4c1d8fcd7e79739edbc67ad0c58dd150b1d072fed"}, diff --git a/pyproject.toml b/pyproject.toml index 4122241d..4c0df93c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -98,7 +98,7 @@ srcinfo = "^0.0.8" [tool.poetry.dev-dependencies] coverage = "^6.0.2" -pytest = "^6.2.5" +pytest = "^7.0.0" pytest-asyncio = "^0.19.0" pytest-cov = "^3.0.0" pytest-tap = "^3.2" From 486f8bd61c458b44232a4bb7c07d08e0e15b86f8 Mon Sep 17 00:00:00 2001 From: renovate Date: Wed, 7 Sep 2022 04:24:53 +0000 Subject: [PATCH 102/415] fix(deps): update dependency aiofiles to v22 --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index bea03647..a12b6aa4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,10 @@ [[package]] name = "aiofiles" -version = "0.8.0" +version = "22.1.0" description = "File support for asyncio." 
category = "main" optional = false -python-versions = ">=3.6,<4.0" +python-versions = ">=3.7,<4.0" [[package]] name = "alembic" @@ -1022,12 +1022,12 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>= [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.11" -content-hash = "f6a259093ff2796b5a3f579fcd00cc1c6a841d769abd9c898a91a8a6a2eec76f" +content-hash = "888f848aad23900dcff3c089e13b88547605ef760dc3714a9872a89346e150e2" [metadata.files] aiofiles = [ - {file = "aiofiles-0.8.0-py3-none-any.whl", hash = "sha256:7a973fc22b29e9962d0897805ace5856e6a566ab1f0c8e5c91ff6c866519c937"}, - {file = "aiofiles-0.8.0.tar.gz", hash = "sha256:8334f23235248a3b2e83b2c3a78a22674f39969b96397126cc93664d9a901e59"}, + {file = "aiofiles-22.1.0-py3-none-any.whl", hash = "sha256:1142fa8e80dbae46bb6339573ad4c8c0841358f79c6eb50a493dceca14621bad"}, + {file = "aiofiles-22.1.0.tar.gz", hash = "sha256:9107f1ca0b2a5553987a94a3c9959fe5b491fdf731389aa5b7b1bd0733e32de6"}, ] alembic = [ {file = "alembic-1.8.1-py3-none-any.whl", hash = "sha256:0a024d7f2de88d738d7395ff866997314c837be6104e90c5724350313dee4da4"}, diff --git a/pyproject.toml b/pyproject.toml index 4c0df93c..78d7e73a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,7 +57,7 @@ python = ">=3.9,<3.11" # based on git tags. 
# General -aiofiles = "^0.8.0" +aiofiles = "^22.0.0" asgiref = "^3.4.1" bcrypt = "^3.2.0" bleach = "^4.1.0" From 6ab9663b7684dd25bedd5b2ee75e774ddf440fe0 Mon Sep 17 00:00:00 2001 From: renovate Date: Wed, 7 Sep 2022 06:25:25 +0000 Subject: [PATCH 103/415] fix(deps): update dependency authlib to v1 --- poetry.lock | 15 ++++++--------- pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/poetry.lock b/poetry.lock index a12b6aa4..69721ec2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -65,17 +65,14 @@ tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy [[package]] name = "authlib" -version = "0.15.5" -description = "The ultimate Python library in building OAuth and OpenID Connect servers." +version = "1.0.1" +description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." category = "main" optional = false python-versions = "*" [package.dependencies] -cryptography = "*" - -[package.extras] -client = ["requests"] +cryptography = ">=3.2" [[package]] name = "bcrypt" @@ -1022,7 +1019,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>= [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.11" -content-hash = "888f848aad23900dcff3c089e13b88547605ef760dc3714a9872a89346e150e2" +content-hash = "c2412181a05b96ad1daab6e9bddff8e1d4ce2b0b7671536ccccd69c66924c27d" [metadata.files] aiofiles = [ @@ -1046,8 +1043,8 @@ attrs = [ {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, ] authlib = [ - {file = "Authlib-0.15.5-py2.py3-none-any.whl", hash = "sha256:ecf4a7a9f2508c0bb07e93a752dd3c495cfaffc20e864ef0ffc95e3f40d2abaf"}, - {file = "Authlib-0.15.5.tar.gz", hash = "sha256:b83cf6360c8e92b0e9df0d1f32d675790bcc4e3c03977499b1eed24dcdef4252"}, + {file = "Authlib-1.0.1-py2.py3-none-any.whl", hash = "sha256:1286e2d5ef5bfe5a11cc2d0a0d1031f0393f6ce4d61f5121cfe87fa0054e98bd"}, + {file = 
"Authlib-1.0.1.tar.gz", hash = "sha256:6e74a4846ac36dfc882b3cc2fbd3d9eb410a627f2f2dc11771276655345223b1"}, ] bcrypt = [ {file = "bcrypt-3.2.2-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:7180d98a96f00b1050e93f5b0f556e658605dd9f524d0b0e68ae7944673f525e"}, diff --git a/pyproject.toml b/pyproject.toml index 78d7e73a..14182ed2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,7 +78,7 @@ paginate = "^0.5.6" # SQL alembic = "^1.7.4" mysqlclient = "^2.0.3" -Authlib = "^0.15.5" +Authlib = "^1.0.0" Jinja2 = "^3.0.2" Markdown = "^3.3.6" Werkzeug = "^2.0.2" From 7ad22d81433bc9507738e8b52f68fd1ba9c0a4b6 Mon Sep 17 00:00:00 2001 From: renovate Date: Wed, 7 Sep 2022 14:24:55 +0000 Subject: [PATCH 104/415] fix(deps): update dependency bcrypt to v4 --- poetry.lock | 30 ++++++++++++++---------------- pyproject.toml | 2 +- 2 files changed, 15 insertions(+), 17 deletions(-) diff --git a/poetry.lock b/poetry.lock index 69721ec2..80104bee 100644 --- a/poetry.lock +++ b/poetry.lock @@ -76,15 +76,12 @@ cryptography = ">=3.2" [[package]] name = "bcrypt" -version = "3.2.2" +version = "4.0.0" description = "Modern password hashing for your software and your servers" category = "main" optional = false python-versions = ">=3.6" -[package.dependencies] -cffi = ">=1.1" - [package.extras] tests = ["pytest (>=3.2.1,!=3.3.0)"] typecheck = ["mypy"] @@ -1019,7 +1016,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>= [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.11" -content-hash = "c2412181a05b96ad1daab6e9bddff8e1d4ce2b0b7671536ccccd69c66924c27d" +content-hash = "38f6da4f493e57dbbfa462388d4b549fb54e7fd9481dc114602210e846770a9f" [metadata.files] aiofiles = [ @@ -1047,17 +1044,18 @@ authlib = [ {file = "Authlib-1.0.1.tar.gz", hash = "sha256:6e74a4846ac36dfc882b3cc2fbd3d9eb410a627f2f2dc11771276655345223b1"}, ] bcrypt = [ - {file = "bcrypt-3.2.2-cp36-abi3-macosx_10_10_universal2.whl", hash = 
"sha256:7180d98a96f00b1050e93f5b0f556e658605dd9f524d0b0e68ae7944673f525e"}, - {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:61bae49580dce88095d669226d5076d0b9d927754cedbdf76c6c9f5099ad6f26"}, - {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88273d806ab3a50d06bc6a2fc7c87d737dd669b76ad955f449c43095389bc8fb"}, - {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6d2cb9d969bfca5bc08e45864137276e4c3d3d7de2b162171def3d188bf9d34a"}, - {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b02d6bfc6336d1094276f3f588aa1225a598e27f8e3388f4db9948cb707b521"}, - {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a2c46100e315c3a5b90fdc53e429c006c5f962529bc27e1dfd656292c20ccc40"}, - {file = "bcrypt-3.2.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7d9ba2e41e330d2af4af6b1b6ec9e6128e91343d0b4afb9282e54e5508f31baa"}, - {file = "bcrypt-3.2.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cd43303d6b8a165c29ec6756afd169faba9396a9472cdff753fe9f19b96ce2fa"}, - {file = "bcrypt-3.2.2-cp36-abi3-win32.whl", hash = "sha256:4e029cef560967fb0cf4a802bcf4d562d3d6b4b1bf81de5ec1abbe0f1adb027e"}, - {file = "bcrypt-3.2.2-cp36-abi3-win_amd64.whl", hash = "sha256:7ff2069240c6bbe49109fe84ca80508773a904f5a8cb960e02a977f7f519b129"}, - {file = "bcrypt-3.2.2.tar.gz", hash = "sha256:433c410c2177057705da2a9f2cd01dd157493b2a7ac14c8593a16b3dab6b6bfb"}, + {file = "bcrypt-4.0.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:845b1daf4df2dd94d2fdbc9454953ca9dd0e12970a0bfc9f3dcc6faea3fa96e4"}, + {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8780e69f9deec9d60f947b169507d2c9816e4f11548f1f7ebee2af38b9b22ae4"}, 
+ {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c3334446fac200499e8bc04a530ce3cf0b3d7151e0e4ac5c0dddd3d95e97843"}, + {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfb67f6a6c72dfb0a02f3df51550aa1862708e55128b22543e2b42c74f3620d7"}, + {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:7c7dd6c1f05bf89e65261d97ac3a6520f34c2acb369afb57e3ea4449be6ff8fd"}, + {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:594780b364fb45f2634c46ec8d3e61c1c0f1811c4f2da60e8eb15594ecbf93ed"}, + {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2d0dd19aad87e4ab882ef1d12df505f4c52b28b69666ce83c528f42c07379227"}, + {file = "bcrypt-4.0.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bf413f2a9b0a2950fc750998899013f2e718d20fa4a58b85ca50b6df5ed1bbf9"}, + {file = "bcrypt-4.0.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ede0f506554571c8eda80db22b83c139303ec6b595b8f60c4c8157bdd0bdee36"}, + {file = "bcrypt-4.0.0-cp36-abi3-win32.whl", hash = "sha256:dc6ec3dc19b1c193b2f7cf279d3e32e7caf447532fbcb7af0906fe4398900c33"}, + {file = "bcrypt-4.0.0-cp36-abi3-win_amd64.whl", hash = "sha256:0b0f0c7141622a31e9734b7f649451147c04ebb5122327ac0bd23744df84be90"}, + {file = "bcrypt-4.0.0.tar.gz", hash = "sha256:c59c170fc9225faad04dde1ba61d85b413946e8ce2e5f5f5ff30dfd67283f319"}, ] bleach = [ {file = "bleach-4.1.0-py2.py3-none-any.whl", hash = "sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994"}, diff --git a/pyproject.toml b/pyproject.toml index 14182ed2..52629cfb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -59,7 +59,7 @@ python = ">=3.9,<3.11" # General aiofiles = "^22.0.0" asgiref = "^3.4.1" -bcrypt = "^3.2.0" +bcrypt = "^4.0.0" bleach = "^4.1.0" email-validator = "1.2.1" fakeredis = "^1.6.1" From 3de17311cfb92755f4b91e34dcf5e43f66652ea4 Mon Sep 17 00:00:00 2001 From: renovate Date: Sat, 10 Sep 2022 
00:25:02 +0000 Subject: [PATCH 105/415] fix(deps): update dependency bleach to v5 --- poetry.lock | 15 +++++++++------ pyproject.toml | 2 +- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 80104bee..f13e1df2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -88,17 +88,20 @@ typecheck = ["mypy"] [[package]] name = "bleach" -version = "4.1.0" +version = "5.0.1" description = "An easy safelist-based HTML-sanitizing tool." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] -packaging = "*" six = ">=1.9.0" webencodings = "*" +[package.extras] +css = ["tinycss2 (>=1.1.0,<1.2)"] +dev = ["Sphinx (==4.3.2)", "black (==22.3.0)", "build (==0.8.0)", "flake8 (==4.0.1)", "hashin (==0.17.0)", "mypy (==0.961)", "pip-tools (==6.6.2)", "pytest (==7.1.2)", "tox (==3.25.0)", "twine (==4.0.1)", "wheel (==0.37.1)"] + [[package]] name = "certifi" version = "2022.6.15" @@ -1016,7 +1019,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>= [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.11" -content-hash = "38f6da4f493e57dbbfa462388d4b549fb54e7fd9481dc114602210e846770a9f" +content-hash = "ac45bb4ee013a8f79016947fe222a3158ffe716008349a81086e2dbeac6b914c" [metadata.files] aiofiles = [ @@ -1058,8 +1061,8 @@ bcrypt = [ {file = "bcrypt-4.0.0.tar.gz", hash = "sha256:c59c170fc9225faad04dde1ba61d85b413946e8ce2e5f5f5ff30dfd67283f319"}, ] bleach = [ - {file = "bleach-4.1.0-py2.py3-none-any.whl", hash = "sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994"}, - {file = "bleach-4.1.0.tar.gz", hash = "sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da"}, + {file = "bleach-5.0.1-py3-none-any.whl", hash = "sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a"}, + {file = "bleach-5.0.1.tar.gz", hash = "sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c"}, ] certifi = [ {file = 
"certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, diff --git a/pyproject.toml b/pyproject.toml index 52629cfb..704e581a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,7 +60,7 @@ python = ">=3.9,<3.11" aiofiles = "^22.0.0" asgiref = "^3.4.1" bcrypt = "^4.0.0" -bleach = "^4.1.0" +bleach = "^5.0.0" email-validator = "1.2.1" fakeredis = "^1.6.1" feedgen = "^0.9.0" From 307d944cf1aebae5474695932a5530716794e1f7 Mon Sep 17 00:00:00 2001 From: renovate Date: Sat, 10 Sep 2022 03:25:08 +0000 Subject: [PATCH 106/415] fix(deps): update dependency protobuf to v4 --- poetry.lock | 44 +++++++++++++++++--------------------------- pyproject.toml | 2 +- 2 files changed, 18 insertions(+), 28 deletions(-) diff --git a/poetry.lock b/poetry.lock index f13e1df2..b2342cb4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -604,8 +604,8 @@ prometheus-client = ">=0.8.0,<1.0.0" [[package]] name = "protobuf" -version = "3.20.1" -description = "Protocol Buffers" +version = "4.21.5" +description = "" category = "main" optional = false python-versions = ">=3.7" @@ -1019,7 +1019,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>= [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.11" -content-hash = "ac45bb4ee013a8f79016947fe222a3158ffe716008349a81086e2dbeac6b914c" +content-hash = "478ba8d01d46e13dd56df2b19835750dda11e9a8bfe46ee8e7e22cb4579cf7b5" [metadata.files] aiofiles = [ @@ -1568,30 +1568,20 @@ prometheus-fastapi-instrumentator = [ {file = "prometheus_fastapi_instrumentator-5.8.2-py3-none-any.whl", hash = "sha256:5bfec239a924e1fed4ba94eb0addc73422d11821e894200b6d0e36a61c966827"}, ] protobuf = [ - {file = "protobuf-3.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996"}, - {file = "protobuf-3.20.1-cp310-cp310-manylinux2014_aarch64.whl", hash = 
"sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3"}, - {file = "protobuf-3.20.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde"}, - {file = "protobuf-3.20.1-cp310-cp310-win32.whl", hash = "sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c"}, - {file = "protobuf-3.20.1-cp310-cp310-win_amd64.whl", hash = "sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7"}, - {file = "protobuf-3.20.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153"}, - {file = "protobuf-3.20.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f"}, - {file = "protobuf-3.20.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20"}, - {file = "protobuf-3.20.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531"}, - {file = "protobuf-3.20.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e"}, - {file = "protobuf-3.20.1-cp37-cp37m-win32.whl", hash = "sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c"}, - {file = "protobuf-3.20.1-cp37-cp37m-win_amd64.whl", hash = "sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067"}, - {file = "protobuf-3.20.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf"}, - {file = "protobuf-3.20.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab"}, - {file = "protobuf-3.20.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c"}, - {file = "protobuf-3.20.1-cp38-cp38-win32.whl", hash = "sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7"}, - {file = "protobuf-3.20.1-cp38-cp38-win_amd64.whl", hash = "sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739"}, - {file = "protobuf-3.20.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7"}, - {file = "protobuf-3.20.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f"}, - {file = "protobuf-3.20.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9"}, - {file = "protobuf-3.20.1-cp39-cp39-win32.whl", hash = "sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8"}, - {file = "protobuf-3.20.1-cp39-cp39-win_amd64.whl", hash = "sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91"}, - {file = "protobuf-3.20.1-py2.py3-none-any.whl", hash = "sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388"}, - {file = "protobuf-3.20.1.tar.gz", hash = "sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9"}, + {file = "protobuf-4.21.5-cp310-abi3-win32.whl", hash = "sha256:5310cbe761e87f0c1decce019d23f2101521d4dfff46034f8a12a53546036ec7"}, + {file = "protobuf-4.21.5-cp310-abi3-win_amd64.whl", hash = "sha256:e5c5a2886ae48d22a9d32fbb9b6636a089af3cd26b706750258ce1ca96cc0116"}, + {file = "protobuf-4.21.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ee04f5823ed98bb9a8c3b1dc503c49515e0172650875c3f76e225b223793a1f2"}, + {file = "protobuf-4.21.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:b04484d6f42f48c57dd2737a72692f4c6987529cdd148fb5b8e5f616862a2e37"}, + {file = "protobuf-4.21.5-cp37-abi3-manylinux2014_x86_64.whl", hash = 
"sha256:5e0b272217aad8971763960238c1a1e6a65d50ef7824e23300da97569a251c55"}, + {file = "protobuf-4.21.5-cp37-cp37m-win32.whl", hash = "sha256:5eb0724615e90075f1d763983e708e1cef08e66b1891d8b8b6c33bc3b2f1a02b"}, + {file = "protobuf-4.21.5-cp37-cp37m-win_amd64.whl", hash = "sha256:011c0f267e85f5d73750b6c25f0155d5db1e9443cd3590ab669a6221dd8fcdb0"}, + {file = "protobuf-4.21.5-cp38-cp38-win32.whl", hash = "sha256:7b6f22463e2d1053d03058b7b4ceca6e4ed4c14f8c286c32824df751137bf8e7"}, + {file = "protobuf-4.21.5-cp38-cp38-win_amd64.whl", hash = "sha256:b52e7a522911a40445a5f588bd5b5e584291bfc5545e09b7060685e4b2ff814f"}, + {file = "protobuf-4.21.5-cp39-cp39-win32.whl", hash = "sha256:a7faa62b183d6a928e3daffd06af843b4287d16ef6e40f331575ecd236a7974d"}, + {file = "protobuf-4.21.5-cp39-cp39-win_amd64.whl", hash = "sha256:5e0ce02418ef03d7657a420ae8fd6fec4995ac713a3cb09164e95f694dbcf085"}, + {file = "protobuf-4.21.5-py2.py3-none-any.whl", hash = "sha256:bf711b451212dc5b0fa45ae7dada07d8e71a4b0ff0bc8e4783ee145f47ac4f82"}, + {file = "protobuf-4.21.5-py3-none-any.whl", hash = "sha256:3ec6f5b37935406bb9df9b277e79f8ed81d697146e07ef2ba8a5a272fb24b2c9"}, + {file = "protobuf-4.21.5.tar.gz", hash = "sha256:eb1106e87e095628e96884a877a51cdb90087106ee693925ec0a300468a9be3a"}, ] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, diff --git a/pyproject.toml b/pyproject.toml index 704e581a..b44291d7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -68,7 +68,7 @@ httpx = "^0.23.0" itsdangerous = "^2.0.1" lxml = "^4.6.3" orjson = "^3.6.4" -protobuf = "^3.19.0" +protobuf = "^4.0.0" pygit2 = "^1.7.0" python-multipart = "^0.0.5" redis = "^3.5.3" From 69d67247498123bb0b14b731b3f662935c1867a6 Mon Sep 17 00:00:00 2001 From: renovate Date: Sat, 10 Sep 2022 05:25:06 +0000 Subject: [PATCH 107/415] fix(deps): update dependency redis to v4 --- poetry.lock | 124 ++++++++++++++++++++++++++++++++++++++++++++++--- pyproject.toml | 2 +- 
2 files changed, 118 insertions(+), 8 deletions(-) diff --git a/poetry.lock b/poetry.lock index b2342cb4..ef0fc1f7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -49,6 +49,14 @@ python-versions = ">=3.7" [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] +[[package]] +name = "async-timeout" +version = "4.0.2" +description = "Timeout context manager for asyncio programs" +category = "main" +optional = false +python-versions = ">=3.6" + [[package]] name = "attrs" version = "22.1.0" @@ -184,6 +192,20 @@ sdist = ["setuptools_rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] +[[package]] +name = "deprecated" +version = "1.2.13" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest (<5)", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "pytest", "pytest-cov", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"] + [[package]] name = "dnspython" version = "2.2.1" @@ -786,14 +808,20 @@ six = ">=1.4.0" [[package]] name = "redis" -version = "3.5.3" -description = "Python client for Redis key-value store" +version = "4.3.4" +description = "Python client for Redis database and key-value store" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.6" + +[package.dependencies] +async-timeout = ">=4.0.2" +deprecated = ">=1.2.3" +packaging = ">=20.4" [package.extras] -hiredis = ["hiredis (>=0.1.3)"] +hiredis = ["hiredis (>=1.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] [[package]] 
name = "requests" @@ -993,6 +1021,14 @@ MarkupSafe = ">=2.1.1" [package.extras] watchdog = ["watchdog"] +[[package]] +name = "wrapt" +version = "1.14.1" +description = "Module for decorators, wrappers and monkey patching." +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + [[package]] name = "wsproto" version = "1.1.0" @@ -1019,7 +1055,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>= [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.11" -content-hash = "478ba8d01d46e13dd56df2b19835750dda11e9a8bfe46ee8e7e22cb4579cf7b5" +content-hash = "e084bad4236ac74fb90fcf4537c78c228b2de606e83c63ac2557a677d681e743" [metadata.files] aiofiles = [ @@ -1038,6 +1074,10 @@ asgiref = [ {file = "asgiref-3.5.2-py3-none-any.whl", hash = "sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4"}, {file = "asgiref-3.5.2.tar.gz", hash = "sha256:4a29362a6acebe09bf1d6640db38c1dc3d9217c68e6f9f6204d72667fc19a424"}, ] +async-timeout = [ + {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, + {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, +] attrs = [ {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, @@ -1222,6 +1262,10 @@ cryptography = [ {file = "cryptography-37.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab"}, {file = "cryptography-37.0.4.tar.gz", hash = "sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82"}, ] +deprecated = [ + {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = 
"sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"}, + {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"}, +] dnspython = [ {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"}, {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"}, @@ -1702,8 +1746,8 @@ python-multipart = [ {file = "python-multipart-0.0.5.tar.gz", hash = "sha256:f7bb5f611fc600d15fa47b3974c8aa16e93724513b49b5f95c81e6624c83fa43"}, ] redis = [ - {file = "redis-3.5.3-py2.py3-none-any.whl", hash = "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24"}, - {file = "redis-3.5.3.tar.gz", hash = "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2"}, + {file = "redis-4.3.4-py3-none-any.whl", hash = "sha256:a52d5694c9eb4292770084fa8c863f79367ca19884b329ab574d5cb2036b3e54"}, + {file = "redis-4.3.4.tar.gz", hash = "sha256:ddf27071df4adf3821c4f2ca59d67525c3a82e5f268bed97b813cb4fabf87880"}, ] requests = [ {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, @@ -1803,6 +1847,72 @@ werkzeug = [ {file = "Werkzeug-2.2.2-py3-none-any.whl", hash = "sha256:f979ab81f58d7318e064e99c4506445d60135ac5cd2e177a2de0089bfd4c9bd5"}, {file = "Werkzeug-2.2.2.tar.gz", hash = "sha256:7ea2d48322cc7c0f8b3a215ed73eabd7b5d75d0b50e31ab006286ccff9e00b8f"}, ] +wrapt = [ + {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, + {file = 
"wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, + {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, + {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, + {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, + {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, + {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, + {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, + {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, + {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, + {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, + {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, + {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = 
"sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, + {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, + {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, + {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, + {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, + {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, + {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, + {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, + {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, + {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, + {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, +] wsproto = [ {file = "wsproto-1.1.0-py3-none-any.whl", hash = "sha256:2218cb57952d90b9fca325c0dcfb08c3bda93e8fd8070b0a17f048e2e47a521b"}, {file = "wsproto-1.1.0.tar.gz", hash = "sha256:a2e56bfd5c7cd83c1369d83b5feccd6d37798b74872866e62616e0ecf111bda8"}, diff --git a/pyproject.toml b/pyproject.toml index b44291d7..8f9624dc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -71,7 +71,7 @@ orjson = "^3.6.4" protobuf = "^4.0.0" pygit2 = "^1.7.0" python-multipart = "^0.0.5" -redis = "^3.5.3" +redis = "^4.0.0" requests = "^2.28.1" paginate = 
"^0.5.6" From a2d08e441ed0e769a5ec44312eba996bcd7f227c Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Sun, 11 Sep 2022 17:59:45 -0700 Subject: [PATCH 108/415] fix(docker): run `pre-commit run -a` once Signed-off-by: Kevin Morris --- docker/scripts/run-tests.sh | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docker/scripts/run-tests.sh b/docker/scripts/run-tests.sh index 5d454ecb..75e562b0 100755 --- a/docker/scripts/run-tests.sh +++ b/docker/scripts/run-tests.sh @@ -22,6 +22,4 @@ cp -v .coverage /data/.coverage chmod 666 /data/.coverage # Run pre-commit checks -for dir in aurweb test migrations; do - pre-commit run -a -done +pre-commit run -a From 03776c4663dda25195b262d483fd46b2a08dc5b9 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Sun, 11 Sep 2022 18:00:11 -0700 Subject: [PATCH 109/415] fix(docker): cache & install pre-commit deps during image build Signed-off-by: Kevin Morris --- Dockerfile | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/Dockerfile b/Dockerfile index 28bca0e4..1f667611 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,6 +2,7 @@ FROM archlinux:base-devel VOLUME /root/.cache/pypoetry/cache VOLUME /root/.cache/pypoetry/artifacts +VOLUME /root/.cache/pre-commit ENV PATH="/root/.poetry/bin:${PATH}" ENV PYTHONPATH=/aurweb @@ -41,3 +42,6 @@ RUN ln -sf /usr/share/zoneinfo/UTC /etc/localtime # Install translations. RUN make -C po all install + +# Install pre-commit repositories and run lint check. +RUN pre-commit run -a From b3853e01b82372bc0ceb41a9352e5a54a6190dda Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Sun, 11 Sep 2022 18:07:54 -0700 Subject: [PATCH 110/415] fix(pre-commit): include migrations in fixes/checks We want all python files related to the project to be checked, really. Some of which are still included, but migrations are a core part of FastAPI aurweb and should be included. 
Signed-off-by: Kevin Morris --- .pre-commit-config.yaml | 2 - ...2ce8e2ffa_utf8mb4_charset_and_collation.py | 52 +++++++++---------- .../be7adae47ac3_upgrade_voteinfo_integers.py | 6 +-- .../d64e5571bc8d_fix_pkgvote_votets.py | 7 ++- ...6e1cd_add_sso_account_id_in_table_users.py | 21 ++++---- .../versions/f47cad5d6d03_initial_revision.py | 2 +- 6 files changed, 44 insertions(+), 46 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1480d2b8..09659269 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,5 +1,3 @@ -exclude: ^migrations/versions - repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.3.0 diff --git a/migrations/versions/56e2ce8e2ffa_utf8mb4_charset_and_collation.py b/migrations/versions/56e2ce8e2ffa_utf8mb4_charset_and_collation.py index c3b79dab..5a9d5f39 100644 --- a/migrations/versions/56e2ce8e2ffa_utf8mb4_charset_and_collation.py +++ b/migrations/versions/56e2ce8e2ffa_utf8mb4_charset_and_collation.py @@ -10,41 +10,41 @@ from alembic import op import aurweb.config # revision identifiers, used by Alembic. 
-revision = '56e2ce8e2ffa' -down_revision = 'ef39fcd6e1cd' +revision = "56e2ce8e2ffa" +down_revision = "ef39fcd6e1cd" branch_labels = None depends_on = None # Tables affected by charset/collate change tables = [ - ('AccountTypes', 'utf8mb4', 'utf8mb4_general_ci'), - ('ApiRateLimit', 'utf8mb4', 'utf8mb4_general_ci'), - ('Bans', 'utf8mb4', 'utf8mb4_general_ci'), - ('DependencyTypes', 'utf8mb4', 'utf8mb4_general_ci'), - ('Groups', 'utf8mb4', 'utf8mb4_general_ci'), - ('Licenses', 'utf8mb4', 'utf8mb4_general_ci'), - ('OfficialProviders', 'utf8mb4', 'utf8mb4_bin'), - ('PackageBases', 'utf8mb4', 'utf8mb4_general_ci'), - ('PackageBlacklist', 'utf8mb4', 'utf8mb4_general_ci'), - ('PackageComments', 'utf8mb4', 'utf8mb4_general_ci'), - ('PackageDepends', 'utf8mb4', 'utf8mb4_general_ci'), - ('PackageKeywords', 'utf8mb4', 'utf8mb4_general_ci'), - ('PackageRelations', 'utf8mb4', 'utf8mb4_general_ci'), - ('PackageRequests', 'utf8mb4', 'utf8mb4_general_ci'), - ('PackageSources', 'utf8mb4', 'utf8mb4_general_ci'), - ('Packages', 'utf8mb4', 'utf8mb4_general_ci'), - ('RelationTypes', 'utf8mb4', 'utf8mb4_general_ci'), - ('RequestTypes', 'utf8mb4', 'utf8mb4_general_ci'), - ('SSHPubKeys', 'utf8mb4', 'utf8mb4_bin'), - ('Sessions', 'utf8mb4', 'utf8mb4_bin'), - ('TU_VoteInfo', 'utf8mb4', 'utf8mb4_general_ci'), - ('Terms', 'utf8mb4', 'utf8mb4_general_ci'), - ('Users', 'utf8mb4', 'utf8mb4_general_ci') + ("AccountTypes", "utf8mb4", "utf8mb4_general_ci"), + ("ApiRateLimit", "utf8mb4", "utf8mb4_general_ci"), + ("Bans", "utf8mb4", "utf8mb4_general_ci"), + ("DependencyTypes", "utf8mb4", "utf8mb4_general_ci"), + ("Groups", "utf8mb4", "utf8mb4_general_ci"), + ("Licenses", "utf8mb4", "utf8mb4_general_ci"), + ("OfficialProviders", "utf8mb4", "utf8mb4_bin"), + ("PackageBases", "utf8mb4", "utf8mb4_general_ci"), + ("PackageBlacklist", "utf8mb4", "utf8mb4_general_ci"), + ("PackageComments", "utf8mb4", "utf8mb4_general_ci"), + ("PackageDepends", "utf8mb4", "utf8mb4_general_ci"), + ("PackageKeywords", 
"utf8mb4", "utf8mb4_general_ci"), + ("PackageRelations", "utf8mb4", "utf8mb4_general_ci"), + ("PackageRequests", "utf8mb4", "utf8mb4_general_ci"), + ("PackageSources", "utf8mb4", "utf8mb4_general_ci"), + ("Packages", "utf8mb4", "utf8mb4_general_ci"), + ("RelationTypes", "utf8mb4", "utf8mb4_general_ci"), + ("RequestTypes", "utf8mb4", "utf8mb4_general_ci"), + ("SSHPubKeys", "utf8mb4", "utf8mb4_bin"), + ("Sessions", "utf8mb4", "utf8mb4_bin"), + ("TU_VoteInfo", "utf8mb4", "utf8mb4_general_ci"), + ("Terms", "utf8mb4", "utf8mb4_general_ci"), + ("Users", "utf8mb4", "utf8mb4_general_ci"), ] # Indexes affected by charset/collate change # Map of Unique Indexes key = index_name, value = [table_name, column1, column2] -indexes = {'ProviderNameProvides': ['OfficialProviders', 'Name', 'Provides']} +indexes = {"ProviderNameProvides": ["OfficialProviders", "Name", "Provides"]} # Source charset/collation, before this migration is run. src_charset = "utf8" diff --git a/migrations/versions/be7adae47ac3_upgrade_voteinfo_integers.py b/migrations/versions/be7adae47ac3_upgrade_voteinfo_integers.py index d910a14b..d273804f 100644 --- a/migrations/versions/be7adae47ac3_upgrade_voteinfo_integers.py +++ b/migrations/versions/be7adae47ac3_upgrade_voteinfo_integers.py @@ -19,8 +19,8 @@ from alembic import op from sqlalchemy.dialects.mysql import INTEGER, TINYINT # revision identifiers, used by Alembic. -revision = 'be7adae47ac3' -down_revision = '56e2ce8e2ffa' +revision = "be7adae47ac3" +down_revision = "56e2ce8e2ffa" branch_labels = None depends_on = None @@ -32,7 +32,7 @@ DOWNGRADE_T = TINYINT(3, unsigned=True) def upgrade(): - """ Upgrade 'Yes', 'No', 'Abstain' and 'ActiveTUs' to unsigned INTEGER. 
""" + """Upgrade 'Yes', 'No', 'Abstain' and 'ActiveTUs' to unsigned INTEGER.""" op.alter_column("TU_VoteInfo", "Yes", type_=UPGRADE_T) op.alter_column("TU_VoteInfo", "No", type_=UPGRADE_T) op.alter_column("TU_VoteInfo", "Abstain", type_=UPGRADE_T) diff --git a/migrations/versions/d64e5571bc8d_fix_pkgvote_votets.py b/migrations/versions/d64e5571bc8d_fix_pkgvote_votets.py index a89d97ef..a20b80fa 100644 --- a/migrations/versions/d64e5571bc8d_fix_pkgvote_votets.py +++ b/migrations/versions/d64e5571bc8d_fix_pkgvote_votets.py @@ -8,20 +8,19 @@ Create Date: 2022-02-18 12:47:05.322766 from datetime import datetime import sqlalchemy as sa - from alembic import op from aurweb import db from aurweb.models import PackageVote # revision identifiers, used by Alembic. -revision = 'd64e5571bc8d' -down_revision = 'be7adae47ac3' +revision = "d64e5571bc8d" +down_revision = "be7adae47ac3" branch_labels = None depends_on = None table = PackageVote.__tablename__ -column = 'VoteTS' +column = "VoteTS" epoch = datetime(1970, 1, 1) diff --git a/migrations/versions/ef39fcd6e1cd_add_sso_account_id_in_table_users.py b/migrations/versions/ef39fcd6e1cd_add_sso_account_id_in_table_users.py index 49bf055a..3cf369e7 100644 --- a/migrations/versions/ef39fcd6e1cd_add_sso_account_id_in_table_users.py +++ b/migrations/versions/ef39fcd6e1cd_add_sso_account_id_in_table_users.py @@ -6,31 +6,32 @@ Create Date: 2020-06-08 10:04:13.898617 """ import sqlalchemy as sa - from alembic import op from sqlalchemy.engine.reflection import Inspector # revision identifiers, used by Alembic. 
-revision = 'ef39fcd6e1cd' -down_revision = 'f47cad5d6d03' +revision = "ef39fcd6e1cd" +down_revision = "f47cad5d6d03" branch_labels = None depends_on = None def table_has_column(table, column_name): for element in Inspector.from_engine(op.get_bind()).get_columns(table): - if element.get('name') == column_name: + if element.get("name") == column_name: return True return False def upgrade(): - if not table_has_column('Users', 'SSOAccountID'): - op.add_column('Users', sa.Column('SSOAccountID', sa.String(length=255), nullable=True)) - op.create_unique_constraint(None, 'Users', ['SSOAccountID']) + if not table_has_column("Users", "SSOAccountID"): + op.add_column( + "Users", sa.Column("SSOAccountID", sa.String(length=255), nullable=True) + ) + op.create_unique_constraint(None, "Users", ["SSOAccountID"]) def downgrade(): - if table_has_column('Users', 'SSOAccountID'): - op.drop_constraint('SSOAccountID', 'Users', type_='unique') - op.drop_column('Users', 'SSOAccountID') + if table_has_column("Users", "SSOAccountID"): + op.drop_constraint("SSOAccountID", "Users", type_="unique") + op.drop_column("Users", "SSOAccountID") diff --git a/migrations/versions/f47cad5d6d03_initial_revision.py b/migrations/versions/f47cad5d6d03_initial_revision.py index b214beea..7373e0fb 100644 --- a/migrations/versions/f47cad5d6d03_initial_revision.py +++ b/migrations/versions/f47cad5d6d03_initial_revision.py @@ -5,7 +5,7 @@ Create Date: 2020-02-23 13:23:32.331396 """ # revision identifiers, used by Alembic. 
-revision = 'f47cad5d6d03' +revision = "f47cad5d6d03" down_revision = None branch_labels = None depends_on = None From 4e0618469df308340cb3ddb2f1c74d04c470c57a Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Sun, 11 Sep 2022 18:40:31 -0700 Subject: [PATCH 111/415] fix(test): JSONResponse() requires a content argument with fastapi 0.83.0 Signed-off-by: Kevin Morris --- test/test_util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_util.py b/test/test_util.py index 686e35b4..2e8b2e4e 100644 --- a/test/test_util.py +++ b/test/test_util.py @@ -44,7 +44,7 @@ async def test_error_or_result(): assert data.get("error") == "No response returned." async def good_route(request: fastapi.Request): - return JSONResponse() + return JSONResponse("{}") response = await util.error_or_result(good_route, Request()) assert response.status_code == HTTPStatus.OK From bb6e602e13184b79f8d5644866ab76215b723853 Mon Sep 17 00:00:00 2001 From: renovate Date: Mon, 12 Sep 2022 01:24:39 +0000 Subject: [PATCH 112/415] fix(deps): update dependency fastapi to ^0.83.0 --- poetry.lock | 27 ++++++++++++++------------- pyproject.toml | 2 +- 2 files changed, 15 insertions(+), 14 deletions(-) diff --git a/poetry.lock b/poetry.lock index ef0fc1f7..ef2c70f9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -264,7 +264,7 @@ lua = ["lupa (>=1.13,<2.0)"] [[package]] name = "fastapi" -version = "0.71.0" +version = "0.83.0" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" category = "main" optional = false @@ -272,13 +272,13 @@ python-versions = ">=3.6.1" [package.dependencies] pydantic = ">=1.6.2,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1.7.3,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0" -starlette = "0.17.1" +starlette = "0.19.1" [package.extras] -all = ["email_validator (>=1.1.1,<2.0.0)", "itsdangerous (>=1.1.0,<3.0.0)", "jinja2 (>=2.11.2,<4.0.0)", "orjson (>=3.2.1,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml 
(>=5.3.1,<6.0.0)", "requests (>=2.24.0,<3.0.0)", "ujson (>=4.0.1,<5.0.0)", "uvicorn[standard] (>=0.12.0,<0.16.0)"] -dev = ["autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "uvicorn[standard] (>=0.12.0,<0.16.0)"] -doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<6.0.0)", "typer-cli (>=0.0.12,<0.0.13)"] -test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==21.9b0)", "databases[sqlite] (>=0.3.2,<0.6.0)", "email_validator (>=1.1.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.14.0,<0.19.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "orjson (>=3.2.1,<4.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=6.2.4,<7.0.0)", "pytest-cov (>=2.12.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "requests (>=2.24.0,<3.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "types-dataclasses (==0.1.7)", "types-orjson (==3.6.0)", "types-ujson (==0.1.1)", "ujson (>=4.0.1,<5.0.0)"] +all = ["email_validator (>=1.1.1,<2.0.0)", "itsdangerous (>=1.1.0,<3.0.0)", "jinja2 (>=2.11.2,<4.0.0)", "orjson (>=3.2.1,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)"] +dev = ["autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "pre-commit (>=2.17.0,<3.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)"] +doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer (>=0.4.1,<0.5.0)"] +test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.3.0)", "databases[sqlite] (>=0.3.2,<0.6.0)", "email_validator (>=1.1.1,<2.0.0)", 
"flake8 (>=3.8.3,<6.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.14.0,<0.19.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "orjson (>=3.2.1,<4.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=6.2.4,<7.0.0)", "pytest-cov (>=2.12.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "requests (>=2.24.0,<3.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "types-dataclasses (==0.6.5)", "types-orjson (==3.6.2)", "types-ujson (==4.2.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"] [[package]] name = "feedgen" @@ -924,14 +924,15 @@ parse = "*" [[package]] name = "starlette" -version = "0.17.1" +version = "0.19.1" description = "The little ASGI library that shines." category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -anyio = ">=3.0.0,<4" +anyio = ">=3.4.0,<5" +typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} [package.extras] full = ["itsdangerous", "jinja2", "python-multipart", "pyyaml", "requests"] @@ -1055,7 +1056,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>= [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.11" -content-hash = "e084bad4236ac74fb90fcf4537c78c228b2de606e83c63ac2557a677d681e743" +content-hash = "e1f9d796eea832af84c40c754ee3c58e633e98bd7cdb42a985b2c8657e82037e" [metadata.files] aiofiles = [ @@ -1283,8 +1284,8 @@ fakeredis = [ {file = "fakeredis-1.9.0.tar.gz", hash = "sha256:60639946e3bb1274c30416f539f01f9d73b4ea68c244c1442f5524e45f51e882"}, ] fastapi = [ - {file = "fastapi-0.71.0-py3-none-any.whl", hash = "sha256:a78eca6b084de9667f2d5f37e2ae297270e5a119cd01c2f04815795da92fc87f"}, - {file = "fastapi-0.71.0.tar.gz", hash = "sha256:2b5ac0ae89c80b40d1dd4b2ea0bb1f78d7c4affd3644d080bf050f084759fff2"}, + {file = "fastapi-0.83.0-py3-none-any.whl", hash = "sha256:694a2b6c2607a61029a4be1c6613f84d74019cb9f7a41c7a475dca8e715f9368"}, + {file = "fastapi-0.83.0.tar.gz", hash = 
"sha256:96eb692350fe13d7a9843c3c87a874f0d45102975257dd224903efd6c0fde3bd"}, ] feedgen = [ {file = "feedgen-0.9.0.tar.gz", hash = "sha256:8e811bdbbed6570034950db23a4388453628a70e689a6e8303ccec430f5a804a"}, @@ -1812,8 +1813,8 @@ srcinfo = [ {file = "srcinfo-0.0.8.tar.gz", hash = "sha256:5ac610cf8b15d4b0a0374bd1f7ad301675c2938f0414addf3ef7d7e3fcaf5c65"}, ] starlette = [ - {file = "starlette-0.17.1-py3-none-any.whl", hash = "sha256:26a18cbda5e6b651c964c12c88b36d9898481cd428ed6e063f5f29c418f73050"}, - {file = "starlette-0.17.1.tar.gz", hash = "sha256:57eab3cc975a28af62f6faec94d355a410634940f10b30d68d31cb5ec1b44ae8"}, + {file = "starlette-0.19.1-py3-none-any.whl", hash = "sha256:5a60c5c2d051f3a8eb546136aa0c9399773a689595e099e0877704d5888279bf"}, + {file = "starlette-0.19.1.tar.gz", hash = "sha256:c6d21096774ecb9639acad41b86b7706e52ba3bf1dc13ea4ed9ad593d47e24c7"}, ] "tap.py" = [ {file = "tap.py-3.1-py3-none-any.whl", hash = "sha256:928c852f3361707b796c93730cc5402c6378660b161114461066acf53d65bf5d"}, diff --git a/pyproject.toml b/pyproject.toml index 8f9624dc..4649d74f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -93,7 +93,7 @@ pytest-xdist = "^2.4.0" filelock = "^3.3.2" posix-ipc = "^1.0.5" pyalpm = "^0.10.6" -fastapi = "^0.71.0" +fastapi = "^0.83.0" srcinfo = "^0.0.8" [tool.poetry.dev-dependencies] From df0a4a2be242a8bd5d318e71dcca0d90e0e1cc6a Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Sun, 11 Sep 2022 19:04:42 -0700 Subject: [PATCH 113/415] feat(rpc): add /rpc/v5/{type} openapi-compatible routes We will be modeling future RPC implementations on an OpenAPI spec. While this commit does not completely cohere to OpenAPI in terms of response data, this is a good start and will allow us to cleanly document these openapi routes in the current and future. 
This commit brings in the new RPC routes: - GET /rpc/v5/info/{pkgname} - GET /rpc/v5/info?arg[]=pkg1&arg[]=pkg2 - POST /rpc/v5/info with JSON data `{"arg": ["pkg1", "pkg2"]}` - GET /rpc/v5/search?arg=keywords&by=valid-by-value - POST /rpc/v5/search with JSON data `{"by": "valid-by-value", "arg": "keywords"}` Signed-off-by: Kevin Morris --- aurweb/routers/rpc.py | 104 ++++++++++++++++++++++++++++++++++++++++++ test/test_rpc.py | 95 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 199 insertions(+) diff --git a/aurweb/routers/rpc.py b/aurweb/routers/rpc.py index a0cf5019..9777c0a2 100644 --- a/aurweb/routers/rpc.py +++ b/aurweb/routers/rpc.py @@ -160,3 +160,107 @@ async def rpc_post( callback: Optional[str] = Form(default=None), ): return await rpc_request(request, v, type, by, arg, args, callback) + + +@router.get("/rpc/v{version}/info/{name}") +async def rpc_openapi_info(request: Request, version: int, name: str): + return await rpc_request( + request, + version, + "info", + defaults.RPC_SEARCH_BY, + name, + [], + ) + + +@router.get("/rpc/v{version}/info") +async def rpc_openapi_multiinfo( + request: Request, + version: int, + args: Optional[list[str]] = Query(default=[], alias="arg[]"), +): + arg = args.pop(0) if args else None + return await rpc_request( + request, + version, + "info", + defaults.RPC_SEARCH_BY, + arg, + args, + ) + + +@router.post("/rpc/v{version}/info") +async def rpc_openapi_multiinfo_post( + request: Request, + version: int, +): + data = await request.json() + + args = data.get("arg", []) + if not isinstance(args, list): + rpc = RPC(version, "info") + return JSONResponse( + rpc.error("the 'arg' parameter must be of array type"), + status_code=HTTPStatus.BAD_REQUEST, + ) + + arg = args.pop(0) if args else None + return await rpc_request( + request, + version, + "info", + defaults.RPC_SEARCH_BY, + arg, + args, + ) + + +@router.get("/rpc/v{version}/search") +async def rpc_openapi_search( + request: Request, + version: int, + by: 
Optional[str] = Query(default=defaults.RPC_SEARCH_BY), + arg: Optional[str] = Query(default=str()), +): + return await rpc_request( + request, + version, + "search", + by, + arg, + [], + ) + + +@router.post("/rpc/v{version}/search") +async def rpc_openapi_search_post( + request: Request, + version: int, +): + data = await request.json() + by = data.get("by", defaults.RPC_SEARCH_BY) + if not isinstance(by, str): + rpc = RPC(version, "search") + return JSONResponse( + rpc.error("the 'by' parameter must be of string type"), + status_code=HTTPStatus.BAD_REQUEST, + ) + + arg = data.get("arg", str()) + if not isinstance(arg, str): + rpc = RPC(version, "search") + return JSONResponse( + rpc.error("the 'arg' parameter must be of string type"), + status_code=HTTPStatus.BAD_REQUEST, + ) + + return await rpc_request( + request, + version, + "search", + by, + arg, + [], + ) diff --git a/test/test_rpc.py b/test/test_rpc.py index ed7e8894..0edd3e2e 100644 --- a/test/test_rpc.py +++ b/test/test_rpc.py @@ -933,3 +933,98 @@ def test_rpc_too_many_info_results(client: TestClient, packages: list[Package]): with client as request: resp = request.get("/rpc", params=params) assert resp.json().get("error") == "Too many package results." 
+ + +def test_rpc_openapi_info(client: TestClient, packages: list[Package]): + pkgname = packages[0].Name + + with client as request: + endp = f"/rpc/v5/info/{pkgname}" + resp = request.get(endp) + assert resp.status_code == HTTPStatus.OK + + data = resp.json() + assert data.get("resultcount") == 1 + + +def test_rpc_openapi_multiinfo(client: TestClient, packages: list[Package]): + pkgname = packages[0].Name + + with client as request: + endp = "/rpc/v5/info" + resp = request.get(endp, params={"arg[]": [pkgname]}) + assert resp.status_code == HTTPStatus.OK + + data = resp.json() + assert data.get("resultcount") == 1 + + +def test_rpc_openapi_multiinfo_post(client: TestClient, packages: list[Package]): + pkgname = packages[0].Name + + with client as request: + endp = "/rpc/v5/info" + resp = request.post(endp, json={"arg": [pkgname]}) + assert resp.status_code == HTTPStatus.OK + + data = resp.json() + assert data.get("resultcount") == 1 + + +def test_rpc_openapi_multiinfo_post_bad_request( + client: TestClient, packages: list[Package] +): + pkgname = packages[0].Name + + with client as request: + endp = "/rpc/v5/info" + resp = request.post(endp, json={"arg": pkgname}) + assert resp.status_code == HTTPStatus.BAD_REQUEST + + data = resp.json() + expected = "the 'arg' parameter must be of array type" + assert data.get("error") == expected + + +def test_rpc_openapi_search(client: TestClient, packages: list[Package]): + pkgname = packages[0].Name + + with client as request: + endp = "/rpc/v5/search" + resp = request.get(endp, params={"arg": pkgname}) + assert resp.status_code == HTTPStatus.OK + + data = resp.json() + assert data.get("resultcount") == 1 + + +def test_rpc_openapi_search_post(client: TestClient, packages: list[Package]): + pkgname = packages[0].Name + + with client as request: + endp = "/rpc/v5/search" + resp = request.post(endp, json={"arg": pkgname}) + assert resp.status_code == HTTPStatus.OK + + data = resp.json() + assert data.get("resultcount") == 1 + + 
+def test_rpc_openapi_search_post_bad_request(client: TestClient): + # Test by parameter + with client as request: + endp = "/rpc/v5/search" + resp = request.post(endp, json={"by": 1}) + assert resp.status_code == HTTPStatus.BAD_REQUEST + data = resp.json() + expected = "the 'by' parameter must be of string type" + assert data.get("error") == expected + + # Test arg parameter + with client as request: + endp = "/rpc/v5/search" + resp = request.post(endp, json={"arg": ["a", "list"]}) + assert resp.status_code == HTTPStatus.BAD_REQUEST + data = resp.json() + expected = "the 'arg' parameter must be of string type" + assert data.get("error") == expected From 9faa7b801d54fb853bcb54c720ae0a3e297e0b10 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Sun, 11 Sep 2022 15:22:10 -0700 Subject: [PATCH 114/415] feat: add cdn.jsdelivr.net to script/style CSP Signed-off-by: Kevin Morris --- aurweb/asgi.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/aurweb/asgi.py b/aurweb/asgi.py index ccca3fc5..d1703c10 100644 --- a/aurweb/asgi.py +++ b/aurweb/asgi.py @@ -253,10 +253,14 @@ async def add_security_headers(request: Request, call_next: typing.Callable): # Add CSP header. nonce = request.user.nonce csp = "default-src 'self'; " - script_hosts = [] + + # swagger-ui needs access to cdn.jsdelivr.net javascript + script_hosts = ["cdn.jsdelivr.net"] csp += f"script-src 'self' 'nonce-{nonce}' " + " ".join(script_hosts) - # It's fine if css is inlined. - csp += "; style-src 'self' 'unsafe-inline'" + + # swagger-ui needs access to cdn.jsdelivr.net css + css_hosts = ["cdn.jsdelivr.net"] + csp += "; style-src 'self' 'unsafe-inline' " + " ".join(css_hosts) response.headers["Content-Security-Policy"] = csp # Add XTCO header. 
From 5e75a00c17609dc72fb8600cb309f07a7dde41e5 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Sun, 11 Sep 2022 19:59:16 -0700 Subject: [PATCH 115/415] upgrade: bump to version v6.1.3 Signed-off-by: Kevin Morris --- aurweb/config.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aurweb/config.py b/aurweb/config.py index df129c39..8b97cd0e 100644 --- a/aurweb/config.py +++ b/aurweb/config.py @@ -5,7 +5,7 @@ from typing import Any # Publicly visible version of aurweb. This is used to display # aurweb versioning in the footer and must be maintained. # Todo: Make this dynamic/automated. -AURWEB_VERSION = "v6.1.2" +AURWEB_VERSION = "v6.1.3" _parser = None diff --git a/pyproject.toml b/pyproject.toml index 4649d74f..303b7637 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ combine_as_imports = true # [tool.poetry] name = "aurweb" -version = "v6.1.2" +version = "v6.1.3" license = "GPL-2.0-only" description = "Source code for the Arch User Repository's website" homepage = "https://aur.archlinux.org" From 8e8b746a5b82511716b397c31e42f199a87e65d9 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 12 Sep 2022 06:49:20 -0700 Subject: [PATCH 116/415] feat(rpc): add GET /rpc/v5/search/{arg} openapi route Signed-off-by: Kevin Morris --- aurweb/routers/rpc.py | 19 ++++++++++++++++++- test/test_rpc.py | 12 ++++++++++++ 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/aurweb/routers/rpc.py b/aurweb/routers/rpc.py index 9777c0a2..25574ff8 100644 --- a/aurweb/routers/rpc.py +++ b/aurweb/routers/rpc.py @@ -217,12 +217,29 @@ async def rpc_openapi_multiinfo_post( ) +@router.get("/rpc/v{version}/search/{arg}") +async def rpc_openapi_search_arg( + request: Request, + version: int, + arg: str, + by: Optional[str] = Query(default=defaults.RPC_SEARCH_BY), +): + return await rpc_request( + request, + version, + "search", + by, + arg, + [], + ) + + @router.get("/rpc/v{version}/search") async def rpc_openapi_search( 
request: Request, version: int, - by: Optional[str] = Query(default=defaults.RPC_SEARCH_BY), arg: Optional[str] = Query(default=str()), + by: Optional[str] = Query(default=defaults.RPC_SEARCH_BY), ): return await rpc_request( request, diff --git a/test/test_rpc.py b/test/test_rpc.py index 0edd3e2e..e5b37542 100644 --- a/test/test_rpc.py +++ b/test/test_rpc.py @@ -986,6 +986,18 @@ def test_rpc_openapi_multiinfo_post_bad_request( assert data.get("error") == expected +def test_rpc_openapi_search_arg(client: TestClient, packages: list[Package]): + pkgname = packages[0].Name + + with client as request: + endp = f"/rpc/v5/search/{pkgname}" + resp = request.get(endp) + assert resp.status_code == HTTPStatus.OK + + data = resp.json() + assert data.get("resultcount") == 1 + + def test_rpc_openapi_search(client: TestClient, packages: list[Package]): pkgname = packages[0].Name From 17f2c05fd35cb105a5346671bd2e2ae178b83f02 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 12 Sep 2022 06:49:54 -0700 Subject: [PATCH 117/415] feat(rpc): add GET /rpc/v5/suggest/{arg} openapi route Signed-off-by: Kevin Morris --- aurweb/routers/rpc.py | 12 ++++++++++++ test/test_rpc.py | 16 ++++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/aurweb/routers/rpc.py b/aurweb/routers/rpc.py index 25574ff8..23978f1d 100644 --- a/aurweb/routers/rpc.py +++ b/aurweb/routers/rpc.py @@ -281,3 +281,15 @@ async def rpc_openapi_search_post( arg, [], ) + + +@router.get("/rpc/v{version}/suggest/{arg}") +async def rpc_openapi_suggest(request: Request, version: int, arg: str): + return await rpc_request( + request, + version, + "suggest", + defaults.RPC_SEARCH_BY, + arg, + [], + ) diff --git a/test/test_rpc.py b/test/test_rpc.py index e5b37542..84ddd8d7 100644 --- a/test/test_rpc.py +++ b/test/test_rpc.py @@ -1040,3 +1040,19 @@ def test_rpc_openapi_search_post_bad_request(client: TestClient): data = resp.json() expected = "the 'arg' parameter must be of string type" assert data.get("error") == 
expected + + +def test_rpc_openapi_suggest(client: TestClient, packages: list[Package]): + suggestions = { + "big": ["big-chungus"], + "chungy": ["chungy-chungus"], + } + + for term, expected in suggestions.items(): + with client as request: + endp = f"/rpc/v5/suggest/{term}" + resp = request.get(endp) + assert resp.status_code == HTTPStatus.OK + + data = resp.json() + assert data == expected From 624954042b173c285e9ef5a87adc6319c3293685 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 12 Sep 2022 06:59:52 -0700 Subject: [PATCH 118/415] doc(rpc): include route doc at the top of aurweb.routers.rpc Signed-off-by: Kevin Morris --- aurweb/routers/rpc.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/aurweb/routers/rpc.py b/aurweb/routers/rpc.py index 23978f1d..f15b9781 100644 --- a/aurweb/routers/rpc.py +++ b/aurweb/routers/rpc.py @@ -1,3 +1,28 @@ +""" +RPC API routing module + +For legacy route documentation, see https://aur.archlinux.org/rpc + +Legacy Routes: +- GET /rpc +- POST /rpc + +Legacy example (version 5): /rpc?v=5&type=info&arg=my-package + +For OpenAPI route documentation, see https://aur.archlinux.org/docs + +OpenAPI Routes: +- GET /rpc/v{version}/info/{arg} +- GET /rpc/v{version}/info +- POST /rpc/v{version}/info +- GET /rpc/v{version}/search/{arg} +- GET /rpc/v{version}/search +- POST /rpc/v{version}/search +- GET /rpc/v{version}/suggest/{arg} + +OpenAPI example (version 5): /rpc/v5/info/my-package + +""" import hashlib import re from http import HTTPStatus From 37c7dee099841cfe368c64c93ae7432cc4364858 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 12 Sep 2022 10:36:50 -0700 Subject: [PATCH 119/415] fix: produce DeleteNotification a line before handle_request With this on a single line, the argument ordering and class/func execution was a bit too RNG causing exceptions to be thrown when producing a notification based off of a deleted pkgbase object. 
Signed-off-by: Kevin Morris --- aurweb/pkgbase/actions.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/aurweb/pkgbase/actions.py b/aurweb/pkgbase/actions.py index 4834f8dd..9e7b0df5 100644 --- a/aurweb/pkgbase/actions.py +++ b/aurweb/pkgbase/actions.py @@ -99,9 +99,8 @@ def pkgbase_adopt_instance(request: Request, pkgbase: PackageBase) -> None: def pkgbase_delete_instance( request: Request, pkgbase: PackageBase, comments: str = str() ) -> list[notify.Notification]: - notifs = handle_request(request, DELETION_ID, pkgbase) + [ - notify.DeleteNotification(request.user.ID, pkgbase.ID) - ] + notif = notify.DeleteNotification(request.user.ID, pkgbase.ID) + notifs = handle_request(request, DELETION_ID, pkgbase) + [notif] with db.begin(): update_closure_comment(pkgbase, DELETION_ID, comments) From adc3a218636e836988105f31872b139d88c5bcc1 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 12 Sep 2022 12:28:42 -0700 Subject: [PATCH 120/415] fix: add 'unsafe-inline' to script-src CSP swagger-ui uses inline javascript to bootstrap itself, so we need to allow unsafe inline because we can't give swagger-ui a nonce to embed. 
Signed-off-by: Kevin Morris --- aurweb/asgi.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/aurweb/asgi.py b/aurweb/asgi.py index d1703c10..72b47b4c 100644 --- a/aurweb/asgi.py +++ b/aurweb/asgi.py @@ -256,7 +256,9 @@ async def add_security_headers(request: Request, call_next: typing.Callable): # swagger-ui needs access to cdn.jsdelivr.net javascript script_hosts = ["cdn.jsdelivr.net"] - csp += f"script-src 'self' 'nonce-{nonce}' " + " ".join(script_hosts) + csp += f"script-src 'self' 'unsafe-inline' 'nonce-{nonce}' " + " ".join( + script_hosts + ) # swagger-ui needs access to cdn.jsdelivr.net css css_hosts = ["cdn.jsdelivr.net"] From f450b5dfc7e684392b85c253f44521bf097f095b Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 12 Sep 2022 12:29:57 -0700 Subject: [PATCH 121/415] upgrade: bump to version v6.1.4 Signed-off-by: Kevin Morris --- aurweb/config.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aurweb/config.py b/aurweb/config.py index 8b97cd0e..c1f87984 100644 --- a/aurweb/config.py +++ b/aurweb/config.py @@ -5,7 +5,7 @@ from typing import Any # Publicly visible version of aurweb. This is used to display # aurweb versioning in the footer and must be maintained. # Todo: Make this dynamic/automated. 
-AURWEB_VERSION = "v6.1.3" +AURWEB_VERSION = "v6.1.4" _parser = None diff --git a/pyproject.toml b/pyproject.toml index 303b7637..f732f2e7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ combine_as_imports = true # [tool.poetry] name = "aurweb" -version = "v6.1.3" +version = "v6.1.4" license = "GPL-2.0-only" description = "Source code for the Arch User Repository's website" homepage = "https://aur.archlinux.org" From ec3152014b05c2c6730e8363f32be01609c711b0 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 13 Sep 2022 12:47:52 -0700 Subject: [PATCH 122/415] fix: retry transactions who fail due to deadlocks In my opinion, this kind of handling of transactions is pretty ugly. The being said, we have issues with running into deadlocks on aur.al, so this commit works against that immediate bug. An ideal solution would be to deal with retrying transactions through the `db.begin()` scope, so we wouldn't have to explicitly annotate functions as "retry functions," which is what this commit does. 
Closes #376 Signed-off-by: Kevin Morris --- aurweb/auth/__init__.py | 4 +- aurweb/db.py | 40 +++++++++++++++ aurweb/models/user.py | 2 +- aurweb/packages/requests.py | 23 +++++---- aurweb/packages/util.py | 20 ++++---- aurweb/pkgbase/actions.py | 91 ++++++++++++++++++++++++---------- aurweb/pkgbase/util.py | 2 + aurweb/ratelimit.py | 23 ++++++--- aurweb/routers/accounts.py | 26 +++++----- aurweb/routers/auth.py | 29 ++++++++--- aurweb/routers/html.py | 1 + aurweb/routers/pkgbase.py | 19 +++++++ aurweb/routers/requests.py | 1 + aurweb/routers/trusted_user.py | 16 +++--- aurweb/users/update.py | 6 +++ test/test_db.py | 20 ++++++++ 16 files changed, 241 insertions(+), 82 deletions(-) diff --git a/aurweb/auth/__init__.py b/aurweb/auth/__init__.py index 0c8bba69..b8056f91 100644 --- a/aurweb/auth/__init__.py +++ b/aurweb/auth/__init__.py @@ -96,6 +96,7 @@ class AnonymousUser: class BasicAuthBackend(AuthenticationBackend): + @db.async_retry_deadlock async def authenticate(self, conn: HTTPConnection): unauthenticated = (None, AnonymousUser()) sid = conn.cookies.get("AURSID") @@ -122,8 +123,7 @@ class BasicAuthBackend(AuthenticationBackend): # At this point, we cannot have an invalid user if the record # exists, due to ForeignKey constraints in the schema upheld # by mysqlclient. 
- with db.begin(): - user = db.query(User).filter(User.ID == record.UsersID).first() + user = db.query(User).filter(User.ID == record.UsersID).first() user.nonce = util.make_nonce() user.authenticated = True diff --git a/aurweb/db.py b/aurweb/db.py index 7425d928..ab0f80b8 100644 --- a/aurweb/db.py +++ b/aurweb/db.py @@ -161,6 +161,46 @@ def begin(): return get_session().begin() +def retry_deadlock(func): + from sqlalchemy.exc import OperationalError + + def wrapper(*args, _i: int = 0, **kwargs): + # Retry 10 times, then raise the exception + # If we fail before the 10th, recurse into `wrapper` + # If we fail on the 10th, continue to throw the exception + limit = 10 + try: + return func(*args, **kwargs) + except OperationalError as exc: + if _i < limit and "Deadlock found" in str(exc): + # Retry on deadlock by recursing into `wrapper` + return wrapper(*args, _i=_i + 1, **kwargs) + # Otherwise, just raise the exception + raise exc + + return wrapper + + +def async_retry_deadlock(func): + from sqlalchemy.exc import OperationalError + + async def wrapper(*args, _i: int = 0, **kwargs): + # Retry 10 times, then raise the exception + # If we fail before the 10th, recurse into `wrapper` + # If we fail on the 10th, continue to throw the exception + limit = 10 + try: + return await func(*args, **kwargs) + except OperationalError as exc: + if _i < limit and "Deadlock found" in str(exc): + # Retry on deadlock by recursing into `wrapper` + return await wrapper(*args, _i=_i + 1, **kwargs) + # Otherwise, just raise the exception + raise exc + + return wrapper + + def get_sqlalchemy_url(): """ Build an SQLAlchemy URL for use with create_engine. 
diff --git a/aurweb/models/user.py b/aurweb/models/user.py index 0404c77a..0d638677 100644 --- a/aurweb/models/user.py +++ b/aurweb/models/user.py @@ -151,7 +151,7 @@ class User(Base): return has_credential(self, credential, approved) - def logout(self, request: Request): + def logout(self, request: Request) -> None: self.authenticated = False if self.session: with db.begin(): diff --git a/aurweb/packages/requests.py b/aurweb/packages/requests.py index 7309a880..c09082f5 100644 --- a/aurweb/packages/requests.py +++ b/aurweb/packages/requests.py @@ -151,6 +151,7 @@ def close_pkgreq( pkgreq.ClosedTS = now +@db.retry_deadlock def handle_request( request: Request, reqtype_id: int, pkgbase: PackageBase, target: PackageBase = None ) -> list[notify.Notification]: @@ -239,15 +240,19 @@ def handle_request( to_accept.append(pkgreq) # Update requests with their new status and closures. - with db.begin(): - util.apply_all( - to_accept, - lambda p: close_pkgreq(p, request.user, pkgbase, target, ACCEPTED_ID), - ) - util.apply_all( - to_reject, - lambda p: close_pkgreq(p, request.user, pkgbase, target, REJECTED_ID), - ) + @db.retry_deadlock + def retry_closures(): + with db.begin(): + util.apply_all( + to_accept, + lambda p: close_pkgreq(p, request.user, pkgbase, target, ACCEPTED_ID), + ) + util.apply_all( + to_reject, + lambda p: close_pkgreq(p, request.user, pkgbase, target, REJECTED_ID), + ) + + retry_closures() # Create RequestCloseNotifications for all requests involved. 
for pkgreq in to_accept + to_reject: diff --git a/aurweb/packages/util.py b/aurweb/packages/util.py index 1ae7f9fe..b6ba7e20 100644 --- a/aurweb/packages/util.py +++ b/aurweb/packages/util.py @@ -99,8 +99,7 @@ def get_pkg_or_base( :raises HTTPException: With status code 404 if record doesn't exist :return: {Package,PackageBase} instance """ - with db.begin(): - instance = db.query(cls).filter(cls.Name == name).first() + instance = db.query(cls).filter(cls.Name == name).first() if not instance: raise HTTPException(status_code=HTTPStatus.NOT_FOUND) return instance @@ -133,16 +132,15 @@ def updated_packages(limit: int = 0, cache_ttl: int = 600) -> list[models.Packag # If we already have a cache, deserialize it and return. return orjson.loads(packages) - with db.begin(): - query = ( - db.query(models.Package) - .join(models.PackageBase) - .filter(models.PackageBase.PackagerUID.isnot(None)) - .order_by(models.PackageBase.ModifiedTS.desc()) - ) + query = ( + db.query(models.Package) + .join(models.PackageBase) + .filter(models.PackageBase.PackagerUID.isnot(None)) + .order_by(models.PackageBase.ModifiedTS.desc()) + ) - if limit: - query = query.limit(limit) + if limit: + query = query.limit(limit) packages = [] for pkg in query: diff --git a/aurweb/pkgbase/actions.py b/aurweb/pkgbase/actions.py index 9e7b0df5..a453cb36 100644 --- a/aurweb/pkgbase/actions.py +++ b/aurweb/pkgbase/actions.py @@ -2,7 +2,7 @@ from fastapi import Request from aurweb import db, logging, util from aurweb.auth import creds -from aurweb.models import PackageBase +from aurweb.models import PackageBase, User from aurweb.models.package_comaintainer import PackageComaintainer from aurweb.models.package_notification import PackageNotification from aurweb.models.request_type import DELETION_ID, MERGE_ID, ORPHAN_ID @@ -13,6 +13,12 @@ from aurweb.scripts import notify, popupdate logger = logging.get_logger(__name__) +@db.retry_deadlock +def _retry_notify(user: User, pkgbase: PackageBase) -> None: + with 
db.begin(): + db.create(PackageNotification, PackageBase=pkgbase, User=user) + + def pkgbase_notify_instance(request: Request, pkgbase: PackageBase) -> None: notif = db.query( pkgbase.notifications.filter( @@ -21,8 +27,13 @@ def pkgbase_notify_instance(request: Request, pkgbase: PackageBase) -> None: ).scalar() has_cred = request.user.has_credential(creds.PKGBASE_NOTIFY) if has_cred and not notif: - with db.begin(): - db.create(PackageNotification, PackageBase=pkgbase, User=request.user) + _retry_notify(request.user, pkgbase) + + +@db.retry_deadlock +def _retry_unnotify(notif: PackageNotification, pkgbase: PackageBase) -> None: + with db.begin(): + db.delete(notif) def pkgbase_unnotify_instance(request: Request, pkgbase: PackageBase) -> None: @@ -31,8 +42,15 @@ def pkgbase_unnotify_instance(request: Request, pkgbase: PackageBase) -> None: ).first() has_cred = request.user.has_credential(creds.PKGBASE_NOTIFY) if has_cred and notif: - with db.begin(): - db.delete(notif) + _retry_unnotify(notif, pkgbase) + + +@db.retry_deadlock +def _retry_unflag(pkgbase: PackageBase) -> None: + with db.begin(): + pkgbase.OutOfDateTS = None + pkgbase.Flagger = None + pkgbase.FlaggerComment = str() def pkgbase_unflag_instance(request: Request, pkgbase: PackageBase) -> None: @@ -42,20 +60,17 @@ def pkgbase_unflag_instance(request: Request, pkgbase: PackageBase) -> None: + [c.User for c in pkgbase.comaintainers], ) if has_cred: - with db.begin(): - pkgbase.OutOfDateTS = None - pkgbase.Flagger = None - pkgbase.FlaggerComment = str() + _retry_unflag(pkgbase) -def pkgbase_disown_instance(request: Request, pkgbase: PackageBase) -> None: - disowner = request.user - notifs = [notify.DisownNotification(disowner.ID, pkgbase.ID)] +@db.retry_deadlock +def _retry_disown(request: Request, pkgbase: PackageBase): + notifs: list[notify.Notification] = [] - is_maint = disowner == pkgbase.Maintainer + is_maint = request.user == pkgbase.Maintainer comaint = pkgbase.comaintainers.filter( - 
PackageComaintainer.User == disowner + PackageComaintainer.User == request.user ).one_or_none() is_comaint = comaint is not None @@ -85,38 +100,48 @@ def pkgbase_disown_instance(request: Request, pkgbase: PackageBase) -> None: pkgbase.Maintainer = None db.delete_all(pkgbase.comaintainers) + return notifs + + +def pkgbase_disown_instance(request: Request, pkgbase: PackageBase) -> None: + disowner = request.user + notifs = [notify.DisownNotification(disowner.ID, pkgbase.ID)] + notifs += _retry_disown(request, pkgbase) util.apply_all(notifs, lambda n: n.send()) -def pkgbase_adopt_instance(request: Request, pkgbase: PackageBase) -> None: +@db.retry_deadlock +def _retry_adopt(request: Request, pkgbase: PackageBase) -> None: with db.begin(): pkgbase.Maintainer = request.user + +def pkgbase_adopt_instance(request: Request, pkgbase: PackageBase) -> None: + _retry_adopt(request, pkgbase) notif = notify.AdoptNotification(request.user.ID, pkgbase.ID) notif.send() +@db.retry_deadlock +def _retry_delete(pkgbase: PackageBase, comments: str) -> None: + with db.begin(): + update_closure_comment(pkgbase, DELETION_ID, comments) + db.delete(pkgbase) + + def pkgbase_delete_instance( request: Request, pkgbase: PackageBase, comments: str = str() ) -> list[notify.Notification]: notif = notify.DeleteNotification(request.user.ID, pkgbase.ID) notifs = handle_request(request, DELETION_ID, pkgbase) + [notif] - with db.begin(): - update_closure_comment(pkgbase, DELETION_ID, comments) - db.delete(pkgbase) + _retry_delete(pkgbase, comments) return notifs -def pkgbase_merge_instance( - request: Request, pkgbase: PackageBase, target: PackageBase, comments: str = str() -) -> None: - pkgbasename = str(pkgbase.Name) - - # Create notifications. - notifs = handle_request(request, MERGE_ID, pkgbase, target) - +@db.retry_deadlock +def _retry_merge(pkgbase: PackageBase, target: PackageBase) -> None: # Target votes and notifications sets of user IDs that are # looking to be migrated. 
target_votes = set(v.UsersID for v in target.package_votes) @@ -146,6 +171,20 @@ def pkgbase_merge_instance( db.delete(pkg) db.delete(pkgbase) + +def pkgbase_merge_instance( + request: Request, + pkgbase: PackageBase, + target: PackageBase, + comments: str = str(), +) -> None: + pkgbasename = str(pkgbase.Name) + + # Create notifications. + notifs = handle_request(request, MERGE_ID, pkgbase, target) + + _retry_merge(pkgbase, target) + # Log this out for accountability purposes. logger.info( f"Trusted User '{request.user.Username}' merged " diff --git a/aurweb/pkgbase/util.py b/aurweb/pkgbase/util.py index 223c3013..968135d1 100644 --- a/aurweb/pkgbase/util.py +++ b/aurweb/pkgbase/util.py @@ -106,6 +106,7 @@ def remove_comaintainer( return notif +@db.retry_deadlock def remove_comaintainers(pkgbase: PackageBase, usernames: list[str]) -> None: """ Remove comaintainers from `pkgbase`. @@ -155,6 +156,7 @@ class NoopComaintainerNotification: return +@db.retry_deadlock def add_comaintainer( pkgbase: PackageBase, comaintainer: User ) -> notify.ComaintainerAddNotification: diff --git a/aurweb/ratelimit.py b/aurweb/ratelimit.py index cb08cdf5..97923a52 100644 --- a/aurweb/ratelimit.py +++ b/aurweb/ratelimit.py @@ -38,17 +38,26 @@ def _update_ratelimit_db(request: Request): now = time.utcnow() time_to_delete = now - window_length + @db.retry_deadlock + def retry_delete(records: list[ApiRateLimit]) -> None: + with db.begin(): + db.delete_all(records) + records = db.query(ApiRateLimit).filter(ApiRateLimit.WindowStart < time_to_delete) - with db.begin(): - db.delete_all(records) + retry_delete(records) + + @db.retry_deadlock + def retry_create(record: ApiRateLimit, now: int, host: str) -> ApiRateLimit: + with db.begin(): + if not record: + record = db.create(ApiRateLimit, WindowStart=now, IP=host, Requests=1) + else: + record.Requests += 1 + return record host = request.client.host record = db.query(ApiRateLimit, ApiRateLimit.IP == host).first() - with db.begin(): - if not 
record: - record = db.create(ApiRateLimit, WindowStart=now, IP=host, Requests=1) - else: - record.Requests += 1 + record = retry_create(record, now, host) logger.debug(record.Requests) return record diff --git a/aurweb/routers/accounts.py b/aurweb/routers/accounts.py index db05955a..3937757a 100644 --- a/aurweb/routers/accounts.py +++ b/aurweb/routers/accounts.py @@ -32,6 +32,7 @@ async def passreset(request: Request): return render_template(request, "passreset.html", context) +@db.async_retry_deadlock @router.post("/passreset", response_class=HTMLResponse) @handle_form_exceptions @requires_guest @@ -260,6 +261,7 @@ async def account_register( return render_template(request, "register.html", context) +@db.async_retry_deadlock @router.post("/register", response_class=HTMLResponse) @handle_form_exceptions @requires_guest @@ -336,18 +338,15 @@ async def account_register_post( AccountType=atype, ) - # If a PK was given and either one does not exist or the given - # PK mismatches the existing user's SSHPubKey.PubKey. - if PK: - # Get the second element in the PK, which is the actual key. - keys = util.parse_ssh_keys(PK.strip()) - for k in keys: - pk = " ".join(k) - fprint = get_fingerprint(pk) - with db.begin(): - db.create( - models.SSHPubKey, UserID=user.ID, PubKey=pk, Fingerprint=fprint - ) + # If a PK was given and either one does not exist or the given + # PK mismatches the existing user's SSHPubKey.PubKey. + if PK: + # Get the second element in the PK, which is the actual key. + keys = util.parse_ssh_keys(PK.strip()) + for k in keys: + pk = " ".join(k) + fprint = get_fingerprint(pk) + db.create(models.SSHPubKey, User=user, PubKey=pk, Fingerprint=fprint) # Send a reset key notification to the new user. WelcomeNotification(user.ID).send() @@ -458,6 +457,8 @@ async def account_edit_post( update.password, ] + # These update functions are all guarded by retry_deadlock; + # there's no need to guard this route itself. 
for f in updates: f(**args, request=request, user=user, context=context) @@ -633,6 +634,7 @@ async def terms_of_service(request: Request): return render_terms_of_service(request, context, accept_needed) +@db.async_retry_deadlock @router.post("/tos") @handle_form_exceptions @requires_auth diff --git a/aurweb/routers/auth.py b/aurweb/routers/auth.py index 3f94952e..0e675559 100644 --- a/aurweb/routers/auth.py +++ b/aurweb/routers/auth.py @@ -28,6 +28,11 @@ async def login_get(request: Request, next: str = "/"): return await login_template(request, next) +@db.retry_deadlock +def _retry_login(request: Request, user: User, passwd: str, cookie_timeout: int) -> str: + return user.login(request, passwd, cookie_timeout) + + @router.post("/login", response_class=HTMLResponse) @handle_form_exceptions @requires_guest @@ -48,13 +53,16 @@ async def login_post( status_code=HTTPStatus.BAD_REQUEST, detail=_("Bad Referer header.") ) - with db.begin(): - user = ( - db.query(User) - .filter(or_(User.Username == user, User.Email == user)) - .first() + user = ( + db.query(User) + .filter( + or_( + User.Username == user, + User.Email == user, + ) ) - + .first() + ) if not user: return await login_template(request, next, errors=["Bad username or password."]) @@ -62,7 +70,7 @@ async def login_post( return await login_template(request, next, errors=["Account Suspended"]) cookie_timeout = cookies.timeout(remember_me) - sid = user.login(request, passwd, cookie_timeout) + sid = _retry_login(request, user, passwd, cookie_timeout) if not sid: return await login_template(request, next, errors=["Bad username or password."]) @@ -101,12 +109,17 @@ async def login_post( return response +@db.retry_deadlock +def _retry_logout(request: Request) -> None: + request.user.logout(request) + + @router.post("/logout") @handle_form_exceptions @requires_auth async def logout(request: Request, next: str = Form(default="/")): if request.user.is_authenticated(): - request.user.logout(request) + 
_retry_logout(request) # Use 303 since we may be handling a post request, that'll get it # to redirect to a get request. diff --git a/aurweb/routers/html.py b/aurweb/routers/html.py index 2148d535..da1ffd55 100644 --- a/aurweb/routers/html.py +++ b/aurweb/routers/html.py @@ -35,6 +35,7 @@ async def favicon(request: Request): return RedirectResponse("/static/images/favicon.ico") +@db.async_retry_deadlock @router.post("/language", response_class=RedirectResponse) @handle_form_exceptions async def language( diff --git a/aurweb/routers/pkgbase.py b/aurweb/routers/pkgbase.py index 076aec1e..3b1ab688 100644 --- a/aurweb/routers/pkgbase.py +++ b/aurweb/routers/pkgbase.py @@ -87,6 +87,7 @@ async def pkgbase_flag_comment(request: Request, name: str): return render_template(request, "pkgbase/flag-comment.html", context) +@db.async_retry_deadlock @router.post("/pkgbase/{name}/keywords") @handle_form_exceptions async def pkgbase_keywords( @@ -139,6 +140,7 @@ async def pkgbase_flag_get(request: Request, name: str): return render_template(request, "pkgbase/flag.html", context) +@db.async_retry_deadlock @router.post("/pkgbase/{name}/flag") @handle_form_exceptions @requires_auth @@ -170,6 +172,7 @@ async def pkgbase_flag_post( return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) +@db.async_retry_deadlock @router.post("/pkgbase/{name}/comments") @handle_form_exceptions @requires_auth @@ -279,6 +282,7 @@ async def pkgbase_comment_edit( return render_template(request, "pkgbase/comments/edit.html", context) +@db.async_retry_deadlock @router.post("/pkgbase/{name}/comments/{id}") @handle_form_exceptions @requires_auth @@ -324,6 +328,7 @@ async def pkgbase_comment_post( ) +@db.async_retry_deadlock @router.post("/pkgbase/{name}/comments/{id}/pin") @handle_form_exceptions @requires_auth @@ -362,6 +367,7 @@ async def pkgbase_comment_pin( return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER) +@db.async_retry_deadlock 
@router.post("/pkgbase/{name}/comments/{id}/unpin") @handle_form_exceptions @requires_auth @@ -399,6 +405,7 @@ async def pkgbase_comment_unpin( return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER) +@db.async_retry_deadlock @router.post("/pkgbase/{name}/comments/{id}/delete") @handle_form_exceptions @requires_auth @@ -440,6 +447,7 @@ async def pkgbase_comment_delete( return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER) +@db.async_retry_deadlock @router.post("/pkgbase/{name}/comments/{id}/undelete") @handle_form_exceptions @requires_auth @@ -482,6 +490,7 @@ async def pkgbase_comment_undelete( return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER) +@db.async_retry_deadlock @router.post("/pkgbase/{name}/vote") @handle_form_exceptions @requires_auth @@ -501,6 +510,7 @@ async def pkgbase_vote(request: Request, name: str): return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) +@db.async_retry_deadlock @router.post("/pkgbase/{name}/unvote") @handle_form_exceptions @requires_auth @@ -519,6 +529,7 @@ async def pkgbase_unvote(request: Request, name: str): return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) +@db.async_retry_deadlock @router.post("/pkgbase/{name}/notify") @handle_form_exceptions @requires_auth @@ -528,6 +539,7 @@ async def pkgbase_notify(request: Request, name: str): return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) +@db.async_retry_deadlock @router.post("/pkgbase/{name}/unnotify") @handle_form_exceptions @requires_auth @@ -537,6 +549,7 @@ async def pkgbase_unnotify(request: Request, name: str): return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) +@db.async_retry_deadlock @router.post("/pkgbase/{name}/unflag") @handle_form_exceptions @requires_auth @@ -567,6 +580,7 @@ async def pkgbase_disown_get( return render_template(request, "pkgbase/disown.html", context) +@db.async_retry_deadlock @router.post("/pkgbase/{name}/disown") 
@handle_form_exceptions @requires_auth @@ -617,6 +631,7 @@ async def pkgbase_disown_post( return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER) +@db.async_retry_deadlock @router.post("/pkgbase/{name}/adopt") @handle_form_exceptions @requires_auth @@ -659,6 +674,7 @@ async def pkgbase_comaintainers(request: Request, name: str) -> Response: return render_template(request, "pkgbase/comaintainers.html", context) +@db.async_retry_deadlock @router.post("/pkgbase/{name}/comaintainers") @handle_form_exceptions @requires_auth @@ -715,6 +731,7 @@ async def pkgbase_request( return render_template(request, "pkgbase/request.html", context) +@db.async_retry_deadlock @router.post("/pkgbase/{name}/request") @handle_form_exceptions @requires_auth @@ -817,6 +834,7 @@ async def pkgbase_delete_get( return render_template(request, "pkgbase/delete.html", context) +@db.async_retry_deadlock @router.post("/pkgbase/{name}/delete") @handle_form_exceptions @requires_auth @@ -889,6 +907,7 @@ async def pkgbase_merge_get( ) +@db.async_retry_deadlock @router.post("/pkgbase/{name}/merge") @handle_form_exceptions @requires_auth diff --git a/aurweb/routers/requests.py b/aurweb/routers/requests.py index 51be6d2c..bf86bdcc 100644 --- a/aurweb/routers/requests.py +++ b/aurweb/routers/requests.py @@ -69,6 +69,7 @@ async def request_close(request: Request, id: int): return render_template(request, "requests/close.html", context) +@db.async_retry_deadlock @router.post("/requests/{id}/close") @handle_form_exceptions @requires_auth diff --git a/aurweb/routers/trusted_user.py b/aurweb/routers/trusted_user.py index a84bb6bd..37edb072 100644 --- a/aurweb/routers/trusted_user.py +++ b/aurweb/routers/trusted_user.py @@ -217,6 +217,7 @@ async def trusted_user_proposal(request: Request, proposal: int): return render_proposal(request, context, proposal, voteinfo, voters, vote) +@db.async_retry_deadlock @router.post("/tu/{proposal}") @handle_form_exceptions @requires_auth @@ -267,13 +268,15 @@ async def 
trusted_user_proposal_post( request, context, proposal, voteinfo, voters, vote, status_code=status_code ) - if decision in {"Yes", "No", "Abstain"}: - # Increment whichever decision was given to us. - setattr(voteinfo, decision, getattr(voteinfo, decision) + 1) - else: - return Response("Invalid 'decision' value.", status_code=HTTPStatus.BAD_REQUEST) - with db.begin(): + if decision in {"Yes", "No", "Abstain"}: + # Increment whichever decision was given to us. + setattr(voteinfo, decision, getattr(voteinfo, decision) + 1) + else: + return Response( + "Invalid 'decision' value.", status_code=HTTPStatus.BAD_REQUEST + ) + vote = db.create(models.TUVote, User=request.user, VoteInfo=voteinfo) context["error"] = "You've already voted for this proposal." @@ -301,6 +304,7 @@ async def trusted_user_addvote( return render_template(request, "addvote.html", context) +@db.async_retry_deadlock @router.post("/addvote") @handle_form_exceptions @requires_auth diff --git a/aurweb/users/update.py b/aurweb/users/update.py index 51f2d2e0..6bd4a295 100644 --- a/aurweb/users/update.py +++ b/aurweb/users/update.py @@ -8,6 +8,7 @@ from aurweb.models.ssh_pub_key import get_fingerprint from aurweb.util import strtobool +@db.retry_deadlock def simple( U: str = str(), E: str = str(), @@ -42,6 +43,7 @@ def simple( user.OwnershipNotify = strtobool(ON) +@db.retry_deadlock def language( L: str = str(), request: Request = None, @@ -55,6 +57,7 @@ def language( context["language"] = L +@db.retry_deadlock def timezone( TZ: str = str(), request: Request = None, @@ -68,6 +71,7 @@ def timezone( context["language"] = TZ +@db.retry_deadlock def ssh_pubkey(PK: str = str(), user: models.User = None, **kwargs) -> None: if not PK: # If no pubkey is provided, wipe out any pubkeys the user @@ -101,12 +105,14 @@ def ssh_pubkey(PK: str = str(), user: models.User = None, **kwargs) -> None: ) +@db.retry_deadlock def account_type(T: int = None, user: models.User = None, **kwargs) -> None: if T is not None and (T := 
int(T)) != user.AccountTypeID: with db.begin(): user.AccountTypeID = T +@db.retry_deadlock def password( P: str = str(), request: Request = None, diff --git a/test/test_db.py b/test/test_db.py index 8ac5607d..22dbdd36 100644 --- a/test/test_db.py +++ b/test/test_db.py @@ -5,6 +5,7 @@ import tempfile from unittest import mock import pytest +from sqlalchemy.exc import OperationalError import aurweb.config import aurweb.initdb @@ -226,3 +227,22 @@ def test_name_without_pytest_current_test(): with mock.patch.dict("os.environ", {}, clear=True): dbname = aurweb.db.name() assert dbname == aurweb.config.get("database", "name") + + +def test_retry_deadlock(): + @db.retry_deadlock + def func(): + raise OperationalError("Deadlock found", tuple(), "") + + with pytest.raises(OperationalError): + func() + + +@pytest.mark.asyncio +async def test_async_retry_deadlock(): + @db.async_retry_deadlock + async def func(): + raise OperationalError("Deadlock found", tuple(), "") + + with pytest.raises(OperationalError): + await func() From 30e72d2db5f9b3b863ddc03efda65c37c0a4aa2c Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Sat, 24 Sep 2022 16:51:25 +0000 Subject: [PATCH 123/415] feat: archive git repository (experimental) See doc/git-archive.md for general Git archive specifications See doc/repos/metadata-repo.md for info and direction related to the new Git metadata archive --- aurweb/archives/__init__.py | 1 + aurweb/archives/spec/__init__.py | 1 + aurweb/archives/spec/base.py | 77 ++++++ aurweb/archives/spec/metadata.py | 85 ++++++ aurweb/archives/spec/pkgbases.py | 32 +++ aurweb/archives/spec/pkgnames.py | 33 +++ aurweb/archives/spec/users.py | 26 ++ aurweb/models/package_base.py | 10 + aurweb/pkgbase/util.py | 5 +- aurweb/rpc.py | 76 +++--- aurweb/schema.py | 6 + aurweb/scripts/git_archive.py | 125 +++++++++ aurweb/scripts/mkpkglists.py | 1 + aurweb/scripts/popupdate.py | 14 +- aurweb/testing/git.py | 8 +- aurweb/util.py | 8 + conf/config.defaults | 12 + conf/config.dev | 6 + 
doc/git-archive.md | 75 ++++++ doc/maintenance.txt | 36 ++- doc/repos/metadata-repo.md | 121 +++++++++ doc/repos/pkgbases-repo.md | 15 ++ doc/repos/pkgnames-repo.md | 15 ++ doc/repos/users-repo.md | 15 ++ doc/specs/metadata.md | 14 + doc/specs/pkgbases.md | 14 + doc/specs/pkgnames.md | 14 + doc/specs/popularity.md | 14 + doc/specs/users.md | 14 + ...70_add_popularityupdated_to_packagebase.py | 33 +++ pyproject.toml | 1 + templates/partials/packages/details.html | 2 +- test/test_git_archives.py | 241 ++++++++++++++++++ test/test_templates.py | 4 + 34 files changed, 1104 insertions(+), 50 deletions(-) create mode 100644 aurweb/archives/__init__.py create mode 100644 aurweb/archives/spec/__init__.py create mode 100644 aurweb/archives/spec/base.py create mode 100644 aurweb/archives/spec/metadata.py create mode 100644 aurweb/archives/spec/pkgbases.py create mode 100644 aurweb/archives/spec/pkgnames.py create mode 100644 aurweb/archives/spec/users.py create mode 100644 aurweb/scripts/git_archive.py create mode 100644 doc/git-archive.md create mode 100644 doc/repos/metadata-repo.md create mode 100644 doc/repos/pkgbases-repo.md create mode 100644 doc/repos/pkgnames-repo.md create mode 100644 doc/repos/users-repo.md create mode 100644 doc/specs/metadata.md create mode 100644 doc/specs/pkgbases.md create mode 100644 doc/specs/pkgnames.md create mode 100644 doc/specs/popularity.md create mode 100644 doc/specs/users.md create mode 100644 migrations/versions/6441d3b65270_add_popularityupdated_to_packagebase.py create mode 100644 test/test_git_archives.py diff --git a/aurweb/archives/__init__.py b/aurweb/archives/__init__.py new file mode 100644 index 00000000..47020641 --- /dev/null +++ b/aurweb/archives/__init__.py @@ -0,0 +1 @@ +# aurweb.archives diff --git a/aurweb/archives/spec/__init__.py b/aurweb/archives/spec/__init__.py new file mode 100644 index 00000000..b6e376b4 --- /dev/null +++ b/aurweb/archives/spec/__init__.py @@ -0,0 +1 @@ +# aurweb.archives.spec diff --git 
a/aurweb/archives/spec/base.py b/aurweb/archives/spec/base.py new file mode 100644 index 00000000..60f734f2 --- /dev/null +++ b/aurweb/archives/spec/base.py @@ -0,0 +1,77 @@ +from pathlib import Path +from typing import Any, Dict, Iterable, List, Set + + +class GitInfo: + """Information about a Git repository.""" + + """ Path to Git repository. """ + path: str + + """ Local Git repository configuration. """ + config: Dict[str, Any] + + def __init__(self, path: str, config: Dict[str, Any] = dict()) -> "GitInfo": + self.path = Path(path) + self.config = config + + +class SpecOutput: + """Class used for git_archive.py output details.""" + + """ Filename relative to the Git repository root. """ + filename: Path + + """ Git repository information. """ + git_info: GitInfo + + """ Bytes bound for `SpecOutput.filename`. """ + data: bytes + + def __init__(self, filename: str, git_info: GitInfo, data: bytes) -> "SpecOutput": + self.filename = filename + self.git_info = git_info + self.data = data + + +class SpecBase: + """ + Base for Spec classes defined in git_archive.py --spec modules. + + All supported --spec modules must contain the following classes: + - Spec(SpecBase) + """ + + """ A list of SpecOutputs, each of which contains output file data. """ + outputs: List[SpecOutput] = list() + + """ A set of repositories to commit changes to. """ + repos: Set[str] = set() + + def generate(self) -> Iterable[SpecOutput]: + """ + "Pure virtual" output generator. + + `SpecBase.outputs` and `SpecBase.repos` should be populated within an + overridden version of this function in SpecBase derivatives. + """ + raise NotImplementedError() + + def add_output(self, filename: str, git_info: GitInfo, data: bytes) -> None: + """ + Add a SpecOutput instance to the set of outputs.
+ + :param filename: Filename relative to the git repository root + :param git_info: GitInfo instance + :param data: Binary data bound for `filename` + """ + if git_info.path not in self.repos: + self.repos.add(git_info.path) + + self.outputs.append( + SpecOutput( + filename, + git_info, + data, + ) + ) diff --git a/aurweb/archives/spec/metadata.py b/aurweb/archives/spec/metadata.py new file mode 100644 index 00000000..e7c8e096 --- /dev/null +++ b/aurweb/archives/spec/metadata.py @@ -0,0 +1,85 @@ +from typing import Iterable + +import orjson + +from aurweb import config, db +from aurweb.models import Package, PackageBase, User +from aurweb.rpc import RPC + +from .base import GitInfo, SpecBase, SpecOutput + +ORJSON_OPTS = orjson.OPT_SORT_KEYS | orjson.OPT_INDENT_2 + + +class Spec(SpecBase): + def __init__(self) -> "Spec": + self.metadata_repo = GitInfo( + config.get("git-archive", "metadata-repo"), + ) + + def generate(self) -> Iterable[SpecOutput]: + # Base query used by the RPC. + base_query = ( + db.query(Package) + .join(PackageBase) + .join(User, PackageBase.MaintainerUID == User.ID) + ) + + # Create an instance of RPC, use it to get entities from + # our query and perform a metadata subquery for all packages. 
+ rpc = RPC(version=5, type="info") + print("performing package database query") + packages = rpc.entities(base_query).all() + print("performing package database subqueries") + rpc.subquery({pkg.ID for pkg in packages}) + + pkgbases, pkgnames = dict(), dict() + for package in packages: + # Produce RPC type=info data for `package` + data = rpc.get_info_json_data(package) + + pkgbase_name = data.get("PackageBase") + pkgbase_data = { + "ID": data.pop("PackageBaseID"), + "URLPath": data.pop("URLPath"), + "FirstSubmitted": data.pop("FirstSubmitted"), + "LastModified": data.pop("LastModified"), + "OutOfDate": data.pop("OutOfDate"), + "Maintainer": data.pop("Maintainer"), + "Keywords": data.pop("Keywords"), + "NumVotes": data.pop("NumVotes"), + "Popularity": data.pop("Popularity"), + "PopularityUpdated": package.PopularityUpdated.timestamp(), + } + + # Store the data in `pkgbases` dict. We do this so we only + # end up processing a single `pkgbase` if repeated after + # this loop + pkgbases[pkgbase_name] = pkgbase_data + + # Remove Popularity and NumVotes from package data. + # These fields change quite often which causes git data + # modification to explode. + # data.pop("NumVotes") + # data.pop("Popularity") + + # Remove the ID key from package json. 
+ data.pop("ID") + + # Add the `package`.Name to the pkgnames set + name = data.get("Name") + pkgnames[name] = data + + # Add metadata outputs + self.add_output( + "pkgname.json", + self.metadata_repo, + orjson.dumps(pkgnames, option=ORJSON_OPTS), + ) + self.add_output( + "pkgbase.json", + self.metadata_repo, + orjson.dumps(pkgbases, option=ORJSON_OPTS), + ) + + return self.outputs diff --git a/aurweb/archives/spec/pkgbases.py b/aurweb/archives/spec/pkgbases.py new file mode 100644 index 00000000..9f02c1c6 --- /dev/null +++ b/aurweb/archives/spec/pkgbases.py @@ -0,0 +1,32 @@ +from typing import Iterable + +import orjson + +from aurweb import config, db +from aurweb.models import PackageBase + +from .base import GitInfo, SpecBase, SpecOutput + +ORJSON_OPTS = orjson.OPT_SORT_KEYS | orjson.OPT_INDENT_2 + + +class Spec(SpecBase): + def __init__(self) -> "Spec": + self.pkgbases_repo = GitInfo(config.get("git-archive", "pkgbases-repo")) + + def generate(self) -> Iterable[SpecOutput]: + filt = PackageBase.PackagerUID.isnot(None) + query = ( + db.query(PackageBase.Name) + .filter(filt) + .order_by(PackageBase.Name.asc()) + .all() + ) + pkgbases = [pkgbase.Name for pkgbase in query] + + self.add_output( + "pkgbase.json", + self.pkgbases_repo, + orjson.dumps(pkgbases, option=ORJSON_OPTS), + ) + return self.outputs diff --git a/aurweb/archives/spec/pkgnames.py b/aurweb/archives/spec/pkgnames.py new file mode 100644 index 00000000..c7cd9ea7 --- /dev/null +++ b/aurweb/archives/spec/pkgnames.py @@ -0,0 +1,33 @@ +from typing import Iterable + +import orjson + +from aurweb import config, db +from aurweb.models import Package, PackageBase + +from .base import GitInfo, SpecBase, SpecOutput + +ORJSON_OPTS = orjson.OPT_SORT_KEYS | orjson.OPT_INDENT_2 + + +class Spec(SpecBase): + def __init__(self) -> "Spec": + self.pkgnames_repo = GitInfo(config.get("git-archive", "pkgnames-repo")) + + def generate(self) -> Iterable[SpecOutput]: + filt = PackageBase.PackagerUID.isnot(None) + query = ( 
+ db.query(Package.Name) + .join(PackageBase, PackageBase.ID == Package.PackageBaseID) + .filter(filt) + .order_by(Package.Name.asc()) + .all() + ) + pkgnames = [pkg.Name for pkg in query] + + self.add_output( + "pkgname.json", + self.pkgnames_repo, + orjson.dumps(pkgnames, option=ORJSON_OPTS), + ) + return self.outputs diff --git a/aurweb/archives/spec/users.py b/aurweb/archives/spec/users.py new file mode 100644 index 00000000..80da1641 --- /dev/null +++ b/aurweb/archives/spec/users.py @@ -0,0 +1,26 @@ +from typing import Iterable + +import orjson + +from aurweb import config, db +from aurweb.models import User + +from .base import GitInfo, SpecBase, SpecOutput + +ORJSON_OPTS = orjson.OPT_SORT_KEYS | orjson.OPT_INDENT_2 + + +class Spec(SpecBase): + def __init__(self) -> "Spec": + self.users_repo = GitInfo(config.get("git-archive", "users-repo")) + + def generate(self) -> Iterable[SpecOutput]: + query = db.query(User.Username).order_by(User.Username.asc()).all() + users = [user.Username for user in query] + + self.add_output( + "users.json", + self.users_repo, + orjson.dumps(users, option=ORJSON_OPTS), + ) + return self.outputs diff --git a/aurweb/models/package_base.py b/aurweb/models/package_base.py index bf80233d..26d9165f 100644 --- a/aurweb/models/package_base.py +++ b/aurweb/models/package_base.py @@ -64,3 +64,13 @@ class PackageBase(Base): if key in PackageBase.TO_FLOAT and not isinstance(attr, float): return float(attr) return attr + + +def popularity_decay(pkgbase: PackageBase, utcnow: int): + """Return the delta between now and the last time popularity was updated, in days""" + return int((utcnow - pkgbase.PopularityUpdated.timestamp()) / 86400) + + +def popularity(pkgbase: PackageBase, utcnow: int): + """Return up-to-date popularity""" + return float(pkgbase.Popularity) * (0.98 ** popularity_decay(pkgbase, utcnow)) diff --git a/aurweb/pkgbase/util.py b/aurweb/pkgbase/util.py index 968135d1..46d6e2db 100644 --- a/aurweb/pkgbase/util.py +++ 
b/aurweb/pkgbase/util.py @@ -3,8 +3,9 @@ from typing import Any from fastapi import Request from sqlalchemy import and_ -from aurweb import config, db, defaults, l10n, util +from aurweb import config, db, defaults, l10n, time, util from aurweb.models import PackageBase, User +from aurweb.models.package_base import popularity from aurweb.models.package_comaintainer import PackageComaintainer from aurweb.models.package_comment import PackageComment from aurweb.models.package_request import PENDING_ID, PackageRequest @@ -81,6 +82,8 @@ def make_context( and_(PackageRequest.Status == PENDING_ID, PackageRequest.ClosedTS.is_(None)) ).count() + context["popularity"] = popularity(pkgbase, time.utcnow()) + return context diff --git a/aurweb/rpc.py b/aurweb/rpc.py index 26677f80..515c6ffb 100644 --- a/aurweb/rpc.py +++ b/aurweb/rpc.py @@ -6,9 +6,10 @@ from fastapi.responses import HTMLResponse from sqlalchemy import and_, literal, orm import aurweb.config as config -from aurweb import db, defaults, models +from aurweb import db, defaults, models, time from aurweb.exceptions import RPCError from aurweb.filters import number_format +from aurweb.models.package_base import popularity from aurweb.packages.search import RPCSearch TYPE_MAPPING = { @@ -120,16 +121,15 @@ class RPC: if not args: raise RPCError("No request type/data specified.") - def _get_json_data(self, package: models.Package) -> dict[str, Any]: + def get_json_data(self, package: models.Package) -> dict[str, Any]: """Produce dictionary data of one Package that can be JSON-serialized. :param package: Package instance :returns: JSON-serializable dictionary """ - # Produce RPC API compatible Popularity: If zero, it's an integer - # 0, otherwise, it's formatted to the 6th decimal place. 
- pop = package.Popularity + # Normalize Popularity for RPC output to 6 decimal precision + pop = popularity(package, time.utcnow()) pop = 0 if not pop else float(number_format(pop, 6)) snapshot_uri = config.get("options", "snapshot_uri") @@ -151,8 +151,8 @@ class RPC: "LastModified": package.ModifiedTS, } - def _get_info_json_data(self, package: models.Package) -> dict[str, Any]: - data = self._get_json_data(package) + def get_info_json_data(self, package: models.Package) -> dict[str, Any]: + data = self.get_json_data(package) # All info results have _at least_ an empty list of # License and Keywords. @@ -176,7 +176,7 @@ class RPC: """ return [data_generator(pkg) for pkg in packages] - def _entities(self, query: orm.Query) -> orm.Query: + def entities(self, query: orm.Query) -> orm.Query: """Select specific RPC columns on `query`.""" return query.with_entities( models.Package.ID, @@ -188,38 +188,14 @@ class RPC: models.PackageBase.Name.label("PackageBaseName"), models.PackageBase.NumVotes, models.PackageBase.Popularity, + models.PackageBase.PopularityUpdated, models.PackageBase.OutOfDateTS, models.PackageBase.SubmittedTS, models.PackageBase.ModifiedTS, models.User.Username.label("Maintainer"), ).group_by(models.Package.ID) - def _handle_multiinfo_type( - self, args: list[str] = [], **kwargs - ) -> list[dict[str, Any]]: - self._enforce_args(args) - args = set(args) - - packages = ( - db.query(models.Package) - .join(models.PackageBase) - .join( - models.User, - models.User.ID == models.PackageBase.MaintainerUID, - isouter=True, - ) - .filter(models.Package.Name.in_(args)) - ) - - max_results = config.getint("options", "max_rpc_results") - packages = self._entities(packages).limit(max_results + 1) - - if packages.count() > max_results: - raise RPCError("Too many package results.") - - ids = {pkg.ID for pkg in packages} - - # Aliases for 80-width. 
+ def subquery(self, ids: set[int]): Package = models.Package PackageKeyword = models.PackageKeyword @@ -311,7 +287,33 @@ class RPC: self.extra_info[record.ID][type_].append(name) - return self._assemble_json_data(packages, self._get_info_json_data) + def _handle_multiinfo_type( + self, args: list[str] = [], **kwargs + ) -> list[dict[str, Any]]: + self._enforce_args(args) + args = set(args) + + packages = ( + db.query(models.Package) + .join(models.PackageBase) + .join( + models.User, + models.User.ID == models.PackageBase.MaintainerUID, + isouter=True, + ) + .filter(models.Package.Name.in_(args)) + ) + + max_results = config.getint("options", "max_rpc_results") + packages = self.entities(packages).limit(max_results + 1) + + if packages.count() > max_results: + raise RPCError("Too many package results.") + + ids = {pkg.ID for pkg in packages} + self.subquery(ids) + + return self._assemble_json_data(packages, self.get_info_json_data) def _handle_search_type( self, by: str = defaults.RPC_SEARCH_BY, args: list[str] = [] @@ -330,12 +332,12 @@ class RPC: search.search_by(by, arg) max_results = config.getint("options", "max_rpc_results") - results = self._entities(search.results()).limit(max_results + 1).all() + results = self.entities(search.results()).limit(max_results + 1).all() if len(results) > max_results: raise RPCError("Too many package results.") - return self._assemble_json_data(results, self._get_json_data) + return self._assemble_json_data(results, self.get_json_data) def _handle_msearch_type( self, args: list[str] = [], **kwargs diff --git a/aurweb/schema.py b/aurweb/schema.py index b3b36195..5f998ed9 100644 --- a/aurweb/schema.py +++ b/aurweb/schema.py @@ -155,6 +155,12 @@ PackageBases = Table( nullable=False, server_default=text("0"), ), + Column( + "PopularityUpdated", + TIMESTAMP, + nullable=False, + server_default=text("'1970-01-01 00:00:01.000000'"), + ), Column("OutOfDateTS", BIGINT(unsigned=True)), Column("FlaggerComment", Text, nullable=False), 
Column("SubmittedTS", BIGINT(unsigned=True), nullable=False), diff --git a/aurweb/scripts/git_archive.py b/aurweb/scripts/git_archive.py new file mode 100644 index 00000000..4c909c18 --- /dev/null +++ b/aurweb/scripts/git_archive.py @@ -0,0 +1,125 @@ +import argparse +import importlib +import os +import sys +import traceback +from datetime import datetime + +import orjson +import pygit2 + +from aurweb import config + +# Constants +REF = "refs/heads/master" +ORJSON_OPTS = orjson.OPT_SORT_KEYS | orjson.OPT_INDENT_2 + + +def init_repository(git_info) -> None: + pygit2.init_repository(git_info.path) + repo = pygit2.Repository(git_info.path) + for k, v in git_info.config.items(): + repo.config[k] = v + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument( + "--spec", + type=str, + required=True, + help="name of spec module in the aurweb.archives.spec package", + ) + return parser.parse_args() + + +def update_repository(repo: pygit2.Repository): + # Use git status to determine file changes + has_changes = False + changes = repo.status() + for filepath, flags in changes.items(): + if flags != pygit2.GIT_STATUS_CURRENT: + has_changes = True + break + + if has_changes: + print("diff detected, committing") + # Add everything in the tree. 
+ print("adding files to git tree") + + # Add the tree to staging + repo.index.read() + repo.index.add_all() + repo.index.write() + tree = repo.index.write_tree() + + # Determine base commit; if repo.head.target raises GitError, + # we have no current commits + try: + base = [repo.head.target] + except pygit2.GitError: + base = [] + + utcnow = datetime.utcnow() + author = pygit2.Signature( + config.get("git-archive", "author"), + config.get("git-archive", "author-email"), + int(utcnow.timestamp()), + 0, + ) + + # Commit the changes + timestamp = utcnow.strftime("%Y-%m-%d %H:%M:%S") + title = f"update - {timestamp}" + repo.create_commit(REF, author, author, title, tree, base) + + print("committed changes") + else: + print("no diff detected") + + +def main() -> int: + args = parse_args() + + print(f"loading '{args.spec}' spec") + spec_package = "aurweb.archives.spec" + module_path = f"{spec_package}.{args.spec}" + spec_module = importlib.import_module(module_path) + print(f"loaded '{args.spec}'") + + # Track repositories that the spec modifies. After we run + # through specs, we want to make a single commit for all + # repositories that contain changes. 
+ repos = dict() + + print(f"running '{args.spec}' spec...") + spec = spec_module.Spec() + for output in spec.generate(): + if not os.path.exists(output.git_info.path / ".git"): + init_repository(output.git_info) + + path = output.git_info.path / output.filename + with open(path, "wb") as f: + f.write(output.data) + + if output.git_info.path not in repos: + repos[output.git_info.path] = pygit2.Repository(output.git_info.path) + + print(f"done running '{args.spec}' spec") + + print("processing repositories") + for path in spec.repos: + print(f"processing repository: {path}") + update_repository(pygit2.Repository(path)) + + return 0 + + +if __name__ == "__main__": + try: + sys.exit(main()) + except KeyboardInterrupt: + sys.exit(0) + except Exception: + traceback.print_exc() + sys.exit(1) diff --git a/aurweb/scripts/mkpkglists.py b/aurweb/scripts/mkpkglists.py index 7ca171ab..bfdd12b4 100755 --- a/aurweb/scripts/mkpkglists.py +++ b/aurweb/scripts/mkpkglists.py @@ -188,6 +188,7 @@ def _main(): USERS = aurweb.config.get("mkpkglists", "userfile") bench = Benchmark() + logger.warning(f"{sys.argv[0]} is deprecated and will soon be removed") logger.info("Started re-creating archives, wait a while...") query = ( diff --git a/aurweb/scripts/popupdate.py b/aurweb/scripts/popupdate.py index aa163be1..83506e22 100755 --- a/aurweb/scripts/popupdate.py +++ b/aurweb/scripts/popupdate.py @@ -1,9 +1,10 @@ #!/usr/bin/env python3 +from datetime import datetime from sqlalchemy import and_, func from sqlalchemy.sql.functions import coalesce, sum as _sum -from aurweb import db, time +from aurweb import config, db, time from aurweb.models import PackageBase, PackageVote @@ -46,13 +47,24 @@ def run_variable(pkgbases: list[PackageBase] = []) -> None: ids = set() if pkgbases: + # If `pkgbases` were given, we should forcefully update the given + # package base records' popularities.
ids = {pkgbase.ID for pkgbase in pkgbases} query = query.filter(PackageBase.ID.in_(ids)) + else: + # Otherwise, we should only update popularities which have exceeded + # the popularity interval length. + interval = config.getint("git-archive", "popularity-interval") + query = query.filter( + PackageBase.PopularityUpdated + <= datetime.fromtimestamp((now - interval)) + ) query.update( { "NumVotes": votes_subq.scalar_subquery(), "Popularity": pop_subq.scalar_subquery(), + "PopularityUpdated": datetime.fromtimestamp(now), } ) diff --git a/aurweb/testing/git.py b/aurweb/testing/git.py index 216515c8..39af87de 100644 --- a/aurweb/testing/git.py +++ b/aurweb/testing/git.py @@ -1,6 +1,4 @@ import os -import shlex -from subprocess import PIPE, Popen from typing import Tuple import py @@ -8,6 +6,7 @@ import py from aurweb.models import Package from aurweb.templates import base_template from aurweb.testing.filelock import FileLock +from aurweb.util import shell_exec class GitRepository: @@ -24,10 +23,7 @@ class GitRepository: self.file_lock.lock(on_create=self._setup) def _exec(self, cmdline: str, cwd: str) -> Tuple[int, str, str]: - args = shlex.split(cmdline) - proc = Popen(args, cwd=cwd, stdout=PIPE, stderr=PIPE) - out, err = proc.communicate() - return (proc.returncode, out.decode().strip(), err.decode().strip()) + return shell_exec(cmdline, cwd) def _exec_repository(self, cmdline: str) -> Tuple[int, str, str]: return self._exec(cmdline, cwd=str(self.file_lock.path)) diff --git a/aurweb/util.py b/aurweb/util.py index 4f1bd64e..432b818a 100644 --- a/aurweb/util.py +++ b/aurweb/util.py @@ -1,6 +1,7 @@ import math import re import secrets +import shlex import string from datetime import datetime from http import HTTPStatus @@ -192,3 +193,10 @@ def parse_ssh_key(string: str) -> Tuple[str, str]: def parse_ssh_keys(string: str) -> list[Tuple[str, str]]: """Parse a list of SSH public keys.""" return [parse_ssh_key(e) for e in string.splitlines()] + + +def shell_exec(cmdline: 
str, cwd: str) -> Tuple[int, str, str]: + args = shlex.split(cmdline) + proc = Popen(args, cwd=cwd, stdout=PIPE, stderr=PIPE) + out, err = proc.communicate() + return (proc.returncode, out.decode().strip(), err.decode().strip()) diff --git a/conf/config.defaults b/conf/config.defaults index 722802cc..6cdffe65 100644 --- a/conf/config.defaults +++ b/conf/config.defaults @@ -131,6 +131,18 @@ packagesmetaextfile = /srv/http/aurweb/web/html/packages-meta-ext-v1.json.gz pkgbasefile = /srv/http/aurweb/web/html/pkgbase.gz userfile = /srv/http/aurweb/web/html/users.gz +[git-archive] +author = git_archive.py +author-email = no-reply@archlinux.org + +; One week worth of seconds (86400 * 7) +popularity-interval = 604800 + +metadata-repo = /srv/http/aurweb/metadata.git +users-repo = /srv/http/aurweb/users.git +pkgbases-repo = /srv/http/aurweb/pkgbases.git +pkgnames-repo = /srv/http/aurweb/pkgnames.git + [devel] ; commit_url is a format string used to produce a link to a commit hash. commit_url = https://gitlab.archlinux.org/archlinux/aurweb/-/commits/%s diff --git a/conf/config.dev b/conf/config.dev index 923c34ff..b36bfe77 100644 --- a/conf/config.dev +++ b/conf/config.dev @@ -76,5 +76,11 @@ packagesmetaextfile = /var/lib/aurweb/archives/packages-meta-ext-v1.json.gz pkgbasefile = /var/lib/aurweb/archives/pkgbase.gz userfile = /var/lib/aurweb/archives/users.gz +[git-archive] +metadata-repo = metadata.git +users-repo = users.git +pkgbases-repo = pkgbases.git +pkgnames-repo = pkgnames.git + [aurblup] db-path = YOUR_AUR_ROOT/aurblup/ diff --git a/doc/git-archive.md b/doc/git-archive.md new file mode 100644 index 00000000..cbc148b9 --- /dev/null +++ b/doc/git-archive.md @@ -0,0 +1,75 @@ +# aurweb Git Archive Specification + + + WARNING: This aurweb Git Archive implementation is + experimental and may be changed. 
+ + +## Overview + +This git archive specification refers to the archive git repositories +created by [aurweb/scripts/git_archive.py](aurweb/scripts/git_archive.py) +using [spec modules](#spec-modules). + +## Configuration + +- `[git-archive]` + - `author` + - Git commit author + - `author-email` + - Git commit author email + +See an [official spec](#official-specs)'s documentation for spec-specific +configurations. + +## Fetch/Update Archives + +When a client has not yet fetched any initial archives, they should clone +the repository: + + $ git clone https://aur.archlinux.org/archive.git aurweb-archive + +When updating, the repository is already cloned and changes need to be pulled +from remote: + + # To update: + $ cd aurweb-archive && git pull + +For end-user production applications, see +[Minimize Disk Space](#minimize-disk-space). + +## Minimize Disk Space + +Using `git gc` on the repository will compress revisions and remove +unreachable objects which grow the repository a considerable amount +each commit. It is recommended that the following command is used +after cloning the archive or pulling updates: + + $ cd aurweb-archive && git gc --aggressive + +## Spec Modules + +Each aurweb spec module belongs to the `aurweb.archives.spec` package. For +example: a spec named "example" would be located at +`aurweb.archives.spec.example`. 
+ +[Official spec listings](#official-specs) use the following format: + +- `spec_name` + - Spec description; what this spec produces + - `` + +### Official Specs + +- [metadata](doc/specs/metadata.md) + - Package RPC `type=info` metadata + - [metadata-repo](repos/metadata-repo.md) +- [users](doc/specs/users.md) + - List of users found in the database + - [users-repo](repos/users-repo.md) +- [pkgbases](doc/specs/pkgbases.md) + - List of package bases found in the database + - [pkgbases-repo](repos/pkgbases-repo.md) +- [pkgnames](doc/specs/pkgnames.md) + - List of package names found in the database + - [pkgnames-repo](repos/pkgnames-repo.md) diff --git a/doc/maintenance.txt b/doc/maintenance.txt index c52cf76f..56616f79 100644 --- a/doc/maintenance.txt +++ b/doc/maintenance.txt @@ -70,20 +70,48 @@ computations and clean up the database: * aurweb-pkgmaint automatically removes empty repositories that were created within the last 24 hours but never populated. -* aurweb-mkpkglists generates the package list files; it takes an optional - --extended flag, which additionally produces multiinfo metadata. It also - generates {archive.gz}.sha256 files that should be located within +* [Deprecated] aurweb-mkpkglists generates the package list files; it takes + an optional --extended flag, which additionally produces multiinfo metadata. + It also generates {archive.gz}.sha256 files that should be located within mkpkglists.archivedir which contain a SHA-256 hash of their matching .gz counterpart. * aurweb-usermaint removes the last login IP address of all users that did not login within the past seven days. +* aurweb-git-archive generates Git repository archives based on a --spec. + This script is a new generation of aurweb-mkpkglists, which creates and + maintains Git repository versions of the archives produced by + aurweb-mkpkglists. See doc/git-archive.md for detailed documentation. 
+ These scripts can be installed by running `poetry install` and are usually scheduled using Cron. The current setup is: ---- -*/5 * * * * poetry run aurweb-mkpkglists [--extended] +# Run aurweb-git-archive --spec metadata directly after +# aurweb-mkpkglists so that they are executed sequentially, since +# both scripts are quite heavy. `aurweb-mkpkglists` should be removed +# from here once its deprecation period has ended. +*/5 * * * * poetry run aurweb-mkpkglists [--extended] && poetry run aurweb-git-archive --spec metadata + +# Update popularity once an hour. This is done to reduce the amount +# of changes caused by popularity data. Even if a package is otherwise +# unchanged, popularity is recalculated every 5 minutes via aurweb-popupdate, +# which causes changes for a large chunk of packages. +# +# At this interval, clients can still take advantage of popularity +# data, but its updates are guarded behind hour-long intervals. +*/60 * * * * poetry run aurweb-git-archive --spec popularity + +# Usernames +*/5 * * * * poetry run aurweb-git-archive --spec users + +# Package base names +*/5 * * * * poetry run aurweb-git-archive --spec pkgbases + +# Package names +*/5 * * * * poetry run aurweb-git-archive --spec pkgnames + 1 */2 * * * poetry run aurweb-popupdate 2 */2 * * * poetry run aurweb-aurblup 3 */2 * * * poetry run aurweb-pkgmaint diff --git a/doc/repos/metadata-repo.md b/doc/repos/metadata-repo.md new file mode 100644 index 00000000..cc678f40 --- /dev/null +++ b/doc/repos/metadata-repo.md @@ -0,0 +1,121 @@ +# Repository: metadata-repo + +## Overview + +The resulting repository contains RPC `type=info` JSON data for packages, +split into two different files: + +- `pkgbase.json` contains details about each package base in the AUR +- `pkgname.json` contains details about each package in the AUR + +See [Data](#data) for a breakdown of how data is presented in this +repository based off of a RPC `type=info` base. 
+ +See [File Layout](#file-layout) for a detailed summary of the layout +of these files and the data contained within. + +**NOTE: `Popularity` now requires a client-side calculation, see [Popularity Calculation](#popularity-calculation).** + +## Data + +This repository contains RPC `type=info` data for all packages found +in AUR's database, reorganized to be suitable for Git repository +changes. + +- `pkgname.json` holds Package-specific metadata + - Some fields have been removed from `pkgname.json` objects + - `ID` + - `PackageBaseID -> ID` (moved to `pkgbase.json`) + - `NumVotes` (moved to `pkgbase.json`) + - `Popularity` (moved to `pkgbase.json`) +- `pkgbase.json` holds PackageBase-specific metadata + - Package Base fields from `pkgname.json` have been moved over to + `pkgbase.json` + - `ID` + - `Keywords` + - `FirstSubmitted` + - `LastModified` + - `OutOfDate` + - `Maintainer` + - `URLPath` + - `NumVotes` + - `Popularity` + - `PopularityUpdated` + +## Popularity Calculation + +Clients intending to use popularity data from this archive **must** +perform a decay calculation on their end to reflect a close approximation +of up-to-date popularity. + +Putting this step onto the client allows the server to maintain +less popularity record updates, dramatically improving archiving +of popularity data. The same calculation is done on the server-side +when producing outputs for RPC `type=info` and package pages. + +``` +Let T = Current UTC timestamp in seconds +Let PU = PopularityUpdated timestamp in seconds + +# The delta between now and PU in days +Let D = (T - PU) / 86400 + +# Calculate up-to-date popularity: +P = Popularity * (0.98^D) +``` + +We can see that the resulting up-to-date popularity value decays as +the exponent is increased: +- `1.0 * (0.98^1) = 0.98` +- `1.0 * (0.98^2) = 0.96039999` +- ... + +This decay calculation is essentially pushing back the date found for +votes by the exponent, which takes into account the time-factor. 
However,
+since this calculation is based off of decimals and exponents, it
+eventually becomes imprecise. The AUR updates these records on a forced
+interval and whenever a vote is added to or removed from a particular package
+to avoid imprecision from being an issue for clients.
+
+## File Layout
+
+#### pkgbase.json:
+
+    {
+        "pkgbase1": {
+            "FirstSubmitted": 123456,
+            "ID": 1,
+            "LastModified": 123456,
+            "Maintainer": "kevr",
+            "OutOfDate": null,
+            "URLPath": "/cgit/aur.git/snapshot/pkgbase1.tar.gz",
+            "NumVotes": 1,
+            "Popularity": 1.0,
+            "PopularityUpdated": 12345567753.0
+        },
+        ...
+    }
+
+#### pkgname.json:
+
+    {
+        "pkg1": {
+            "CheckDepends": [], # Only included if a check dependency exists
+            "Conflicts": [], # Only included if a conflict exists
+            "Depends": [], # Only included if a dependency exists
+            "Description": "some description",
+            "Groups": [], # Only included if a group exists
+            "ID": 1,
+            "Keywords": [],
+            "License": [],
+            "MakeDepends": [], # Only included if a make dependency exists
+            "Name": "pkg1",
+            "OptDepends": [], # Only included if an opt dependency exists
+            "PackageBase": "pkgbase1",
+            "Provides": [], # Only included if `provides` is defined
+            "Replaces": [], # Only included if `replaces` is defined
+            "URL": "https://some_url.com",
+            "Version": "1.0-1"
+        },
+        ...
+    }
diff --git a/doc/repos/pkgbases-repo.md b/doc/repos/pkgbases-repo.md
new file mode 100644
index 00000000..f4cb896f
--- /dev/null
+++ b/doc/repos/pkgbases-repo.md
@@ -0,0 +1,15 @@
+# Repository: pkgbases-repo
+
+## Overview
+
+- `pkgbase.json` contains a list of package base names
+
+## File Layout
+
+### pkgbase.json:
+
+    [
+        "pkgbase1",
+        "pkgbase2",
+        ...
+ ] diff --git a/doc/repos/pkgnames-repo.md b/doc/repos/pkgnames-repo.md new file mode 100644 index 00000000..ae6fb4ed --- /dev/null +++ b/doc/repos/pkgnames-repo.md @@ -0,0 +1,15 @@ +# Repository: pkgnames-repo + +## Overview + +- `pkgname.json` contains a list of package names + +## File Layout + +### pkgname.json: + + [ + "pkgname1", + "pkgname2", + ... + ] diff --git a/doc/repos/users-repo.md b/doc/repos/users-repo.md new file mode 100644 index 00000000..23db9cfb --- /dev/null +++ b/doc/repos/users-repo.md @@ -0,0 +1,15 @@ +# Repository: users-repo + +## Overview + +- `users.json` contains a list of usernames + +## File Layout + +### users.json: + + [ + "user1", + "user2", + ... + ] diff --git a/doc/specs/metadata.md b/doc/specs/metadata.md new file mode 100644 index 00000000..282c0dd5 --- /dev/null +++ b/doc/specs/metadata.md @@ -0,0 +1,14 @@ +# Git Archive Spec: metadata + +## Configuration + +- `[git-archive]` + - `metadata-repo` + - Path to package metadata git repository location + +## Repositories + +For documentation on each one of these repositories, follow their link, +which brings you to a topical markdown for that repository. + +- [metadata-repo](doc/repos/metadata-repo.md) diff --git a/doc/specs/pkgbases.md b/doc/specs/pkgbases.md new file mode 100644 index 00000000..80279070 --- /dev/null +++ b/doc/specs/pkgbases.md @@ -0,0 +1,14 @@ +# Git Archive Spec: pkgbases + +## Configuration + +- `[git-archive]` + - `pkgbases-repo` + - Path to pkgbases git repository location + +## Repositories + +For documentation on each one of these repositories, follow their link, +which brings you to a topical markdown for that repository. 
+ +- [pkgbases-repo](doc/repos/pkgbases-repo.md) diff --git a/doc/specs/pkgnames.md b/doc/specs/pkgnames.md new file mode 100644 index 00000000..0a4a907d --- /dev/null +++ b/doc/specs/pkgnames.md @@ -0,0 +1,14 @@ +# Git Archive Spec: pkgnames + +## Configuration + +- `[git-archive]` + - `pkgnames-repo` + - Path to pkgnames git repository location + +## Repositories + +For documentation on each one of these repositories, follow their link, +which brings you to a topical markdown for that repository. + +- [pkgnames-repo](doc/repos/pkgnames-repo.md) diff --git a/doc/specs/popularity.md b/doc/specs/popularity.md new file mode 100644 index 00000000..3084f458 --- /dev/null +++ b/doc/specs/popularity.md @@ -0,0 +1,14 @@ +# Git Archive Spec: popularity + +## Configuration + +- `[git-archive]` + - `popularity-repo` + - Path to popularity git repository location + +## Repositories + +For documentation on each one of these repositories, follow their link, +which brings you to a topical markdown for that repository. + +- [popularity-repo](doc/repos/popularity-repo.md) diff --git a/doc/specs/users.md b/doc/specs/users.md new file mode 100644 index 00000000..25396154 --- /dev/null +++ b/doc/specs/users.md @@ -0,0 +1,14 @@ +# Git Archive Spec: users + +## Configuration + +- `[git-archive]` + - `users-repo` + - Path to users git repository location + +## Repositories + +For documentation on each one of these repositories, follow their link, +which brings you to a topical markdown for that repository. 
+ +- [users-repo](doc/repos/users-repo.md) diff --git a/migrations/versions/6441d3b65270_add_popularityupdated_to_packagebase.py b/migrations/versions/6441d3b65270_add_popularityupdated_to_packagebase.py new file mode 100644 index 00000000..afa87687 --- /dev/null +++ b/migrations/versions/6441d3b65270_add_popularityupdated_to_packagebase.py @@ -0,0 +1,33 @@ +"""add PopularityUpdated to PackageBase + +Revision ID: 6441d3b65270 +Revises: d64e5571bc8d +Create Date: 2022-09-22 18:08:03.280664 + +""" +from alembic import op +from sqlalchemy.exc import OperationalError + +from aurweb.models.package_base import PackageBase +from aurweb.scripts import popupdate + +# revision identifiers, used by Alembic. +revision = "6441d3b65270" +down_revision = "d64e5571bc8d" +branch_labels = None +depends_on = None + +table = PackageBase.__table__ + + +def upgrade(): + try: + op.add_column(table.name, table.c.PopularityUpdated) + except OperationalError: + print(f"table '{table.name}' already exists, skipping migration") + + popupdate.run_variable() + + +def downgrade(): + op.drop_column(table.name, "PopularityUpdated") diff --git a/pyproject.toml b/pyproject.toml index f732f2e7..775ece09 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -117,3 +117,4 @@ aurweb-tuvotereminder = "aurweb.scripts.tuvotereminder:main" aurweb-usermaint = "aurweb.scripts.usermaint:main" aurweb-config = "aurweb.scripts.config:main" aurweb-adduser = "aurweb.scripts.adduser:main" +aurweb-git-archive = "aurweb.scripts.git_archive:main" diff --git a/templates/partials/packages/details.html b/templates/partials/packages/details.html index 86bc1de5..8ecf9bd8 100644 --- a/templates/partials/packages/details.html +++ b/templates/partials/packages/details.html @@ -149,7 +149,7 @@ - + diff --git a/test/test_git_archives.py b/test/test_git_archives.py new file mode 100644 index 00000000..8ee4c2ba --- /dev/null +++ b/test/test_git_archives.py @@ -0,0 +1,241 @@ +from http import HTTPStatus +from typing import Tuple 
+from unittest import mock + +import py +import pygit2 +import pytest +from fastapi.testclient import TestClient + +from aurweb import asgi, config, db +from aurweb.archives.spec.base import GitInfo, SpecBase +from aurweb.models import Package, PackageBase, User +from aurweb.scripts import git_archive +from aurweb.testing.requests import Request + + +@pytest.fixture +def mock_metadata_archive( + tmp_path: py.path.local, +) -> Tuple[py.path.local, py.path.local]: + metadata_path = tmp_path / "metadata.git" + + get_ = config.get + + def mock_config(section: str, option: str) -> str: + if section == "git-archive": + if option == "metadata-repo": + return str(metadata_path) + return get_(section, option) + + with mock.patch("aurweb.config.get", side_effect=mock_config): + yield metadata_path + + +@pytest.fixture +def mock_users_archive(tmp_path: py.path.local) -> py.path.local: + users_path = tmp_path / "users.git" + + get_ = config.get + + def mock_config(section: str, option: str) -> str: + if section == "git-archive": + if option == "users-repo": + return str(users_path) + return get_(section, option) + + with mock.patch("aurweb.config.get", side_effect=mock_config): + yield users_path + + +@pytest.fixture +def mock_pkgbases_archive(tmp_path: py.path.local) -> py.path.local: + pkgbases_path = tmp_path / "pkgbases.git" + + get_ = config.get + + def mock_config(section: str, option: str) -> str: + if section == "git-archive": + if option == "pkgbases-repo": + return str(pkgbases_path) + return get_(section, option) + + with mock.patch("aurweb.config.get", side_effect=mock_config): + yield pkgbases_path + + +@pytest.fixture +def mock_pkgnames_archive(tmp_path: py.path.local) -> py.path.local: + pkgnames_path = tmp_path / "pkgnames.git" + + get_ = config.get + + def mock_config(section: str, option: str) -> str: + if section == "git-archive": + if option == "pkgnames-repo": + return str(pkgnames_path) + return get_(section, option) + + with 
mock.patch("aurweb.config.get", side_effect=mock_config): + yield pkgnames_path + + +@pytest.fixture +def metadata(mock_metadata_archive: py.path.local) -> py.path.local: + args = [__name__, "--spec", "metadata"] + with mock.patch("sys.argv", args): + yield mock_metadata_archive + + +@pytest.fixture +def users(mock_users_archive: py.path.local) -> py.path.local: + args = [__name__, "--spec", "users"] + with mock.patch("sys.argv", args): + yield mock_users_archive + + +@pytest.fixture +def pkgbases(mock_pkgbases_archive: py.path.local) -> py.path.local: + args = [__name__, "--spec", "pkgbases"] + with mock.patch("sys.argv", args): + yield mock_pkgbases_archive + + +@pytest.fixture +def pkgnames(mock_pkgnames_archive: py.path.local) -> py.path.local: + args = [__name__, "--spec", "pkgnames"] + with mock.patch("sys.argv", args): + yield mock_pkgnames_archive + + +@pytest.fixture +def client() -> TestClient: + yield TestClient(app=asgi.app) + + +@pytest.fixture +def user(db_test: None) -> User: + with db.begin(): + user_ = db.create( + User, + Username="test", + Email="test@example.org", + Passwd="testPassword", + ) + + yield user_ + + +@pytest.fixture +def package(user: User) -> Package: + with db.begin(): + pkgbase_ = db.create( + PackageBase, + Name="test", + Maintainer=user, + Packager=user, + ) + + pkg_ = db.create( + Package, + PackageBase=pkgbase_, + Name="test", + ) + + yield pkg_ + + +def commit_count(repo: pygit2.Repository) -> int: + commits = 0 + for _ in repo.walk(repo.head.target): + commits += 1 + return commits + + +def test_specbase_raises_notimplementederror(): + spec = SpecBase() + with pytest.raises(NotImplementedError): + spec.generate() + + +def test_gitinfo_config(tmpdir: py.path.local): + path = tmpdir / "test.git" + git_info = GitInfo(path, {"user.name": "Test Person"}) + git_archive.init_repository(git_info) + + repo = pygit2.Repository(path) + assert repo.config["user.name"] == "Test Person" + + +def test_metadata(metadata: py.path.local, 
package: Package): + # Run main(), which creates mock_metadata_archive and commits current + # package data to it, exercising the "diff detected, committing" path + assert git_archive.main() == 0 + repo = pygit2.Repository(metadata) + assert commit_count(repo) == 1 + + # Run main() again to exercise the "no diff detected" path + assert git_archive.main() == 0 + repo = pygit2.Repository(metadata) + assert commit_count(repo) == 1 + + +def test_metadata_change( + client: TestClient, metadata: py.path.local, user: User, package: Package +): + """Test that metadata changes via aurweb cause git_archive to produce diffs.""" + # Run main(), which creates mock_metadata_archive and commits current + # package data to it, exercising the "diff detected, committing" path + assert git_archive.main() == 0 + repo = pygit2.Repository(metadata) + assert commit_count(repo) == 1 + + # Now, we modify `package`-related metadata via aurweb POST. + pkgbasename = package.PackageBase.Name + cookies = {"AURSID": user.login(Request(), "testPassword")} + + with client as request: + endp = f"/pkgbase/{pkgbasename}/keywords" + post_data = {"keywords": "abc def"} + resp = request.post(endp, data=post_data, cookies=cookies, allow_redirects=True) + assert resp.status_code == HTTPStatus.OK + + # Run main() again, which should now produce a new commit with the + # keyword changes we just made + assert git_archive.main() == 0 + repo = pygit2.Repository(metadata) + assert commit_count(repo) == 2 + + +def test_metadata_delete(client: TestClient, metadata: py.path.local, package: Package): + # Run main(), which creates mock_metadata_archive and commits current + # package data to it, exercising the "diff detected, committing" path + assert git_archive.main() == 0 + repo = pygit2.Repository(metadata) + assert commit_count(repo) == 1 + + with db.begin(): + db.delete(package) + + # The deletion here should have caused a diff to be produced in git + assert git_archive.main() == 0 + repo = 
pygit2.Repository(metadata) + assert commit_count(repo) == 2 + + +def test_users(users: py.path.local, user: User): + assert git_archive.main() == 0 + repo = pygit2.Repository(users) + assert commit_count(repo) == 1 + + +def test_pkgbases(pkgbases: py.path.local, package: Package): + assert git_archive.main() == 0 + repo = pygit2.Repository(pkgbases) + assert commit_count(repo) == 1 + + +def test_pkgnames(pkgnames: py.path.local, package: Package): + assert git_archive.main() == 0 + repo = pygit2.Repository(pkgnames) + assert commit_count(repo) == 1 diff --git a/test/test_templates.py b/test/test_templates.py index f80e68eb..2ff31fc9 100644 --- a/test/test_templates.py +++ b/test/test_templates.py @@ -9,6 +9,7 @@ from aurweb.filters import as_timezone, number_format, timestamp_to_datetime as from aurweb.models import Package, PackageBase, User from aurweb.models.account_type import USER_ID from aurweb.models.license import License +from aurweb.models.package_base import popularity from aurweb.models.package_license import PackageLicense from aurweb.models.package_relation import PackageRelation from aurweb.models.relation_type import PROVIDES_ID, REPLACES_ID @@ -287,12 +288,14 @@ def test_package_details(user: User, package: Package): """Test package details with most fields populated, but not all.""" request = Request(user=user, authenticated=True) context = make_context(request, "Test Details") + context.update( { "request": request, "git_clone_uri_anon": GIT_CLONE_URI_ANON, "git_clone_uri_priv": GIT_CLONE_URI_PRIV, "pkgbase": package.PackageBase, + "popularity": popularity(package.PackageBase, time.utcnow()), "package": package, "comaintainers": [], } @@ -329,6 +332,7 @@ def test_package_details_filled(user: User, package: Package): "git_clone_uri_anon": GIT_CLONE_URI_ANON, "git_clone_uri_priv": GIT_CLONE_URI_PRIV, "pkgbase": package.PackageBase, + "popularity": popularity(package.PackageBase, time.utcnow()), "package": package, "comaintainers": [], "licenses": 
package.package_licenses, From 137644e9192c8421b0a78a1b955910eed09e9276 Mon Sep 17 00:00:00 2001 From: moson-mo Date: Sun, 25 Sep 2022 10:03:05 +0200 Subject: [PATCH 124/415] docs: suggest shallow clone in git-archive.md we should be suggesting to make a shallow clone to reduce the amount of data that is being transferred initially Signed-off-by: moson-mo --- doc/git-archive.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/git-archive.md b/doc/git-archive.md index cbc148b9..d7c80f76 100644 --- a/doc/git-archive.md +++ b/doc/git-archive.md @@ -24,10 +24,10 @@ configurations. ## Fetch/Update Archives -When a client has not yet fetched any initial archives, they should clone -the repository: +When a client has not yet fetched any initial archives, they should +shallow-clone the repository: - $ git clone https://aur.archlinux.org/archive.git aurweb-archive + $ git clone --depth=1 https://aur.archlinux.org/archive.git aurweb-archive When updating, the repository is already cloned and changes need to be pulled from remote: From 0dddaeeb98ea13dfa10a0462af178dc50481333f Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Fri, 23 Sep 2022 13:31:50 +0100 Subject: [PATCH 125/415] fix: remove sessions of suspended users Fixes: #394 Signed-off-by: Leonidas Spyropoulos --- aurweb/routers/accounts.py | 2 ++ aurweb/users/update.py | 16 +++++++++ test/test_accounts_routes.py | 70 +++++++++++++++++++++++++++++++----- 3 files changed, 80 insertions(+), 8 deletions(-) diff --git a/aurweb/routers/accounts.py b/aurweb/routers/accounts.py index 3937757a..524ef814 100644 --- a/aurweb/routers/accounts.py +++ b/aurweb/routers/accounts.py @@ -412,6 +412,7 @@ async def account_edit_post( TZ: str = Form(aurweb.config.get("options", "default_timezone")), P: str = Form(default=str()), # New Password C: str = Form(default=None), # Password Confirm + S: bool = Form(default=False), # Suspended PK: str = Form(default=None), # PubKey CN: bool = Form(default=False), 
# Comment Notify UN: bool = Form(default=False), # Update Notify @@ -455,6 +456,7 @@ async def account_edit_post( update.ssh_pubkey, update.account_type, update.password, + update.suspend, ] # These update functions are all guarded by retry_deadlock; diff --git a/aurweb/users/update.py b/aurweb/users/update.py index 6bd4a295..df41f843 100644 --- a/aurweb/users/update.py +++ b/aurweb/users/update.py @@ -134,3 +134,19 @@ def password( # If the target user is the request user, login with # the updated password to update the Session record. user.login(request, P, cookies.timeout(remember_me)) + + +@db.retry_deadlock +def suspend( + S: bool = False, + request: Request = None, + user: models.User = None, + context: dict[str, Any] = {}, + **kwargs, +) -> None: + if S and user.session: + context["S"] = None + with db.begin(): + db.delete_all( + db.query(models.Session).filter(models.Session.UsersID == user.ID) + ) diff --git a/test/test_accounts_routes.py b/test/test_accounts_routes.py index eab8fa4f..b6dce19e 100644 --- a/test/test_accounts_routes.py +++ b/test/test_accounts_routes.py @@ -9,6 +9,7 @@ import lxml.html import pytest from fastapi.testclient import TestClient +import aurweb.config import aurweb.models.account_type as at from aurweb import captcha, db, logging, time from aurweb.asgi import app @@ -35,6 +36,9 @@ logger = logging.get_logger(__name__) # Some test global constants. TEST_USERNAME = "test" TEST_EMAIL = "test@example.org" +TEST_REFERER = { + "referer": aurweb.config.get("options", "aur_location") + "/login", +} def make_ssh_pubkey(): @@ -61,7 +65,12 @@ def setup(db_test): @pytest.fixture def client() -> TestClient: - yield TestClient(app=app) + client = TestClient(app=app) + + # Necessary for forged login CSRF protection on the login route. Set here + # instead of only on the necessary requests for convenience. 
+ client.headers.update(TEST_REFERER) + yield client def create_user(username: str) -> User: @@ -1003,13 +1012,8 @@ def test_post_account_edit_suspended(client: TestClient, user: User): # Make sure the user record got updated correctly. assert user.Suspended - - post_data.update({"S": False}) - with client as request: - resp = request.post(endpoint, data=post_data, cookies=cookies) - assert resp.status_code == int(HTTPStatus.OK) - - assert not user.Suspended + # Let's make sure the DB got updated properly. + assert user.session is None def test_post_account_edit_error_unauthorized(client: TestClient, user: User): @@ -1262,6 +1266,56 @@ def test_post_account_edit_other_user_type_as_tu( assert expected in caplog.text +def test_post_account_edit_other_user_suspend_as_tu(client: TestClient, tu_user: User): + with db.begin(): + user = create_user("test3") + # Create a session for user + sid = user.login(Request(), "testPassword") + assert sid is not None + + # `user` needs its own TestClient, to keep its AURSID cookies + # apart from `tu_user`s during our testing. + user_client = TestClient(app=app) + user_client.headers.update(TEST_REFERER) + + # Test that `user` can view their account edit page while logged in. + user_cookies = {"AURSID": sid} + with client as request: + endpoint = f"/account/{user.Username}/edit" + resp = request.get(endpoint, cookies=user_cookies, allow_redirects=False) + assert resp.status_code == HTTPStatus.OK + + cookies = {"AURSID": tu_user.login(Request(), "testPassword")} + assert cookies is not None # This is useless, we create the dict here ^ + # As a TU, we can see the Account for other users. + with client as request: + resp = request.get(endpoint, cookies=cookies, allow_redirects=False) + assert resp.status_code == int(HTTPStatus.OK) + # As a TU, we can modify other user's account types. 
+ data = { + "U": user.Username, + "E": user.Email, + "S": True, + "passwd": "testPassword", + } + with client as request: + resp = request.post(endpoint, data=data, cookies=cookies) + assert resp.status_code == int(HTTPStatus.OK) + + # Test that `user` no longer has a session. + with user_client as request: + resp = request.get(endpoint, cookies=user_cookies, allow_redirects=False) + assert resp.status_code == HTTPStatus.SEE_OTHER + + # Since user is now suspended, they should not be able to login. + data = {"user": user.Username, "passwd": "testPassword", "next": "/"} + with user_client as request: + resp = request.post("/login", data=data) + assert resp.status_code == HTTPStatus.OK + errors = get_errors(resp.text) + assert errors[0].text.strip() == "Account Suspended" + + def test_post_account_edit_other_user_type_as_tu_invalid_type( client: TestClient, tu_user: User, caplog: pytest.LogCaptureFixture ): From e00b0059f75cb467d4eeab7fb8f8332bbc67288d Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 26 Sep 2022 01:27:37 -0700 Subject: [PATCH 126/415] doc: remove --spec popularity from cron recommendations Signed-off-by: Kevin Morris --- doc/maintenance.txt | 9 --------- 1 file changed, 9 deletions(-) diff --git a/doc/maintenance.txt b/doc/maintenance.txt index 56616f79..dacf2b60 100644 --- a/doc/maintenance.txt +++ b/doc/maintenance.txt @@ -94,15 +94,6 @@ usually scheduled using Cron. The current setup is: # from here once its deprecation period has ended. */5 * * * * poetry run aurweb-mkpkglists [--extended] && poetry run aurweb-git-archive --spec metadata -# Update popularity once an hour. This is done to reduce the amount -# of changes caused by popularity data. Even if a package is otherwise -# unchanged, popularity is recalculated every 5 minutes via aurweb-popupdate, -# which causes changes for a large chunk of packages. 
-# -# At this interval, clients can still take advantage of popularity -# data, but its updates are guarded behind hour-long intervals. -*/60 * * * * poetry run aurweb-git-archive --spec popularity - # Usernames */5 * * * * poetry run aurweb-git-archive --spec users From eb0c5605e491d51ee1ade8431934bad78f7b141e Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 26 Sep 2022 01:28:38 -0700 Subject: [PATCH 127/415] upgrade: bump version to v6.1.5 Signed-off-by: Kevin Morris --- aurweb/config.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aurweb/config.py b/aurweb/config.py index c1f87984..83b965e3 100644 --- a/aurweb/config.py +++ b/aurweb/config.py @@ -5,7 +5,7 @@ from typing import Any # Publicly visible version of aurweb. This is used to display # aurweb versioning in the footer and must be maintained. # Todo: Make this dynamic/automated. -AURWEB_VERSION = "v6.1.4" +AURWEB_VERSION = "v6.1.5" _parser = None diff --git a/pyproject.toml b/pyproject.toml index 775ece09..46d8806f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ combine_as_imports = true # [tool.poetry] name = "aurweb" -version = "v6.1.4" +version = "v6.1.5" license = "GPL-2.0-only" description = "Source code for the Arch User Repository's website" homepage = "https://aur.archlinux.org" From 8657fd336e4c47dce3eaf78988944658f85bd64e Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Thu, 29 Sep 2022 17:43:26 -0700 Subject: [PATCH 128/415] feat: GET|POST /account/{name}/delete Closes #348 Signed-off-by: Kevin Morris --- aurweb/models/package_vote.py | 2 +- aurweb/models/session.py | 2 +- aurweb/routers/accounts.py | 76 ++++++++++++++++++++++++- po/aurweb.pot | 4 ++ templates/account/delete.html | 43 ++++++++++++++ test/test_accounts_routes.py | 103 ++++++++++++++++++++++++++++++++++ 6 files changed, 226 insertions(+), 4 deletions(-) create mode 100644 templates/account/delete.html diff --git a/aurweb/models/package_vote.py 
b/aurweb/models/package_vote.py index fa769bb6..b9e233d9 100644 --- a/aurweb/models/package_vote.py +++ b/aurweb/models/package_vote.py @@ -14,7 +14,7 @@ class PackageVote(Base): User = relationship( _User, - backref=backref("package_votes", lazy="dynamic"), + backref=backref("package_votes", lazy="dynamic", cascade="all, delete"), foreign_keys=[__table__.c.UsersID], ) diff --git a/aurweb/models/session.py b/aurweb/models/session.py index d3d69f8c..ff97f017 100644 --- a/aurweb/models/session.py +++ b/aurweb/models/session.py @@ -13,7 +13,7 @@ class Session(Base): User = relationship( _User, - backref=backref("session", uselist=False), + backref=backref("session", cascade="all, delete", uselist=False), foreign_keys=[__table__.c.UsersID], ) diff --git a/aurweb/routers/accounts.py b/aurweb/routers/accounts.py index 524ef814..12e59b30 100644 --- a/aurweb/routers/accounts.py +++ b/aurweb/routers/accounts.py @@ -3,13 +3,13 @@ import typing from http import HTTPStatus from typing import Any -from fastapi import APIRouter, Form, Request +from fastapi import APIRouter, Form, HTTPException, Request from fastapi.responses import HTMLResponse, RedirectResponse from sqlalchemy import and_, or_ import aurweb.config from aurweb import cookies, db, l10n, logging, models, util -from aurweb.auth import account_type_required, requires_auth, requires_guest +from aurweb.auth import account_type_required, creds, requires_auth, requires_guest from aurweb.captcha import get_captcha_salts from aurweb.exceptions import ValidationError, handle_form_exceptions from aurweb.l10n import get_translator_for_request @@ -598,6 +598,78 @@ async def accounts_post( return render_template(request, "account/index.html", context) +@router.get("/account/{name}/delete") +@requires_auth +async def account_delete(request: Request, name: str): + user = db.query(models.User).filter(models.User.Username == name).first() + if not user: + raise HTTPException(status_code=HTTPStatus.NOT_FOUND) + + has_cred = 
request.user.has_credential(creds.ACCOUNT_EDIT, approved=[user]) + if not has_cred: + _ = l10n.get_translator_for_request(request) + raise HTTPException( + detail=_("You do not have permission to edit this account."), + status_code=HTTPStatus.UNAUTHORIZED, + ) + + context = make_context(request, "Accounts") + context["name"] = name + return render_template(request, "account/delete.html", context) + + +@db.async_retry_deadlock +@router.post("/account/{name}/delete") +@handle_form_exceptions +@requires_auth +async def account_delete_post( + request: Request, + name: str, + passwd: str = Form(default=str()), + confirm: bool = Form(default=False), +): + user = db.query(models.User).filter(models.User.Username == name).first() + if not user: + raise HTTPException(status_code=HTTPStatus.NOT_FOUND) + + has_cred = request.user.has_credential(creds.ACCOUNT_EDIT, approved=[user]) + if not has_cred: + _ = l10n.get_translator_for_request(request) + raise HTTPException( + detail=_("You do not have permission to edit this account."), + status_code=HTTPStatus.UNAUTHORIZED, + ) + + context = make_context(request, "Accounts") + context["name"] = name + + confirm = util.strtobool(confirm) + if not confirm: + context["errors"] = [ + "The account has not been deleted, check the confirmation checkbox." 
+ ] + return render_template( + request, + "account/delete.html", + context, + status_code=HTTPStatus.BAD_REQUEST, + ) + + if not request.user.valid_password(passwd): + context["errors"] = ["Invalid password."] + return render_template( + request, + "account/delete.html", + context, + status_code=HTTPStatus.BAD_REQUEST, + ) + + with db.begin(): + db.delete(user) + + return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER) + + def render_terms_of_service(request: Request, context: dict, terms: typing.Iterable): if not terms: return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER) diff --git a/po/aurweb.pot b/po/aurweb.pot index bc4bab84..1838fae5 100644 --- a/po/aurweb.pot +++ b/po/aurweb.pot @@ -2346,3 +2346,7 @@ msgstr "" #: templates/partials/packages/package_metadata.html msgid "dependencies" msgstr "" + +#: aurweb/routers/accounts.py +msgid "The account has not been deleted, check the confirmation checkbox." +msgstr "" diff --git a/templates/account/delete.html b/templates/account/delete.html new file mode 100644 index 00000000..625d3c2d --- /dev/null +++ b/templates/account/delete.html @@ -0,0 +1,43 @@ +{% extends "partials/layout.html" %} + +{% block pageContent %} +
    +

    {{ "Accounts" | tr }}

    + + {% include "partials/error.html" %} + +

    + {{ + "You can use this form to permanently delete the AUR account %s%s%s." + | tr | format("", name, "") | safe + }} +

    + +

    + {{ + "%sWARNING%s: This action cannot be undone." + | tr | format("", "") | safe + }} +

    + + +
    +
    +

    + + +

    +

    + +

    +

    + +

    +
    + + +
    +{% endblock %} diff --git a/test/test_accounts_routes.py b/test/test_accounts_routes.py index b6dce19e..f4034a9a 100644 --- a/test/test_accounts_routes.py +++ b/test/test_accounts_routes.py @@ -1949,3 +1949,106 @@ def test_accounts_unauthorized(client: TestClient, user: User): resp = request.get("/accounts", cookies=cookies, allow_redirects=False) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == "/" + + +def test_account_delete_self_unauthorized(client: TestClient, tu_user: User): + with db.begin(): + user = create_user("some_user") + user2 = create_user("user2") + + cookies = {"AURSID": user.login(Request(), "testPassword")} + endpoint = f"/account/{user2.Username}/delete" + with client as request: + resp = request.get(endpoint, cookies=cookies) + assert resp.status_code == HTTPStatus.UNAUTHORIZED + + resp = request.post(endpoint, cookies=cookies) + assert resp.status_code == HTTPStatus.UNAUTHORIZED + + # But a TU does have access + cookies = {"AURSID": tu_user.login(Request(), "testPassword")} + with TestClient(app=app) as request: + resp = request.get(endpoint, cookies=cookies) + assert resp.status_code == HTTPStatus.OK + + +def test_account_delete_self_not_found(client: TestClient, user: User): + cookies = {"AURSID": user.login(Request(), "testPassword")} + endpoint = "/account/non-existent-user/delete" + with client as request: + resp = request.get(endpoint, cookies=cookies) + assert resp.status_code == HTTPStatus.NOT_FOUND + + resp = request.post(endpoint, cookies=cookies) + assert resp.status_code == HTTPStatus.NOT_FOUND + + +def test_account_delete_self(client: TestClient, user: User): + username = user.Username + + # Confirm that we can view our own account deletion page + cookies = {"AURSID": user.login(Request(), "testPassword")} + endpoint = f"/account/{username}/delete" + with client as request: + resp = request.get(endpoint, cookies=cookies) + assert resp.status_code == HTTPStatus.OK + + # The checkbox 
must be checked + with client as request: + resp = request.post( + endpoint, + data={"passwd": "fakePassword", "confirm": False}, + cookies=cookies, + ) + assert resp.status_code == HTTPStatus.BAD_REQUEST + errors = get_errors(resp.text) + assert ( + errors[0].text.strip() + == "The account has not been deleted, check the confirmation checkbox." + ) + + # The correct password must be supplied + with client as request: + resp = request.post( + endpoint, + data={"passwd": "fakePassword", "confirm": True}, + cookies=cookies, + ) + assert resp.status_code == HTTPStatus.BAD_REQUEST + errors = get_errors(resp.text) + assert errors[0].text.strip() == "Invalid password." + + # Supply everything correctly and delete ourselves + with client as request: + resp = request.post( + endpoint, + data={"passwd": "testPassword", "confirm": True}, + cookies=cookies, + ) + assert resp.status_code == HTTPStatus.SEE_OTHER + + # Check that our User record no longer exists in the database + record = db.query(User).filter(User.Username == username).first() + assert record is None + + +def test_account_delete_as_tu(client: TestClient, tu_user: User): + with db.begin(): + user = create_user("user2") + + cookies = {"AURSID": tu_user.login(Request(), "testPassword")} + username = user.Username + endpoint = f"/account/{username}/delete" + + # Delete the user + with client as request: + resp = request.post( + endpoint, + data={"passwd": "testPassword", "confirm": True}, + cookies=cookies, + ) + assert resp.status_code == HTTPStatus.SEE_OTHER + + # Check that our User record no longer exists in the database + record = db.query(User).filter(User.Username == username).first() + assert record is None From 3ae6323a7ccf9d2637255c522e0ff8371f7ace20 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Fri, 30 Sep 2022 05:19:58 -0700 Subject: [PATCH 129/415] upgrade: bump to v6.1.6 Signed-off-by: Kevin Morris --- aurweb/config.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) 
diff --git a/aurweb/config.py b/aurweb/config.py index 83b965e3..c9f36e51 100644 --- a/aurweb/config.py +++ b/aurweb/config.py @@ -5,7 +5,7 @@ from typing import Any # Publicly visible version of aurweb. This is used to display # aurweb versioning in the footer and must be maintained. # Todo: Make this dynamic/automated. -AURWEB_VERSION = "v6.1.5" +AURWEB_VERSION = "v6.1.6" _parser = None diff --git a/pyproject.toml b/pyproject.toml index 46d8806f..77d136db 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ combine_as_imports = true # [tool.poetry] name = "aurweb" -version = "v6.1.5" +version = "v6.1.6" license = "GPL-2.0-only" description = "Source code for the Arch User Repository's website" homepage = "https://aur.archlinux.org" From 18f5e142b9180648763c5513e2f123dbcfde67b4 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 11 Oct 2022 14:50:09 -0700 Subject: [PATCH 130/415] fix: include orphaned packages in metadata output Signed-off-by: Kevin Morris --- aurweb/archives/spec/metadata.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aurweb/archives/spec/metadata.py b/aurweb/archives/spec/metadata.py index e7c8e096..ce7c6f30 100644 --- a/aurweb/archives/spec/metadata.py +++ b/aurweb/archives/spec/metadata.py @@ -22,7 +22,7 @@ class Spec(SpecBase): base_query = ( db.query(Package) .join(PackageBase) - .join(User, PackageBase.MaintainerUID == User.ID) + .join(User, PackageBase.MaintainerUID == User.ID, isouter=True) ) # Create an instance of RPC, use it to get entities from From da5a646a731eab817d6bc2b2ebf54bb1dec58e23 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 11 Oct 2022 15:04:25 -0700 Subject: [PATCH 131/415] upgrade: bump to v6.1.7 Signed-off-by: Kevin Morris --- aurweb/config.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aurweb/config.py b/aurweb/config.py index c9f36e51..e8ca70d9 100644 --- a/aurweb/config.py +++ b/aurweb/config.py @@ -5,7 +5,7 @@ from typing 
import Any # Publicly visible version of aurweb. This is used to display # aurweb versioning in the footer and must be maintained. # Todo: Make this dynamic/automated. -AURWEB_VERSION = "v6.1.6" +AURWEB_VERSION = "v6.1.7" _parser = None diff --git a/pyproject.toml b/pyproject.toml index 77d136db..fea2f922 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ combine_as_imports = true # [tool.poetry] name = "aurweb" -version = "v6.1.6" +version = "v6.1.7" license = "GPL-2.0-only" description = "Source code for the Arch User Repository's website" homepage = "https://aur.archlinux.org" From b757e66997579b1d5e5c25a444894a6ac246577d Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Tue, 12 Jul 2022 15:12:38 +0100 Subject: [PATCH 132/415] feature: add filters and stats for requests Signed-off-by: Leonidas Spyropoulos --- aurweb/routers/requests.py | 46 ++++++++++++++++++++++++++--- templates/requests.html | 59 ++++++++++++++++++++++++++++++++++++++ test/test_requests.py | 16 ++++++++++- 3 files changed, 116 insertions(+), 5 deletions(-) diff --git a/aurweb/routers/requests.py b/aurweb/routers/requests.py index bf86bdcc..ca5fae73 100644 --- a/aurweb/routers/requests.py +++ b/aurweb/routers/requests.py @@ -8,7 +8,12 @@ from aurweb import db, defaults, time, util from aurweb.auth import creds, requires_auth from aurweb.exceptions import handle_form_exceptions from aurweb.models import PackageRequest -from aurweb.models.package_request import PENDING_ID, REJECTED_ID +from aurweb.models.package_request import ( + ACCEPTED_ID, + CLOSED_ID, + PENDING_ID, + REJECTED_ID, +) from aurweb.requests.util import get_pkgreq_by_id from aurweb.scripts import notify from aurweb.templates import make_context, render_template @@ -22,26 +27,59 @@ async def requests( request: Request, O: int = Query(default=defaults.O), PP: int = Query(default=defaults.PP), + filter_pending: bool = False, + filter_closed: bool = False, + filter_accepted: bool = False, + filter_rejected: 
bool = False, ): context = make_context(request, "Requests") context["q"] = dict(request.query_params) + if len(dict(request.query_params)) == 0: + filter_pending = True + O, PP = util.sanitize_params(O, PP) context["O"] = O context["PP"] = PP + context["filter_pending"] = filter_pending + context["filter_closed"] = filter_closed + context["filter_accepted"] = filter_accepted + context["filter_rejected"] = filter_rejected # A PackageRequest query query = db.query(PackageRequest) + # Requests statistics + context["total_requests"] = query.count() + pending_count = 0 + query.filter(PackageRequest.Status == PENDING_ID).count() + context["pending_requests"] = pending_count + closed_count = 0 + query.filter(PackageRequest.Status == CLOSED_ID).count() + context["closed_requests"] = closed_count + accepted_count = 0 + query.filter(PackageRequest.Status == ACCEPTED_ID).count() + context["accepted_requests"] = accepted_count + rejected_count = 0 + query.filter(PackageRequest.Status == REJECTED_ID).count() + context["rejected_requests"] = rejected_count + + # Apply filters + in_filters = [] + if filter_pending: + in_filters.append(PENDING_ID) + if filter_closed: + in_filters.append(CLOSED_ID) + if filter_accepted: + in_filters.append(ACCEPTED_ID) + if filter_rejected: + in_filters.append(REJECTED_ID) + filtered = query.filter(PackageRequest.Status.in_(in_filters)) # If the request user is not elevated (TU or Dev), then # filter PackageRequests which are owned by the request user. if not request.user.is_elevated(): - query = query.filter(PackageRequest.UsersID == request.user.ID) + filtered = filtered.filter(PackageRequest.UsersID == request.user.ID) - context["total"] = query.count() + context["total"] = filtered.count() context["results"] = ( - query.order_by( + filtered.order_by( # Order primarily by the Status column being PENDING_ID, # and secondarily by RequestTS; both in descending order. 
case([(PackageRequest.Status == PENDING_ID, 1)], else_=0).desc(), diff --git a/templates/requests.html b/templates/requests.html index ed8f31fb..9037855c 100644 --- a/templates/requests.html +++ b/templates/requests.html @@ -4,6 +4,65 @@ {% set plural = "%d package requests found." %} {% block pageContent %} +
    +

    {{ "Requests" | tr }}

    +

    {{ "Total Statistics" | tr }}

    +
    {{ "Git Clone URL" | tr }}:
    {{ "Description" | tr }}:{{ pkg.Description }}{{ package.Description }}
    {{ "Upstream URL" | tr }}: - {% if pkg.URL %} - {{ pkg.URL }} + {% if package.URL %} + {{ package.URL }} {% else %} {{ "None" | tr }} {% endif %} From 25e05830a670b0ca99d007eabf2d8c127a13ce9a Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Mon, 5 Sep 2022 19:50:41 -0700 Subject: [PATCH 092/415] test: test that /packages/{name} produces the package's description This commit fixes two of our tests in test_templates.py to go along with our new template modifications, as well as a new test in test_packages_routes.py which constructs two packages belonging to the same package base, then tests that viewing their pages produces their independent descriptions. Signed-off-by: Kevin Morris --- templates/partials/packages/details.html | 2 +- test/test_packages_routes.py | 44 ++++++++++++++++++++++++ test/test_templates.py | 4 +-- 3 files changed, 47 insertions(+), 3 deletions(-) diff --git a/templates/partials/packages/details.html b/templates/partials/packages/details.html index cdb62128..86bc1de5 100644 --- a/templates/partials/packages/details.html +++ b/templates/partials/packages/details.html @@ -17,7 +17,7 @@
    {{ "Description" | tr }}: {{ package.Description }}
    {{ "Popularity" | tr }}:{{ pkgbase.Popularity | number_format(6 if pkgbase.Popularity <= 0.2 else 2) }}{{ popularity | number_format(6 if popularity <= 0.2 else 2) }}
    {{ "First Submitted" | tr }}:
    + + + + + + + + + + + + + + + + + + + + + + +
    {{ "Total" | tr }}:{{ total_requests }}
    {{ "Pending" | tr }}:{{ pending_requests }}
    {{ "Closed" | tr }}:{{ closed_requests }}
    {{ "Accepted" | tr }}:{{ accepted_requests }}
    {{ "Rejected" | tr }}:{{ rejected_requests }}
    +

    {{ "Filters" | tr }}

    +
    +
    +
    + {{ "Select filter criteria" | tr }} +
    + + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    + +
    +
    +
    +
    +
    {% if not total %}

    {{ "No requests matched your search criteria." | tr }}

    diff --git a/test/test_requests.py b/test/test_requests.py index 83cdb402..344b9edc 100644 --- a/test/test_requests.py +++ b/test/test_requests.py @@ -717,6 +717,10 @@ def test_requests( "O": 0, # Page 1 "SeB": "nd", "SB": "n", + "filter_pending": True, + "filter_closed": True, + "filter_accepted": True, + "filter_rejected": True, }, cookies=cookies, ) @@ -732,7 +736,17 @@ def test_requests( # Request page 2 of the requests page. with client as request: - resp = request.get("/requests", params={"O": 50}, cookies=cookies) # Page 2 + resp = request.get( + "/requests", + params={ + "O": 50, + "filter_pending": True, + "filter_closed": True, + "filter_accepted": True, + "filter_rejected": True, + }, + cookies=cookies, + ) # Page 2 assert resp.status_code == int(HTTPStatus.OK) assert "‹ Previous" in resp.text From 9c0f8f053ecaa2a34473dcf4b6b45c2d6812df96 Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Fri, 6 May 2022 20:22:07 +0100 Subject: [PATCH 133/415] chore: rename logging.py and redis.py to avoid circular imports Signed-off-by: Leonidas Spyropoulos --- aurweb/asgi.py | 7 +++---- aurweb/{logging.py => aur_logging.py} | 0 aurweb/{redis.py => aur_redis.py} | 4 ++-- aurweb/initdb.py | 2 +- aurweb/models/user.py | 4 ++-- aurweb/packages/util.py | 2 +- aurweb/pkgbase/actions.py | 4 ++-- aurweb/prometheus.py | 4 ++-- aurweb/ratelimit.py | 6 +++--- aurweb/routers/accounts.py | 4 ++-- aurweb/routers/html.py | 6 +++--- aurweb/routers/packages.py | 4 ++-- aurweb/routers/pkgbase.py | 4 ++-- aurweb/routers/trusted_user.py | 4 ++-- aurweb/scripts/mkpkglists.py | 4 ++-- aurweb/scripts/notify.py | 4 ++-- aurweb/scripts/rendercomment.py | 4 ++-- aurweb/testing/alpm.py | 4 ++-- aurweb/testing/filelock.py | 4 ++-- aurweb/users/validate.py | 4 ++-- aurweb/util.py | 4 ++-- test/conftest.py | 4 ++-- test/test_accounts_routes.py | 4 ++-- test/test_asgi.py | 6 +++--- test/test_homepage.py | 2 +- test/test_logging.py | 4 ++-- test/test_packages_util.py | 2 +- 
test/test_ratelimit.py | 6 +++--- test/test_redis.py | 24 ++++++++++++------------ test/test_rendercomment.py | 4 ++-- test/test_rpc.py | 2 +- test/test_rss.py | 4 ++-- 32 files changed, 72 insertions(+), 73 deletions(-) rename aurweb/{logging.py => aur_logging.py} (100%) rename aurweb/{redis.py => aur_redis.py} (95%) diff --git a/aurweb/asgi.py b/aurweb/asgi.py index 72b47b4c..b172626f 100644 --- a/aurweb/asgi.py +++ b/aurweb/asgi.py @@ -22,19 +22,18 @@ from starlette.middleware.sessions import SessionMiddleware import aurweb.captcha # noqa: F401 import aurweb.config import aurweb.filters # noqa: F401 -import aurweb.logging import aurweb.pkgbase.util as pkgbaseutil -from aurweb import logging, prometheus, util +from aurweb import aur_logging, prometheus, util +from aurweb.aur_redis import redis_connection from aurweb.auth import BasicAuthBackend from aurweb.db import get_engine, query from aurweb.models import AcceptedTerm, Term from aurweb.packages.util import get_pkg_or_base from aurweb.prometheus import instrumentator -from aurweb.redis import redis_connection from aurweb.routers import APP_ROUTES from aurweb.templates import make_context, render_template -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) # Setup the FastAPI app. 
app = FastAPI() diff --git a/aurweb/logging.py b/aurweb/aur_logging.py similarity index 100% rename from aurweb/logging.py rename to aurweb/aur_logging.py diff --git a/aurweb/redis.py b/aurweb/aur_redis.py similarity index 95% rename from aurweb/redis.py rename to aurweb/aur_redis.py index af179b9b..ec66df19 100644 --- a/aurweb/redis.py +++ b/aurweb/aur_redis.py @@ -2,9 +2,9 @@ import fakeredis from redis import ConnectionPool, Redis import aurweb.config -from aurweb import logging +from aurweb import aur_logging -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) pool = None diff --git a/aurweb/initdb.py b/aurweb/initdb.py index ded4330d..ee59212c 100644 --- a/aurweb/initdb.py +++ b/aurweb/initdb.py @@ -3,8 +3,8 @@ import argparse import alembic.command import alembic.config +import aurweb.aur_logging import aurweb.db -import aurweb.logging import aurweb.schema diff --git a/aurweb/models/user.py b/aurweb/models/user.py index 0d638677..9846d996 100644 --- a/aurweb/models/user.py +++ b/aurweb/models/user.py @@ -10,12 +10,12 @@ from sqlalchemy.orm import backref, relationship import aurweb.config import aurweb.models.account_type import aurweb.schema -from aurweb import db, logging, schema, time, util +from aurweb import aur_logging, db, schema, time, util from aurweb.models.account_type import AccountType as _AccountType from aurweb.models.ban import is_banned from aurweb.models.declarative import Base -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) SALT_ROUNDS_DEFAULT = 12 diff --git a/aurweb/packages/util.py b/aurweb/packages/util.py index b6ba7e20..cddec0ac 100644 --- a/aurweb/packages/util.py +++ b/aurweb/packages/util.py @@ -7,11 +7,11 @@ from fastapi import HTTPException from sqlalchemy import orm from aurweb import config, db, models +from aurweb.aur_redis import redis_connection from aurweb.models import Package from aurweb.models.official_provider import OFFICIAL_BASE, OfficialProvider from 
aurweb.models.package_dependency import PackageDependency from aurweb.models.package_relation import PackageRelation -from aurweb.redis import redis_connection from aurweb.templates import register_filter Providers = list[Union[PackageRelation, OfficialProvider]] diff --git a/aurweb/pkgbase/actions.py b/aurweb/pkgbase/actions.py index a453cb36..56ba738d 100644 --- a/aurweb/pkgbase/actions.py +++ b/aurweb/pkgbase/actions.py @@ -1,6 +1,6 @@ from fastapi import Request -from aurweb import db, logging, util +from aurweb import aur_logging, db, util from aurweb.auth import creds from aurweb.models import PackageBase, User from aurweb.models.package_comaintainer import PackageComaintainer @@ -10,7 +10,7 @@ from aurweb.packages.requests import handle_request, update_closure_comment from aurweb.pkgbase import util as pkgbaseutil from aurweb.scripts import notify, popupdate -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) @db.retry_deadlock diff --git a/aurweb/prometheus.py b/aurweb/prometheus.py index 0bbea4be..b8b7984f 100644 --- a/aurweb/prometheus.py +++ b/aurweb/prometheus.py @@ -5,9 +5,9 @@ from prometheus_fastapi_instrumentator import Instrumentator from prometheus_fastapi_instrumentator.metrics import Info from starlette.routing import Match, Route -from aurweb import logging +from aurweb import aur_logging -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) _instrumentator = Instrumentator() diff --git a/aurweb/ratelimit.py b/aurweb/ratelimit.py index 97923a52..ea191972 100644 --- a/aurweb/ratelimit.py +++ b/aurweb/ratelimit.py @@ -1,11 +1,11 @@ from fastapi import Request from redis.client import Pipeline -from aurweb import config, db, logging, time +from aurweb import aur_logging, config, db, time +from aurweb.aur_redis import redis_connection from aurweb.models import ApiRateLimit -from aurweb.redis import redis_connection -logger = logging.get_logger(__name__) +logger = 
aur_logging.get_logger(__name__) def _update_ratelimit_redis(request: Request, pipeline: Pipeline): diff --git a/aurweb/routers/accounts.py b/aurweb/routers/accounts.py index 12e59b30..24aacdf7 100644 --- a/aurweb/routers/accounts.py +++ b/aurweb/routers/accounts.py @@ -8,7 +8,7 @@ from fastapi.responses import HTMLResponse, RedirectResponse from sqlalchemy import and_, or_ import aurweb.config -from aurweb import cookies, db, l10n, logging, models, util +from aurweb import aur_logging, cookies, db, l10n, models, util from aurweb.auth import account_type_required, creds, requires_auth, requires_guest from aurweb.captcha import get_captcha_salts from aurweb.exceptions import ValidationError, handle_form_exceptions @@ -22,7 +22,7 @@ from aurweb.users import update, validate from aurweb.users.util import get_user_by_name router = APIRouter() -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) @router.get("/passreset", response_class=HTMLResponse) diff --git a/aurweb/routers/html.py b/aurweb/routers/html.py index da1ffd55..f5e6657f 100644 --- a/aurweb/routers/html.py +++ b/aurweb/routers/html.py @@ -16,7 +16,7 @@ from sqlalchemy import and_, case, or_ import aurweb.config import aurweb.models.package_request -from aurweb import cookies, db, logging, models, time, util +from aurweb import aur_logging, cookies, db, models, time, util from aurweb.cache import db_count_cache from aurweb.exceptions import handle_form_exceptions from aurweb.models.account_type import TRUSTED_USER_AND_DEV_ID, TRUSTED_USER_ID @@ -24,7 +24,7 @@ from aurweb.models.package_request import PENDING_ID from aurweb.packages.util import query_notified, query_voted, updated_packages from aurweb.templates import make_context, render_template -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) router = APIRouter() @@ -80,7 +80,7 @@ async def index(request: Request): bases = db.query(models.PackageBase) - redis = aurweb.redis.redis_connection() + 
redis = aurweb.aur_redis.redis_connection() cache_expire = 300 # Five minutes. # Package statistics. diff --git a/aurweb/routers/packages.py b/aurweb/routers/packages.py index 55d2abf5..0d482521 100644 --- a/aurweb/routers/packages.py +++ b/aurweb/routers/packages.py @@ -5,7 +5,7 @@ from typing import Any from fastapi import APIRouter, Form, Query, Request, Response import aurweb.filters # noqa: F401 -from aurweb import config, db, defaults, logging, models, util +from aurweb import aur_logging, config, db, defaults, models, util from aurweb.auth import creds, requires_auth from aurweb.exceptions import InvariantError, handle_form_exceptions from aurweb.models.relation_type import CONFLICTS_ID, PROVIDES_ID, REPLACES_ID @@ -15,7 +15,7 @@ from aurweb.packages.util import get_pkg_or_base from aurweb.pkgbase import actions as pkgbase_actions, util as pkgbaseutil from aurweb.templates import make_context, make_variable_context, render_template -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) router = APIRouter() diff --git a/aurweb/routers/pkgbase.py b/aurweb/routers/pkgbase.py index 3b1ab688..9dab76f8 100644 --- a/aurweb/routers/pkgbase.py +++ b/aurweb/routers/pkgbase.py @@ -4,7 +4,7 @@ from fastapi import APIRouter, Form, HTTPException, Query, Request, Response from fastapi.responses import JSONResponse, RedirectResponse from sqlalchemy import and_ -from aurweb import config, db, l10n, logging, templates, time, util +from aurweb import aur_logging, config, db, l10n, templates, time, util from aurweb.auth import creds, requires_auth from aurweb.exceptions import InvariantError, ValidationError, handle_form_exceptions from aurweb.models import PackageBase @@ -21,7 +21,7 @@ from aurweb.scripts import notify, popupdate from aurweb.scripts.rendercomment import update_comment_render_fastapi from aurweb.templates import make_variable_context, render_template -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) 
router = APIRouter() diff --git a/aurweb/routers/trusted_user.py b/aurweb/routers/trusted_user.py index 37edb072..4248347d 100644 --- a/aurweb/routers/trusted_user.py +++ b/aurweb/routers/trusted_user.py @@ -7,7 +7,7 @@ from fastapi import APIRouter, Form, HTTPException, Request from fastapi.responses import RedirectResponse, Response from sqlalchemy import and_, func, or_ -from aurweb import db, l10n, logging, models, time +from aurweb import aur_logging, db, l10n, models, time from aurweb.auth import creds, requires_auth from aurweb.exceptions import handle_form_exceptions from aurweb.models import User @@ -15,7 +15,7 @@ from aurweb.models.account_type import TRUSTED_USER_AND_DEV_ID, TRUSTED_USER_ID from aurweb.templates import make_context, make_variable_context, render_template router = APIRouter() -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) # Some TU route specific constants. ITEMS_PER_PAGE = 10 # Paged table size. diff --git a/aurweb/scripts/mkpkglists.py b/aurweb/scripts/mkpkglists.py index bfdd12b4..e74bbf25 100755 --- a/aurweb/scripts/mkpkglists.py +++ b/aurweb/scripts/mkpkglists.py @@ -32,11 +32,11 @@ import orjson from sqlalchemy import literal, orm import aurweb.config -from aurweb import db, filters, logging, models, util +from aurweb import aur_logging, db, filters, models, util from aurweb.benchmark import Benchmark from aurweb.models import Package, PackageBase, User -logger = logging.get_logger("aurweb.scripts.mkpkglists") +logger = aur_logging.get_logger("aurweb.scripts.mkpkglists") TYPE_MAP = { diff --git a/aurweb/scripts/notify.py b/aurweb/scripts/notify.py index f19438bb..93108cd3 100755 --- a/aurweb/scripts/notify.py +++ b/aurweb/scripts/notify.py @@ -13,7 +13,7 @@ import aurweb.config import aurweb.db import aurweb.filters import aurweb.l10n -from aurweb import db, logging +from aurweb import aur_logging, db from aurweb.models import PackageBase, User from aurweb.models.package_comaintainer import 
PackageComaintainer from aurweb.models.package_comment import PackageComment @@ -22,7 +22,7 @@ from aurweb.models.package_request import PackageRequest from aurweb.models.request_type import RequestType from aurweb.models.tu_vote import TUVote -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) aur_location = aurweb.config.get("options", "aur_location") diff --git a/aurweb/scripts/rendercomment.py b/aurweb/scripts/rendercomment.py index ff6fe09c..4a2c84bd 100755 --- a/aurweb/scripts/rendercomment.py +++ b/aurweb/scripts/rendercomment.py @@ -9,10 +9,10 @@ import markdown import pygit2 import aurweb.config -from aurweb import db, logging, util +from aurweb import aur_logging, db, util from aurweb.models import PackageComment -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) class LinkifyExtension(markdown.extensions.Extension): diff --git a/aurweb/testing/alpm.py b/aurweb/testing/alpm.py index ddafb710..61a9315f 100644 --- a/aurweb/testing/alpm.py +++ b/aurweb/testing/alpm.py @@ -4,10 +4,10 @@ import re import shutil import subprocess -from aurweb import logging, util +from aurweb import aur_logging, util from aurweb.templates import base_template -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) class AlpmDatabase: diff --git a/aurweb/testing/filelock.py b/aurweb/testing/filelock.py index 33b42cb3..d582f0bf 100644 --- a/aurweb/testing/filelock.py +++ b/aurweb/testing/filelock.py @@ -4,9 +4,9 @@ from typing import Callable from posix_ipc import O_CREAT, Semaphore -from aurweb import logging +from aurweb import aur_logging -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) def default_on_create(path): diff --git a/aurweb/users/validate.py b/aurweb/users/validate.py index 6c27a0b7..8fc68864 100644 --- a/aurweb/users/validate.py +++ b/aurweb/users/validate.py @@ -9,7 +9,7 @@ when encountering invalid criteria and return silently otherwise. 
from fastapi import Request from sqlalchemy import and_ -from aurweb import config, db, l10n, logging, models, time, util +from aurweb import aur_logging, config, db, l10n, models, time, util from aurweb.auth import creds from aurweb.captcha import get_captcha_answer, get_captcha_salts, get_captcha_token from aurweb.exceptions import ValidationError @@ -17,7 +17,7 @@ from aurweb.models.account_type import ACCOUNT_TYPE_NAME from aurweb.models.ssh_pub_key import get_fingerprint from aurweb.util import strtobool -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) def invalid_fields(E: str = str(), U: str = str(), **kwargs) -> None: diff --git a/aurweb/util.py b/aurweb/util.py index 432b818a..cda12af1 100644 --- a/aurweb/util.py +++ b/aurweb/util.py @@ -15,9 +15,9 @@ from email_validator import EmailSyntaxError, validate_email from fastapi.responses import JSONResponse import aurweb.config -from aurweb import defaults, logging +from aurweb import aur_logging, defaults -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) def make_random_string(length: int) -> str: diff --git a/test/conftest.py b/test/conftest.py index aac221f7..15a982aa 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -52,12 +52,12 @@ from sqlalchemy.orm import scoped_session import aurweb.config import aurweb.db -from aurweb import initdb, logging, testing +from aurweb import aur_logging, initdb, testing from aurweb.testing.email import Email from aurweb.testing.filelock import FileLock from aurweb.testing.git import GitRepository -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) # Synchronization lock for database setup. 
setup_lock = Lock() diff --git a/test/test_accounts_routes.py b/test/test_accounts_routes.py index f4034a9a..33baa0ea 100644 --- a/test/test_accounts_routes.py +++ b/test/test_accounts_routes.py @@ -11,7 +11,7 @@ from fastapi.testclient import TestClient import aurweb.config import aurweb.models.account_type as at -from aurweb import captcha, db, logging, time +from aurweb import aur_logging, captcha, db, time from aurweb.asgi import app from aurweb.db import create, query from aurweb.models.accepted_term import AcceptedTerm @@ -31,7 +31,7 @@ from aurweb.models.user import User from aurweb.testing.html import get_errors from aurweb.testing.requests import Request -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) # Some test global constants. TEST_USERNAME = "test" diff --git a/test/test_asgi.py b/test/test_asgi.py index 6ff80fa3..3b794c76 100644 --- a/test/test_asgi.py +++ b/test/test_asgi.py @@ -10,17 +10,17 @@ from fastapi import HTTPException from fastapi.testclient import TestClient import aurweb.asgi +import aurweb.aur_redis import aurweb.config -import aurweb.redis from aurweb.exceptions import handle_form_exceptions from aurweb.testing.requests import Request @pytest.fixture def setup(db_test, email_test): - aurweb.redis.redis_connection().flushall() + aurweb.aur_redis.redis_connection().flushall() yield - aurweb.redis.redis_connection().flushall() + aurweb.aur_redis.redis_connection().flushall() @pytest.fixture diff --git a/test/test_homepage.py b/test/test_homepage.py index 5490a244..521f71c4 100644 --- a/test/test_homepage.py +++ b/test/test_homepage.py @@ -7,6 +7,7 @@ from fastapi.testclient import TestClient from aurweb import db, time from aurweb.asgi import app +from aurweb.aur_redis import redis_connection from aurweb.models.account_type import USER_ID from aurweb.models.package import Package from aurweb.models.package_base import PackageBase @@ -14,7 +15,6 @@ from aurweb.models.package_comaintainer import 
PackageComaintainer from aurweb.models.package_request import PackageRequest from aurweb.models.request_type import DELETION_ID, RequestType from aurweb.models.user import User -from aurweb.redis import redis_connection from aurweb.testing.html import parse_root from aurweb.testing.requests import Request diff --git a/test/test_logging.py b/test/test_logging.py index 63092d07..90d13c93 100644 --- a/test/test_logging.py +++ b/test/test_logging.py @@ -1,6 +1,6 @@ -from aurweb import logging +from aurweb import aur_logging -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) def test_logging(caplog): diff --git a/test/test_packages_util.py b/test/test_packages_util.py index 0042cd71..a5273b68 100644 --- a/test/test_packages_util.py +++ b/test/test_packages_util.py @@ -2,6 +2,7 @@ import pytest from fastapi.testclient import TestClient from aurweb import asgi, config, db, time +from aurweb.aur_redis import kill_redis from aurweb.models.account_type import USER_ID from aurweb.models.official_provider import OFFICIAL_BASE, OfficialProvider from aurweb.models.package import Package @@ -11,7 +12,6 @@ from aurweb.models.package_source import PackageSource from aurweb.models.package_vote import PackageVote from aurweb.models.user import User from aurweb.packages import util -from aurweb.redis import kill_redis @pytest.fixture(autouse=True) diff --git a/test/test_ratelimit.py b/test/test_ratelimit.py index 20528847..b7cd7e7d 100644 --- a/test/test_ratelimit.py +++ b/test/test_ratelimit.py @@ -3,13 +3,13 @@ from unittest import mock import pytest from redis.client import Pipeline -from aurweb import config, db, logging +from aurweb import aur_logging, config, db +from aurweb.aur_redis import redis_connection from aurweb.models import ApiRateLimit from aurweb.ratelimit import check_ratelimit -from aurweb.redis import redis_connection from aurweb.testing.requests import Request -logger = logging.get_logger(__name__) +logger = 
aur_logging.get_logger(__name__) @pytest.fixture(autouse=True) diff --git a/test/test_redis.py b/test/test_redis.py index a66cd204..6f9bdb40 100644 --- a/test/test_redis.py +++ b/test/test_redis.py @@ -3,11 +3,11 @@ from unittest import mock import pytest import aurweb.config -from aurweb.redis import redis_connection +from aurweb.aur_redis import redis_connection @pytest.fixture -def rediss(): +def redis(): """Create a RedisStub.""" def mock_get(section, key): @@ -21,20 +21,20 @@ def rediss(): yield redis -def test_redis_stub(rediss): +def test_redis_stub(redis): # We don't yet have a test key set. - assert rediss.get("test") is None + assert redis.get("test") is None # Set the test key to abc. - rediss.set("test", "abc") - assert rediss.get("test").decode() == "abc" + redis.set("test", "abc") + assert redis.get("test").decode() == "abc" # Test expire. - rediss.expire("test", 0) - assert rediss.get("test") is None + redis.expire("test", 0) + assert redis.get("test") is None # Now, set the test key again and use delete() on it. 
- rediss.set("test", "abc") - assert rediss.get("test").decode() == "abc" - rediss.delete("test") - assert rediss.get("test") is None + redis.set("test", "abc") + assert redis.get("test").decode() == "abc" + redis.delete("test") + assert redis.get("test") is None diff --git a/test/test_rendercomment.py b/test/test_rendercomment.py index 5b7ff5ac..59eb7191 100644 --- a/test/test_rendercomment.py +++ b/test/test_rendercomment.py @@ -2,14 +2,14 @@ from unittest import mock import pytest -from aurweb import config, db, logging, time +from aurweb import aur_logging, config, db, time from aurweb.models import Package, PackageBase, PackageComment, User from aurweb.models.account_type import USER_ID from aurweb.scripts import rendercomment from aurweb.scripts.rendercomment import update_comment_render from aurweb.testing.git import GitRepository -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) aur_location = config.get("options", "aur_location") diff --git a/test/test_rpc.py b/test/test_rpc.py index 84ddd8d7..f417d379 100644 --- a/test/test_rpc.py +++ b/test/test_rpc.py @@ -10,6 +10,7 @@ from redis.client import Pipeline import aurweb.models.dependency_type as dt import aurweb.models.relation_type as rt from aurweb import asgi, config, db, rpc, scripts, time +from aurweb.aur_redis import redis_connection from aurweb.models.account_type import USER_ID from aurweb.models.dependency_type import DEPENDS_ID from aurweb.models.license import License @@ -22,7 +23,6 @@ from aurweb.models.package_relation import PackageRelation from aurweb.models.package_vote import PackageVote from aurweb.models.relation_type import PROVIDES_ID from aurweb.models.user import User -from aurweb.redis import redis_connection @pytest.fixture diff --git a/test/test_rss.py b/test/test_rss.py index 8526caa1..d227a183 100644 --- a/test/test_rss.py +++ b/test/test_rss.py @@ -4,14 +4,14 @@ import lxml.etree import pytest from fastapi.testclient import TestClient -from aurweb 
import db, logging, time +from aurweb import aur_logging, db, time from aurweb.asgi import app from aurweb.models.account_type import AccountType from aurweb.models.package import Package from aurweb.models.package_base import PackageBase from aurweb.models.user import User -logger = logging.get_logger(__name__) +logger = aur_logging.get_logger(__name__) @pytest.fixture(autouse=True) From 8555e232aeb331d3104fba5ec6b71341f979628b Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Sat, 22 Oct 2022 20:15:46 +0100 Subject: [PATCH 134/415] docs: fix mailing list after migration to mailman3 Closes: #396 Signed-off-by: Leonidas Spyropoulos --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 58612a36..c8d4f90d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -8,7 +8,7 @@ Before sending patches, you are recommended to run `flake8` and `isort`. You can add a git hook to do this by installing `python-pre-commit` and running `pre-commit install`. 
-[1]: https://lists.archlinux.org/listinfo/aur-dev +[1]: https://lists.archlinux.org/mailman3/lists/aur-dev.lists.archlinux.org/ [2]: https://gitlab.archlinux.org/archlinux/aurweb ### Coding Guidelines From 0417603499f890a475eb7890bad3ba63c44637ca Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Sat, 22 Oct 2022 21:48:40 +0100 Subject: [PATCH 135/415] housekeep: bump renovate dependencies email-validator: 1.2.1 -> ^1.3.0 uvicorn: ^0.18.0 -> ^0.19.0 fastapi: ^0.83.0 -> ^0.85.0 pytest-asyncio: ^0.19.0 -> ^0.20.1 pytest-cov ^3.0.0 -> ^4.0.0 Signed-off-by: Leonidas Spyropoulos --- poetry.lock | 869 ++++++++++++++++++++++++++----------------------- pyproject.toml | 10 +- 2 files changed, 466 insertions(+), 413 deletions(-) diff --git a/poetry.lock b/poetry.lock index ef2c70f9..9cf24f9a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -23,7 +23,7 @@ tz = ["python-dateutil"] [[package]] name = "anyio" -version = "3.6.1" +version = "3.6.2" description = "High level compatibility layer for multiple asynchronous event loop implementations" category = "main" optional = false @@ -36,7 +36,7 @@ sniffio = ">=1.1" [package.extras] doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] -trio = ["trio (>=0.16)"] +trio = ["trio (>=0.16,<0.22)"] [[package]] name = "asgiref" @@ -69,11 +69,11 @@ python-versions = ">=3.5" dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = 
["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] [[package]] name = "authlib" -version = "1.0.1" +version = "1.1.0" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." category = "main" optional = false @@ -84,7 +84,7 @@ cryptography = ">=3.2" [[package]] name = "bcrypt" -version = "4.0.0" +version = "4.0.1" description = "Modern password hashing for your software and your servers" category = "main" optional = false @@ -112,7 +112,7 @@ dev = ["Sphinx (==4.3.2)", "black (==22.3.0)", "build (==0.8.0)", "flake8 (==4.0 [[package]] name = "certifi" -version = "2022.6.15" +version = "2022.9.24" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false @@ -138,7 +138,7 @@ optional = false python-versions = ">=3.6.0" [package.extras] -unicode_backport = ["unicodedata2"] +unicode-backport = ["unicodedata2"] [[package]] name = "click" @@ -161,7 +161,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "coverage" -version = "6.4.4" +version = "6.5.0" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -175,7 +175,7 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "37.0.4" +version = "38.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
category = "main" optional = false @@ -188,7 +188,7 @@ cffi = ">=1.12" docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] -sdist = ["setuptools_rust (>=0.11.4)"] +sdist = ["setuptools-rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] @@ -204,7 +204,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" wrapt = ">=1.10,<2" [package.extras] -dev = ["PyTest (<5)", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "pytest", "pytest-cov", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"] +dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"] [[package]] name = "dnspython" @@ -224,8 +224,8 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"] [[package]] name = "email-validator" -version = "1.2.1" -description = "A robust email syntax and deliverability validation library." +version = "1.3.0" +description = "A robust email address syntax and deliverability validation library." category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" @@ -247,7 +247,7 @@ testing = ["pre-commit"] [[package]] name = "fakeredis" -version = "1.9.0" +version = "1.9.4" description = "Fake implementation of redis API for testing purposes." 
category = "main" optional = false @@ -255,7 +255,6 @@ python-versions = ">=3.7,<4.0" [package.dependencies] redis = "<4.4" -six = ">=1.16.0,<2.0.0" sortedcontainers = ">=2.4.0,<3.0.0" [package.extras] @@ -264,21 +263,21 @@ lua = ["lupa (>=1.13,<2.0)"] [[package]] name = "fastapi" -version = "0.83.0" +version = "0.85.1" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" category = "main" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7" [package.dependencies] pydantic = ">=1.6.2,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1.7.3,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0" -starlette = "0.19.1" +starlette = "0.20.4" [package.extras] -all = ["email_validator (>=1.1.1,<2.0.0)", "itsdangerous (>=1.1.0,<3.0.0)", "jinja2 (>=2.11.2,<4.0.0)", "orjson (>=3.2.1,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)"] -dev = ["autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "pre-commit (>=2.17.0,<3.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)"] -doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer (>=0.4.1,<0.5.0)"] -test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.3.0)", "databases[sqlite] (>=0.3.2,<0.6.0)", "email_validator (>=1.1.1,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.14.0,<0.19.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "orjson (>=3.2.1,<4.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=6.2.4,<7.0.0)", "pytest-cov (>=2.12.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "requests (>=2.24.0,<3.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "types-dataclasses (==0.6.5)", 
"types-orjson (==3.6.2)", "types-ujson (==4.2.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"] +all = ["email-validator (>=1.1.1,<2.0.0)", "itsdangerous (>=1.1.0,<3.0.0)", "jinja2 (>=2.11.2,<4.0.0)", "orjson (>=3.2.1,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "uvicorn[standard] (>=0.12.0,<0.19.0)"] +dev = ["autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "pre-commit (>=2.17.0,<3.0.0)", "uvicorn[standard] (>=0.12.0,<0.19.0)"] +doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer (>=0.4.1,<0.7.0)"] +test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.8.0)", "databases[sqlite] (>=0.3.2,<0.7.0)", "email-validator (>=1.1.1,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.23.0,<0.24.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.971)", "orjson (>=3.2.1,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=7.1.3,<8.0.0)", "pytest-cov (>=2.12.0,<4.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "types-orjson (==3.6.2)", "types-ujson (==5.4.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"] [[package]] name = "feedgen" @@ -306,14 +305,14 @@ testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pyt [[package]] name = "greenlet" -version = "1.1.2" +version = "1.1.3.post0" description = "Lightweight in-process concurrent programming" category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" [package.extras] -docs = ["sphinx"] +docs = ["Sphinx"] [[package]] name = "gunicorn" @@ -323,6 +322,9 @@ category 
= "main" optional = false python-versions = ">=3.5" +[package.dependencies] +setuptools = ">=3.0" + [package.extras] eventlet = ["eventlet (>=0.24.1)"] gevent = ["gevent (>=1.4.0)"] @@ -411,7 +413,7 @@ toml = "*" wsproto = ">=0.14.0" [package.extras] -docs = ["pydata-sphinx-theme"] +docs = ["pydata_sphinx_theme"] h3 = ["aioquic (>=0.9.0,<1.0)"] trio = ["trio (>=0.11.0)"] uvloop = ["uvloop"] @@ -426,7 +428,7 @@ python-versions = ">=3.6.1" [[package]] name = "idna" -version = "3.3" +version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false @@ -434,7 +436,7 @@ python-versions = ">=3.5" [[package]] name = "importlib-metadata" -version = "4.12.0" +version = "5.0.0" description = "Read metadata from Python packages" category = "main" optional = false @@ -444,9 +446,9 @@ python-versions = ">=3.7" zipp = ">=0.5" [package.extras] -docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "iniconfig" @@ -489,12 +491,12 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" [package.extras] cssselect = ["cssselect (>=0.7)"] html5 = ["html5lib"] -htmlsoup = ["beautifulsoup4"] +htmlsoup = ["BeautifulSoup4"] source = ["Cython (>=0.29.7)"] [[package]] name = "mako" -version = "1.2.1" +version = "1.2.3" description = "A 
super-fast templating language that borrows the best ideas from the existing templating languages." category = "main" optional = false @@ -504,7 +506,7 @@ python-versions = ">=3.7" MarkupSafe = ">=0.9.2" [package.extras] -babel = ["babel"] +babel = ["Babel"] lingua = ["lingua"] testing = ["pytest"] @@ -540,7 +542,7 @@ python-versions = ">=3.5" [[package]] name = "orjson" -version = "3.7.12" +version = "3.8.0" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" category = "main" optional = false @@ -603,7 +605,7 @@ python-versions = ">=3.6.1" [[package]] name = "prometheus-client" -version = "0.14.1" +version = "0.15.0" description = "Python client for the Prometheus monitoring system." category = "main" optional = false @@ -614,7 +616,7 @@ twisted = ["twisted"] [[package]] name = "prometheus-fastapi-instrumentator" -version = "5.8.2" +version = "5.9.1" description = "Instrument your FastAPI with Prometheus metrics" category = "main" optional = false @@ -626,7 +628,7 @@ prometheus-client = ">=0.8.0,<1.0.0" [[package]] name = "protobuf" -version = "4.21.5" +version = "4.21.8" description = "" category = "main" optional = false @@ -658,14 +660,14 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydantic" -version = "1.9.2" +version = "1.10.2" description = "Data validation and settings management using python type hints" category = "main" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7" [package.dependencies] -typing-extensions = ">=3.7.4.3" +typing-extensions = ">=4.1.0" [package.extras] dotenv = ["python-dotenv (>=0.10.4)"] @@ -673,7 +675,7 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pygit2" -version = "1.10.0" +version = "1.10.1" description = "Python bindings for libgit2." category = "main" optional = false @@ -715,7 +717,7 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2. 
[[package]] name = "pytest-asyncio" -version = "0.19.0" +version = "0.20.1" description = "Pytest support for asyncio" category = "dev" optional = false @@ -729,7 +731,7 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy [[package]] name = "pytest-cov" -version = "3.0.0" +version = "4.0.0" description = "Pytest plugin for measuring coverage." category = "dev" optional = false @@ -839,7 +841,7 @@ urllib3 = ">=1.21.1,<1.27" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rfc3986" @@ -855,6 +857,19 @@ idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} [package.extras] idna2008 = ["idna"] +[[package]] +name = "setuptools" +version = "65.5.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "six" version = "1.16.0" @@ -865,11 +880,11 @@ 
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "sniffio" -version = "1.2.0" +version = "1.3.0" description = "Sniff out which async library your code is running under" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" [[package]] name = "sortedcontainers" @@ -881,7 +896,7 @@ python-versions = "*" [[package]] name = "sqlalchemy" -version = "1.4.40" +version = "1.4.42" description = "Database Abstraction Library" category = "main" optional = false @@ -895,21 +910,21 @@ aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] -mariadb_connector = ["mariadb (>=1.0.1,!=1.1.2)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] mssql = ["pyodbc"] -mssql_pymssql = ["pymssql"] -mssql_pyodbc = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] -mysql_connector = ["mysql-connector-python"] +mysql-connector = ["mysql-connector-python"] oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql_asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql_pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] -postgresql_psycopg2binary = ["psycopg2-binary"] -postgresql_psycopg2cffi = ["psycopg2cffi"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql", "pymysql (<1)"] -sqlcipher = ["sqlcipher3-binary"] +sqlcipher = ["sqlcipher3_binary"] [[package]] name = "srcinfo" @@ -924,11 +939,11 @@ parse = "*" [[package]] name = "starlette" -version = "0.19.1" +version = "0.20.4" description = "The little ASGI library that shines." 
category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] anyio = ">=3.4.0,<5" @@ -938,7 +953,7 @@ typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\"" full = ["itsdangerous", "jinja2", "python-multipart", "pyyaml", "requests"] [[package]] -name = "tap.py" +name = "tap-py" version = "3.1" description = "Test Anything Protocol (TAP) tools" category = "dev" @@ -966,7 +981,7 @@ python-versions = ">=3.7" [[package]] name = "typing-extensions" -version = "4.3.0" +version = "4.4.0" description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false @@ -974,7 +989,7 @@ python-versions = ">=3.7" [[package]] name = "urllib3" -version = "1.26.11" +version = "1.26.12" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false @@ -982,12 +997,12 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "uvicorn" -version = "0.18.3" +version = "0.19.0" description = "The lightning-fast ASGI server." 
category = "main" optional = false @@ -998,7 +1013,7 @@ click = ">=7.0" h11 = ">=0.8" [package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.4.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.0)"] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.0)"] [[package]] name = "webencodings" @@ -1032,7 +1047,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [[package]] name = "wsproto" -version = "1.1.0" +version = "1.2.0" description = "WebSockets state-machine based protocol implementation" category = "main" optional = false @@ -1043,20 +1058,20 @@ h11 = ">=0.9.0,<1" [[package]] name = "zipp" -version = "3.8.1" +version = "3.9.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] -testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.11" -content-hash = "e1f9d796eea832af84c40c754ee3c58e633e98bd7cdb42a985b2c8657e82037e" +content-hash = "de9f0dc1d7e3f149a83629ad30d161da38aa1498b81aaa8bdfd2ebed50f232ab" [metadata.files] aiofiles = [ @@ -1068,8 +1083,8 @@ alembic = [ {file = 
"alembic-1.8.1.tar.gz", hash = "sha256:cd0b5e45b14b706426b833f06369b9a6d5ee03f826ec3238723ce8caaf6e5ffa"}, ] anyio = [ - {file = "anyio-3.6.1-py3-none-any.whl", hash = "sha256:cb29b9c70620506a9a8f87a309591713446953302d7d995344d0d7c6c0c9a7be"}, - {file = "anyio-3.6.1.tar.gz", hash = "sha256:413adf95f93886e442aea925f3ee43baa5a765a64a0f52c6081894f9992fdd0b"}, + {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, + {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, ] asgiref = [ {file = "asgiref-3.5.2-py3-none-any.whl", hash = "sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4"}, @@ -1084,30 +1099,39 @@ attrs = [ {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, ] authlib = [ - {file = "Authlib-1.0.1-py2.py3-none-any.whl", hash = "sha256:1286e2d5ef5bfe5a11cc2d0a0d1031f0393f6ce4d61f5121cfe87fa0054e98bd"}, - {file = "Authlib-1.0.1.tar.gz", hash = "sha256:6e74a4846ac36dfc882b3cc2fbd3d9eb410a627f2f2dc11771276655345223b1"}, + {file = "Authlib-1.1.0-py2.py3-none-any.whl", hash = "sha256:be4b6a1dea51122336c210a6945b27a105b9ac572baffd15b07bcff4376c1523"}, + {file = "Authlib-1.1.0.tar.gz", hash = "sha256:0a270c91409fc2b7b0fbee6996e09f2ee3187358762111a9a4225c874b94e891"}, ] bcrypt = [ - {file = "bcrypt-4.0.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:845b1daf4df2dd94d2fdbc9454953ca9dd0e12970a0bfc9f3dcc6faea3fa96e4"}, - {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8780e69f9deec9d60f947b169507d2c9816e4f11548f1f7ebee2af38b9b22ae4"}, - {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c3334446fac200499e8bc04a530ce3cf0b3d7151e0e4ac5c0dddd3d95e97843"}, - {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:bfb67f6a6c72dfb0a02f3df51550aa1862708e55128b22543e2b42c74f3620d7"}, - {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:7c7dd6c1f05bf89e65261d97ac3a6520f34c2acb369afb57e3ea4449be6ff8fd"}, - {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:594780b364fb45f2634c46ec8d3e61c1c0f1811c4f2da60e8eb15594ecbf93ed"}, - {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2d0dd19aad87e4ab882ef1d12df505f4c52b28b69666ce83c528f42c07379227"}, - {file = "bcrypt-4.0.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bf413f2a9b0a2950fc750998899013f2e718d20fa4a58b85ca50b6df5ed1bbf9"}, - {file = "bcrypt-4.0.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ede0f506554571c8eda80db22b83c139303ec6b595b8f60c4c8157bdd0bdee36"}, - {file = "bcrypt-4.0.0-cp36-abi3-win32.whl", hash = "sha256:dc6ec3dc19b1c193b2f7cf279d3e32e7caf447532fbcb7af0906fe4398900c33"}, - {file = "bcrypt-4.0.0-cp36-abi3-win_amd64.whl", hash = "sha256:0b0f0c7141622a31e9734b7f649451147c04ebb5122327ac0bd23744df84be90"}, - {file = "bcrypt-4.0.0.tar.gz", hash = "sha256:c59c170fc9225faad04dde1ba61d85b413946e8ce2e5f5f5ff30dfd67283f319"}, + {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = 
"sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, + {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, + {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, + {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, + {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, + {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, + {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, + {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, + {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, ] bleach = [ {file = "bleach-5.0.1-py3-none-any.whl", hash = "sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a"}, {file = "bleach-5.0.1.tar.gz", hash = "sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c"}, ] certifi = [ - {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, - {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, + {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, + {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, ] cffi = [ {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, @@ -1188,80 +1212,84 @@ colorama = [ {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, ] coverage = [ - {file = "coverage-6.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7b4da9bafad21ea45a714d3ea6f3e1679099e420c8741c74905b92ee9bfa7cc"}, - {file = "coverage-6.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fde17bc42e0716c94bf19d92e4c9f5a00c5feb401f5bc01101fdf2a8b7cacf60"}, - {file = "coverage-6.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:cdbb0d89923c80dbd435b9cf8bba0ff55585a3cdb28cbec65f376c041472c60d"}, - {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67f9346aeebea54e845d29b487eb38ec95f2ecf3558a3cffb26ee3f0dcc3e760"}, - {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42c499c14efd858b98c4e03595bf914089b98400d30789511577aa44607a1b74"}, - {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c35cca192ba700979d20ac43024a82b9b32a60da2f983bec6c0f5b84aead635c"}, - {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9cc4f107009bca5a81caef2fca843dbec4215c05e917a59dec0c8db5cff1d2aa"}, - {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f444627b3664b80d078c05fe6a850dd711beeb90d26731f11d492dcbadb6973"}, - {file = "coverage-6.4.4-cp310-cp310-win32.whl", hash = "sha256:66e6df3ac4659a435677d8cd40e8eb1ac7219345d27c41145991ee9bf4b806a0"}, - {file = "coverage-6.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:35ef1f8d8a7a275aa7410d2f2c60fa6443f4a64fae9be671ec0696a68525b875"}, - {file = "coverage-6.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c1328d0c2f194ffda30a45f11058c02410e679456276bfa0bbe0b0ee87225fac"}, - {file = "coverage-6.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61b993f3998ee384935ee423c3d40894e93277f12482f6e777642a0141f55782"}, - {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d5dd4b8e9cd0deb60e6fcc7b0647cbc1da6c33b9e786f9c79721fd303994832f"}, - {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7026f5afe0d1a933685d8f2169d7c2d2e624f6255fb584ca99ccca8c0e966fd7"}, - {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:9c7b9b498eb0c0d48b4c2abc0e10c2d78912203f972e0e63e3c9dc21f15abdaa"}, - {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ee2b2fb6eb4ace35805f434e0f6409444e1466a47f620d1d5763a22600f0f892"}, - {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ab066f5ab67059d1f1000b5e1aa8bbd75b6ed1fc0014559aea41a9eb66fc2ce0"}, - {file = "coverage-6.4.4-cp311-cp311-win32.whl", hash = "sha256:9d6e1f3185cbfd3d91ac77ea065d85d5215d3dfa45b191d14ddfcd952fa53796"}, - {file = "coverage-6.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:e3d3c4cc38b2882f9a15bafd30aec079582b819bec1b8afdbde8f7797008108a"}, - {file = "coverage-6.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a095aa0a996ea08b10580908e88fbaf81ecf798e923bbe64fb98d1807db3d68a"}, - {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef6f44409ab02e202b31a05dd6666797f9de2aa2b4b3534e9d450e42dea5e817"}, - {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b7101938584d67e6f45f0015b60e24a95bf8dea19836b1709a80342e01b472f"}, - {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a32ec68d721c3d714d9b105c7acf8e0f8a4f4734c811eda75ff3718570b5e3"}, - {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6a864733b22d3081749450466ac80698fe39c91cb6849b2ef8752fd7482011f3"}, - {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:08002f9251f51afdcc5e3adf5d5d66bb490ae893d9e21359b085f0e03390a820"}, - {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a3b2752de32c455f2521a51bd3ffb53c5b3ae92736afde67ce83477f5c1dd928"}, - {file = "coverage-6.4.4-cp37-cp37m-win32.whl", hash = "sha256:f855b39e4f75abd0dfbcf74a82e84ae3fc260d523fcb3532786bcbbcb158322c"}, - {file = "coverage-6.4.4-cp37-cp37m-win_amd64.whl", hash = 
"sha256:ee6ae6bbcac0786807295e9687169fba80cb0617852b2fa118a99667e8e6815d"}, - {file = "coverage-6.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:564cd0f5b5470094df06fab676c6d77547abfdcb09b6c29c8a97c41ad03b103c"}, - {file = "coverage-6.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cbbb0e4cd8ddcd5ef47641cfac97d8473ab6b132dd9a46bacb18872828031685"}, - {file = "coverage-6.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6113e4df2fa73b80f77663445be6d567913fb3b82a86ceb64e44ae0e4b695de1"}, - {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d032bfc562a52318ae05047a6eb801ff31ccee172dc0d2504614e911d8fa83e"}, - {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e431e305a1f3126477abe9a184624a85308da8edf8486a863601d58419d26ffa"}, - {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cf2afe83a53f77aec067033199797832617890e15bed42f4a1a93ea24794ae3e"}, - {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:783bc7c4ee524039ca13b6d9b4186a67f8e63d91342c713e88c1865a38d0892a"}, - {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ff934ced84054b9018665ca3967fc48e1ac99e811f6cc99ea65978e1d384454b"}, - {file = "coverage-6.4.4-cp38-cp38-win32.whl", hash = "sha256:e1fabd473566fce2cf18ea41171d92814e4ef1495e04471786cbc943b89a3781"}, - {file = "coverage-6.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:4179502f210ebed3ccfe2f78bf8e2d59e50b297b598b100d6c6e3341053066a2"}, - {file = "coverage-6.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:98c0b9e9b572893cdb0a00e66cf961a238f8d870d4e1dc8e679eb8bdc2eb1b86"}, - {file = "coverage-6.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc600f6ec19b273da1d85817eda339fb46ce9eef3e89f220055d8696e0a06908"}, - {file = 
"coverage-6.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a98d6bf6d4ca5c07a600c7b4e0c5350cd483c85c736c522b786be90ea5bac4f"}, - {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01778769097dbd705a24e221f42be885c544bb91251747a8a3efdec6eb4788f2"}, - {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfa0b97eb904255e2ab24166071b27408f1f69c8fbda58e9c0972804851e0558"}, - {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fcbe3d9a53e013f8ab88734d7e517eb2cd06b7e689bedf22c0eb68db5e4a0a19"}, - {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:15e38d853ee224e92ccc9a851457fb1e1f12d7a5df5ae44544ce7863691c7a0d"}, - {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6913dddee2deff8ab2512639c5168c3e80b3ebb0f818fed22048ee46f735351a"}, - {file = "coverage-6.4.4-cp39-cp39-win32.whl", hash = "sha256:354df19fefd03b9a13132fa6643527ef7905712109d9c1c1903f2133d3a4e145"}, - {file = "coverage-6.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:1238b08f3576201ebf41f7c20bf59baa0d05da941b123c6656e42cdb668e9827"}, - {file = "coverage-6.4.4-pp36.pp37.pp38-none-any.whl", hash = "sha256:f67cf9f406cf0d2f08a3515ce2db5b82625a7257f88aad87904674def6ddaec1"}, - {file = "coverage-6.4.4.tar.gz", hash = "sha256:e16c45b726acb780e1e6f88b286d3c10b3914ab03438f32117c4aa52d7f30d58"}, + {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, + {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, + {file = 
"coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, + {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, + {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, + {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, + {file = 
"coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, + {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, + {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, + {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, + {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, + {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, + {file = 
"coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, + {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, + {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, + {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, + {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, + {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, + {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, ] cryptography = [ - {file = "cryptography-37.0.4-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884"}, - {file = "cryptography-37.0.4-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6"}, - {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046"}, - {file = 
"cryptography-37.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5"}, - {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b"}, - {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8"}, - {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280"}, - {file = "cryptography-37.0.4-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3"}, - {file = "cryptography-37.0.4-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59"}, - {file = "cryptography-37.0.4-cp36-abi3-win32.whl", hash = "sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157"}, - {file = "cryptography-37.0.4-cp36-abi3-win_amd64.whl", hash = "sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327"}, - {file = "cryptography-37.0.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b"}, - {file = "cryptography-37.0.4-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9"}, - {file = "cryptography-37.0.4-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67"}, - {file = "cryptography-37.0.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d"}, - {file = 
"cryptography-37.0.4-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282"}, - {file = "cryptography-37.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa"}, - {file = "cryptography-37.0.4-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441"}, - {file = "cryptography-37.0.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596"}, - {file = "cryptography-37.0.4-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a"}, - {file = "cryptography-37.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab"}, - {file = "cryptography-37.0.4.tar.gz", hash = "sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82"}, + {file = "cryptography-38.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:10d1f29d6292fc95acb597bacefd5b9e812099d75a6469004fd38ba5471a977f"}, + {file = "cryptography-38.0.1-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:3fc26e22840b77326a764ceb5f02ca2d342305fba08f002a8c1f139540cdfaad"}, + {file = "cryptography-38.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:3b72c360427889b40f36dc214630e688c2fe03e16c162ef0aa41da7ab1455153"}, + {file = "cryptography-38.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:194044c6b89a2f9f169df475cc167f6157eb9151cc69af8a2a163481d45cc407"}, + {file = "cryptography-38.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca9f6784ea96b55ff41708b92c3f6aeaebde4c560308e5fbbd3173fbc466e94e"}, + {file = "cryptography-38.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = 
"sha256:16fa61e7481f4b77ef53991075de29fc5bacb582a1244046d2e8b4bb72ef66d0"}, + {file = "cryptography-38.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d4ef6cc305394ed669d4d9eebf10d3a101059bdcf2669c366ec1d14e4fb227bd"}, + {file = "cryptography-38.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3261725c0ef84e7592597606f6583385fed2a5ec3909f43bc475ade9729a41d6"}, + {file = "cryptography-38.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0297ffc478bdd237f5ca3a7dc96fc0d315670bfa099c04dc3a4a2172008a405a"}, + {file = "cryptography-38.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:89ed49784ba88c221756ff4d4755dbc03b3c8d2c5103f6d6b4f83a0fb1e85294"}, + {file = "cryptography-38.0.1-cp36-abi3-win32.whl", hash = "sha256:ac7e48f7e7261207d750fa7e55eac2d45f720027d5703cd9007e9b37bbb59ac0"}, + {file = "cryptography-38.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:ad7353f6ddf285aeadfaf79e5a6829110106ff8189391704c1d8801aa0bae45a"}, + {file = "cryptography-38.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:896dd3a66959d3a5ddcfc140a53391f69ff1e8f25d93f0e2e7830c6de90ceb9d"}, + {file = "cryptography-38.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:d3971e2749a723e9084dd507584e2a2761f78ad2c638aa31e80bc7a15c9db4f9"}, + {file = "cryptography-38.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:79473cf8a5cbc471979bd9378c9f425384980fcf2ab6534b18ed7d0d9843987d"}, + {file = "cryptography-38.0.1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9e69ae01f99abe6ad646947bba8941e896cb3aa805be2597a0400e0764b5818"}, + {file = "cryptography-38.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5067ee7f2bce36b11d0e334abcd1ccf8c541fc0bbdaf57cdd511fdee53e879b6"}, + {file = "cryptography-38.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3e3a2599e640927089f932295a9a247fc40a5bdf69b0484532f530471a382750"}, + {file = 
"cryptography-38.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2e5856248a416767322c8668ef1845ad46ee62629266f84a8f007a317141013"}, + {file = "cryptography-38.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:64760ba5331e3f1794d0bcaabc0d0c39e8c60bf67d09c93dc0e54189dfd7cfe5"}, + {file = "cryptography-38.0.1-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b6c9b706316d7b5a137c35e14f4103e2115b088c412140fdbd5f87c73284df61"}, + {file = "cryptography-38.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0163a849b6f315bf52815e238bc2b2346604413fa7c1601eea84bcddb5fb9ac"}, + {file = "cryptography-38.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:d1a5bd52d684e49a36582193e0b89ff267704cd4025abefb9e26803adeb3e5fb"}, + {file = "cryptography-38.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:765fa194a0f3372d83005ab83ab35d7c5526c4e22951e46059b8ac678b44fa5a"}, + {file = "cryptography-38.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:52e7bee800ec869b4031093875279f1ff2ed12c1e2f74923e8f49c916afd1d3b"}, + {file = "cryptography-38.0.1.tar.gz", hash = "sha256:1db3d807a14931fa317f96435695d9ec386be7b84b618cc61cfa5d08b0ae33d7"}, ] deprecated = [ {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"}, @@ -1272,20 +1300,20 @@ dnspython = [ {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"}, ] email-validator = [ - {file = "email_validator-1.2.1-py2.py3-none-any.whl", hash = "sha256:c8589e691cf73eb99eed8d10ce0e9cbb05a0886ba920c8bcb7c82873f4c5789c"}, - {file = "email_validator-1.2.1.tar.gz", hash = "sha256:6757aea012d40516357c0ac2b1a4c31219ab2f899d26831334c5d069e8b6c3d8"}, + {file = "email_validator-1.3.0-py2.py3-none-any.whl", hash = "sha256:816073f2a7cffef786b29928f58ec16cdac42710a53bb18aa94317e3e145ec5c"}, + {file = "email_validator-1.3.0.tar.gz", hash = 
"sha256:553a66f8be2ec2dea641ae1d3f29017ab89e9d603d4a25cdaac39eefa283d769"}, ] execnet = [ {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, ] fakeredis = [ - {file = "fakeredis-1.9.0-py3-none-any.whl", hash = "sha256:868467ff399520fc77e37ff002c60d1b2a1674742982e27338adaeebcc537648"}, - {file = "fakeredis-1.9.0.tar.gz", hash = "sha256:60639946e3bb1274c30416f539f01f9d73b4ea68c244c1442f5524e45f51e882"}, + {file = "fakeredis-1.9.4-py3-none-any.whl", hash = "sha256:61afe14095aad3e7413a0a6fe63041da1b4bc3e41d5228a33b60bd03fabf22d8"}, + {file = "fakeredis-1.9.4.tar.gz", hash = "sha256:17415645d11994061f5394f3f1c76ba4531f3f8b63f9c55a8fd2120bebcbfae9"}, ] fastapi = [ - {file = "fastapi-0.83.0-py3-none-any.whl", hash = "sha256:694a2b6c2607a61029a4be1c6613f84d74019cb9f7a41c7a475dca8e715f9368"}, - {file = "fastapi-0.83.0.tar.gz", hash = "sha256:96eb692350fe13d7a9843c3c87a874f0d45102975257dd224903efd6c0fde3bd"}, + {file = "fastapi-0.85.1-py3-none-any.whl", hash = "sha256:de3166b6b1163dc22da4dc4ebdc3192fcbac7700dd1870a1afa44de636a636b5"}, + {file = "fastapi-0.85.1.tar.gz", hash = "sha256:1facd097189682a4ff11cbd01334a992e51b56be663b2bd50c2c09523624f144"}, ] feedgen = [ {file = "feedgen-0.9.0.tar.gz", hash = "sha256:8e811bdbbed6570034950db23a4388453628a70e689a6e8303ccec430f5a804a"}, @@ -1295,61 +1323,72 @@ filelock = [ {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, ] greenlet = [ - {file = "greenlet-1.1.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:58df5c2a0e293bf665a51f8a100d3e9956febfbf1d9aaf8c0677cf70218910c6"}, - {file = "greenlet-1.1.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:aec52725173bd3a7b56fe91bc56eccb26fbdff1386ef123abb63c84c5b43b63a"}, - {file = 
"greenlet-1.1.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:833e1551925ed51e6b44c800e71e77dacd7e49181fdc9ac9a0bf3714d515785d"}, - {file = "greenlet-1.1.2-cp27-cp27m-win32.whl", hash = "sha256:aa5b467f15e78b82257319aebc78dd2915e4c1436c3c0d1ad6f53e47ba6e2713"}, - {file = "greenlet-1.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:40b951f601af999a8bf2ce8c71e8aaa4e8c6f78ff8afae7b808aae2dc50d4c40"}, - {file = "greenlet-1.1.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:95e69877983ea39b7303570fa6760f81a3eec23d0e3ab2021b7144b94d06202d"}, - {file = "greenlet-1.1.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:356b3576ad078c89a6107caa9c50cc14e98e3a6c4874a37c3e0273e4baf33de8"}, - {file = "greenlet-1.1.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8639cadfda96737427330a094476d4c7a56ac03de7265622fcf4cfe57c8ae18d"}, - {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97e5306482182170ade15c4b0d8386ded995a07d7cc2ca8f27958d34d6736497"}, - {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6a36bb9474218c7a5b27ae476035497a6990e21d04c279884eb10d9b290f1b1"}, - {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abb7a75ed8b968f3061327c433a0fbd17b729947b400747c334a9c29a9af6c58"}, - {file = "greenlet-1.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b336501a05e13b616ef81ce329c0e09ac5ed8c732d9ba7e3e983fcc1a9e86965"}, - {file = "greenlet-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:14d4f3cd4e8b524ae9b8aa567858beed70c392fdec26dbdb0a8a418392e71708"}, - {file = "greenlet-1.1.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:17ff94e7a83aa8671a25bf5b59326ec26da379ace2ebc4411d690d80a7fbcf23"}, - {file = "greenlet-1.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9f3cba480d3deb69f6ee2c1825060177a22c7826431458c697df88e6aeb3caee"}, - {file = "greenlet-1.1.2-cp35-cp35m-manylinux2010_x86_64.whl", 
hash = "sha256:fa877ca7f6b48054f847b61d6fa7bed5cebb663ebc55e018fda12db09dcc664c"}, - {file = "greenlet-1.1.2-cp35-cp35m-win32.whl", hash = "sha256:7cbd7574ce8e138bda9df4efc6bf2ab8572c9aff640d8ecfece1b006b68da963"}, - {file = "greenlet-1.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:903bbd302a2378f984aef528f76d4c9b1748f318fe1294961c072bdc7f2ffa3e"}, - {file = "greenlet-1.1.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:049fe7579230e44daef03a259faa24511d10ebfa44f69411d99e6a184fe68073"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:dd0b1e9e891f69e7675ba5c92e28b90eaa045f6ab134ffe70b52e948aa175b3c"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7418b6bfc7fe3331541b84bb2141c9baf1ec7132a7ecd9f375912eca810e714e"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9d29ca8a77117315101425ec7ec2a47a22ccf59f5593378fc4077ac5b754fce"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21915eb821a6b3d9d8eefdaf57d6c345b970ad722f856cd71739493ce003ad08"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eff9d20417ff9dcb0d25e2defc2574d10b491bf2e693b4e491914738b7908168"}, - {file = "greenlet-1.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b8c008de9d0daba7b6666aa5bbfdc23dcd78cafc33997c9b7741ff6353bafb7f"}, - {file = "greenlet-1.1.2-cp36-cp36m-win32.whl", hash = "sha256:32ca72bbc673adbcfecb935bb3fb1b74e663d10a4b241aaa2f5a75fe1d1f90aa"}, - {file = "greenlet-1.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f0214eb2a23b85528310dad848ad2ac58e735612929c8072f6093f3585fd342d"}, - {file = "greenlet-1.1.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:b92e29e58bef6d9cfd340c72b04d74c4b4e9f70c9fa7c78b674d1fec18896dc4"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = 
"sha256:fdcec0b8399108577ec290f55551d926d9a1fa6cad45882093a7a07ac5ec147b"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:93f81b134a165cc17123626ab8da2e30c0455441d4ab5576eed73a64c025b25c"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e12bdc622676ce47ae9abbf455c189e442afdde8818d9da983085df6312e7a1"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c790abda465726cfb8bb08bd4ca9a5d0a7bd77c7ac1ca1b839ad823b948ea28"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f276df9830dba7a333544bd41070e8175762a7ac20350786b322b714b0e654f5"}, - {file = "greenlet-1.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c5d5b35f789a030ebb95bff352f1d27a93d81069f2adb3182d99882e095cefe"}, - {file = "greenlet-1.1.2-cp37-cp37m-win32.whl", hash = "sha256:64e6175c2e53195278d7388c454e0b30997573f3f4bd63697f88d855f7a6a1fc"}, - {file = "greenlet-1.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b11548073a2213d950c3f671aa88e6f83cda6e2fb97a8b6317b1b5b33d850e06"}, - {file = "greenlet-1.1.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:9633b3034d3d901f0a46b7939f8c4d64427dfba6bbc5a36b1a67364cf148a1b0"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:eb6ea6da4c787111adf40f697b4e58732ee0942b5d3bd8f435277643329ba627"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:f3acda1924472472ddd60c29e5b9db0cec629fbe3c5c5accb74d6d6d14773478"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e859fcb4cbe93504ea18008d1df98dee4f7766db66c435e4882ab35cf70cac43"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00e44c8afdbe5467e4f7b5851be223be68adb4272f44696ee71fe46b7036a711"}, - {file = 
"greenlet-1.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec8c433b3ab0419100bd45b47c9c8551248a5aee30ca5e9d399a0b57ac04651b"}, - {file = "greenlet-1.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2bde6792f313f4e918caabc46532aa64aa27a0db05d75b20edfc5c6f46479de2"}, - {file = "greenlet-1.1.2-cp38-cp38-win32.whl", hash = "sha256:288c6a76705dc54fba69fbcb59904ae4ad768b4c768839b8ca5fdadec6dd8cfd"}, - {file = "greenlet-1.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:8d2f1fb53a421b410751887eb4ff21386d119ef9cde3797bf5e7ed49fb51a3b3"}, - {file = "greenlet-1.1.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:166eac03e48784a6a6e0e5f041cfebb1ab400b394db188c48b3a84737f505b67"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:572e1787d1460da79590bf44304abbc0a2da944ea64ec549188fa84d89bba7ab"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:be5f425ff1f5f4b3c1e33ad64ab994eed12fc284a6ea71c5243fd564502ecbe5"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1692f7d6bc45e3200844be0dba153612103db241691088626a33ff1f24a0d88"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7227b47e73dedaa513cdebb98469705ef0d66eb5a1250144468e9c3097d6b59b"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ff61ff178250f9bb3cd89752df0f1dd0e27316a8bd1465351652b1b4a4cdfd3"}, - {file = "greenlet-1.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0051c6f1f27cb756ffc0ffbac7d2cd48cb0362ac1736871399a739b2885134d3"}, - {file = "greenlet-1.1.2-cp39-cp39-win32.whl", hash = "sha256:f70a9e237bb792c7cc7e44c531fd48f5897961701cdaa06cf22fc14965c496cf"}, - {file = "greenlet-1.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:013d61294b6cd8fe3242932c1c5e36e5d1db2c8afb58606c5a67efce62c1f5fd"}, - {file = "greenlet-1.1.2.tar.gz", hash = 
"sha256:e30f5ea4ae2346e62cedde8794a56858a67b878dd79f7df76a0767e356b1744a"}, + {file = "greenlet-1.1.3.post0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:949c9061b8c6d3e6e439466a9be1e787208dec6246f4ec5fffe9677b4c19fcc3"}, + {file = "greenlet-1.1.3.post0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:d7815e1519a8361c5ea2a7a5864945906f8e386fa1bc26797b4d443ab11a4589"}, + {file = "greenlet-1.1.3.post0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9649891ab4153f217f319914455ccf0b86986b55fc0573ce803eb998ad7d6854"}, + {file = "greenlet-1.1.3.post0-cp27-cp27m-win32.whl", hash = "sha256:11fc7692d95cc7a6a8447bb160d98671ab291e0a8ea90572d582d57361360f05"}, + {file = "greenlet-1.1.3.post0-cp27-cp27m-win_amd64.whl", hash = "sha256:05ae7383f968bba4211b1fbfc90158f8e3da86804878442b4fb6c16ccbcaa519"}, + {file = "greenlet-1.1.3.post0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ccbe7129a282ec5797df0451ca1802f11578be018a32979131065565da89b392"}, + {file = "greenlet-1.1.3.post0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a8b58232f5b72973350c2b917ea3df0bebd07c3c82a0a0e34775fc2c1f857e9"}, + {file = "greenlet-1.1.3.post0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:f6661b58412879a2aa099abb26d3c93e91dedaba55a6394d1fb1512a77e85de9"}, + {file = "greenlet-1.1.3.post0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c6e942ca9835c0b97814d14f78da453241837419e0d26f7403058e8db3e38f8"}, + {file = "greenlet-1.1.3.post0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a812df7282a8fc717eafd487fccc5ba40ea83bb5b13eb3c90c446d88dbdfd2be"}, + {file = "greenlet-1.1.3.post0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83a7a6560df073ec9de2b7cb685b199dfd12519bc0020c62db9d1bb522f989fa"}, + {file = "greenlet-1.1.3.post0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:17a69967561269b691747e7f436d75a4def47e5efcbc3c573180fc828e176d80"}, + {file = 
"greenlet-1.1.3.post0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:60839ab4ea7de6139a3be35b77e22e0398c270020050458b3d25db4c7c394df5"}, + {file = "greenlet-1.1.3.post0-cp310-cp310-win_amd64.whl", hash = "sha256:8926a78192b8b73c936f3e87929931455a6a6c6c385448a07b9f7d1072c19ff3"}, + {file = "greenlet-1.1.3.post0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:c6f90234e4438062d6d09f7d667f79edcc7c5e354ba3a145ff98176f974b8132"}, + {file = "greenlet-1.1.3.post0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:814f26b864ed2230d3a7efe0336f5766ad012f94aad6ba43a7c54ca88dd77cba"}, + {file = "greenlet-1.1.3.post0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fda1139d87ce5f7bd80e80e54f9f2c6fe2f47983f1a6f128c47bf310197deb6"}, + {file = "greenlet-1.1.3.post0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0643250dd0756f4960633f5359884f609a234d4066686754e834073d84e9b51"}, + {file = "greenlet-1.1.3.post0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cb863057bed786f6622982fb8b2c122c68e6e9eddccaa9fa98fd937e45ee6c4f"}, + {file = "greenlet-1.1.3.post0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8c0581077cf2734569f3e500fab09c0ff6a2ab99b1afcacbad09b3c2843ae743"}, + {file = "greenlet-1.1.3.post0-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:695d0d8b5ae42c800f1763c9fce9d7b94ae3b878919379150ee5ba458a460d57"}, + {file = "greenlet-1.1.3.post0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:5662492df0588a51d5690f6578f3bbbd803e7f8d99a99f3bf6128a401be9c269"}, + {file = "greenlet-1.1.3.post0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:bffba15cff4802ff493d6edcf20d7f94ab1c2aee7cfc1e1c7627c05f1102eee8"}, + {file = "greenlet-1.1.3.post0-cp35-cp35m-win32.whl", hash = "sha256:7afa706510ab079fd6d039cc6e369d4535a48e202d042c32e2097f030a16450f"}, + {file = "greenlet-1.1.3.post0-cp35-cp35m-win_amd64.whl", hash = 
"sha256:3a24f3213579dc8459e485e333330a921f579543a5214dbc935bc0763474ece3"}, + {file = "greenlet-1.1.3.post0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:64e10f303ea354500c927da5b59c3802196a07468332d292aef9ddaca08d03dd"}, + {file = "greenlet-1.1.3.post0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:eb6ac495dccb1520667cfea50d89e26f9ffb49fa28496dea2b95720d8b45eb54"}, + {file = "greenlet-1.1.3.post0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:88720794390002b0c8fa29e9602b395093a9a766b229a847e8d88349e418b28a"}, + {file = "greenlet-1.1.3.post0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39464518a2abe9c505a727af7c0b4efff2cf242aa168be5f0daa47649f4d7ca8"}, + {file = "greenlet-1.1.3.post0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0914f02fcaa8f84f13b2df4a81645d9e82de21ed95633765dd5cc4d3af9d7403"}, + {file = "greenlet-1.1.3.post0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96656c5f7c95fc02c36d4f6ef32f4e94bb0b6b36e6a002c21c39785a4eec5f5d"}, + {file = "greenlet-1.1.3.post0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4f74aa0092602da2069df0bc6553919a15169d77bcdab52a21f8c5242898f519"}, + {file = "greenlet-1.1.3.post0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:3aeac044c324c1a4027dca0cde550bd83a0c0fbff7ef2c98df9e718a5086c194"}, + {file = "greenlet-1.1.3.post0-cp36-cp36m-win32.whl", hash = "sha256:fe7c51f8a2ab616cb34bc33d810c887e89117771028e1e3d3b77ca25ddeace04"}, + {file = "greenlet-1.1.3.post0-cp36-cp36m-win_amd64.whl", hash = "sha256:70048d7b2c07c5eadf8393e6398595591df5f59a2f26abc2f81abca09610492f"}, + {file = "greenlet-1.1.3.post0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:66aa4e9a726b70bcbfcc446b7ba89c8cec40f405e51422c39f42dfa206a96a05"}, + {file = "greenlet-1.1.3.post0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:025b8de2273d2809f027d347aa2541651d2e15d593bbce0d5f502ca438c54136"}, + {file = 
"greenlet-1.1.3.post0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:82a38d7d2077128a017094aff334e67e26194f46bd709f9dcdacbf3835d47ef5"}, + {file = "greenlet-1.1.3.post0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7d20c3267385236b4ce54575cc8e9f43e7673fc761b069c820097092e318e3b"}, + {file = "greenlet-1.1.3.post0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8ece5d1a99a2adcb38f69af2f07d96fb615415d32820108cd340361f590d128"}, + {file = "greenlet-1.1.3.post0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2794eef1b04b5ba8948c72cc606aab62ac4b0c538b14806d9c0d88afd0576d6b"}, + {file = "greenlet-1.1.3.post0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a8d24eb5cb67996fb84633fdc96dbc04f2d8b12bfcb20ab3222d6be271616b67"}, + {file = "greenlet-1.1.3.post0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0120a879aa2b1ac5118bce959ea2492ba18783f65ea15821680a256dfad04754"}, + {file = "greenlet-1.1.3.post0-cp37-cp37m-win32.whl", hash = "sha256:bef49c07fcb411c942da6ee7d7ea37430f830c482bf6e4b72d92fd506dd3a427"}, + {file = "greenlet-1.1.3.post0-cp37-cp37m-win_amd64.whl", hash = "sha256:62723e7eb85fa52e536e516ee2ac91433c7bb60d51099293671815ff49ed1c21"}, + {file = "greenlet-1.1.3.post0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d25cdedd72aa2271b984af54294e9527306966ec18963fd032cc851a725ddc1b"}, + {file = "greenlet-1.1.3.post0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:924df1e7e5db27d19b1359dc7d052a917529c95ba5b8b62f4af611176da7c8ad"}, + {file = "greenlet-1.1.3.post0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ec615d2912b9ad807afd3be80bf32711c0ff9c2b00aa004a45fd5d5dde7853d9"}, + {file = "greenlet-1.1.3.post0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0971d37ae0eaf42344e8610d340aa0ad3d06cd2eee381891a10fe771879791f9"}, + {file = "greenlet-1.1.3.post0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:325f272eb997916b4a3fc1fea7313a8adb760934c2140ce13a2117e1b0a8095d"}, + {file = "greenlet-1.1.3.post0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75afcbb214d429dacdf75e03a1d6d6c5bd1fa9c35e360df8ea5b6270fb2211c"}, + {file = "greenlet-1.1.3.post0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5c2d21c2b768d8c86ad935e404cc78c30d53dea009609c3ef3a9d49970c864b5"}, + {file = "greenlet-1.1.3.post0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:467b73ce5dcd89e381292fb4314aede9b12906c18fab903f995b86034d96d5c8"}, + {file = "greenlet-1.1.3.post0-cp38-cp38-win32.whl", hash = "sha256:8149a6865b14c33be7ae760bcdb73548bb01e8e47ae15e013bf7ef9290ca309a"}, + {file = "greenlet-1.1.3.post0-cp38-cp38-win_amd64.whl", hash = "sha256:104f29dd822be678ef6b16bf0035dcd43206a8a48668a6cae4d2fe9c7a7abdeb"}, + {file = "greenlet-1.1.3.post0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:c8c9301e3274276d3d20ab6335aa7c5d9e5da2009cccb01127bddb5c951f8870"}, + {file = "greenlet-1.1.3.post0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:8415239c68b2ec9de10a5adf1130ee9cb0ebd3e19573c55ba160ff0ca809e012"}, + {file = "greenlet-1.1.3.post0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:3c22998bfef3fcc1b15694818fc9b1b87c6cc8398198b96b6d355a7bcb8c934e"}, + {file = "greenlet-1.1.3.post0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0aa1845944e62f358d63fcc911ad3b415f585612946b8edc824825929b40e59e"}, + {file = "greenlet-1.1.3.post0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:890f633dc8cb307761ec566bc0b4e350a93ddd77dc172839be122be12bae3e10"}, + {file = "greenlet-1.1.3.post0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cf37343e43404699d58808e51f347f57efd3010cc7cee134cdb9141bd1ad9ea"}, + {file = "greenlet-1.1.3.post0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5edf75e7fcfa9725064ae0d8407c849456553a181ebefedb7606bac19aa1478b"}, + {file = 
"greenlet-1.1.3.post0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a954002064ee919b444b19c1185e8cce307a1f20600f47d6f4b6d336972c809"}, + {file = "greenlet-1.1.3.post0-cp39-cp39-win32.whl", hash = "sha256:2ccdc818cc106cc238ff7eba0d71b9c77be868fdca31d6c3b1347a54c9b187b2"}, + {file = "greenlet-1.1.3.post0-cp39-cp39-win_amd64.whl", hash = "sha256:91a84faf718e6f8b888ca63d0b2d6d185c8e2a198d2a7322d75c303e7097c8b7"}, + {file = "greenlet-1.1.3.post0.tar.gz", hash = "sha256:f5e09dc5c6e1796969fd4b775ea1417d70e49a5df29aaa8e5d10675d9e11872c"}, ] gunicorn = [ {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, @@ -1384,12 +1423,12 @@ hyperframe = [ {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, ] idna = [ - {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, - {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"}, - {file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"}, + {file = "importlib_metadata-5.0.0-py3-none-any.whl", hash = "sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43"}, + {file = "importlib_metadata-5.0.0.tar.gz", hash = "sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = 
"sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, @@ -1476,8 +1515,8 @@ lxml = [ {file = "lxml-4.9.1.tar.gz", hash = "sha256:fe749b052bb7233fe5d072fcb549221a8cb1a16725c47c37e42b0b9cb3ff2c3f"}, ] mako = [ - {file = "Mako-1.2.1-py3-none-any.whl", hash = "sha256:df3921c3081b013c8a2d5ff03c18375651684921ae83fd12e64800b7da923257"}, - {file = "Mako-1.2.1.tar.gz", hash = "sha256:f054a5ff4743492f1aa9ecc47172cb33b42b9d993cffcc146c9de17e717b0307"}, + {file = "Mako-1.2.3-py3-none-any.whl", hash = "sha256:c413a086e38cd885088d5e165305ee8eed04e8b3f8f62df343480da0a385735f"}, + {file = "Mako-1.2.3.tar.gz", hash = "sha256:7fde96466fcfeedb0eed94f187f20b23d85e4cb41444be0e542e2c8c65c396cd"}, ] markdown = [ {file = "Markdown-3.4.1-py3-none-any.whl", hash = "sha256:08fb8465cffd03d10b9dd34a5c3fea908e20391a2a90b88d66362cb05beed186"}, @@ -1535,48 +1574,48 @@ mysqlclient = [ {file = "mysqlclient-2.1.1.tar.gz", hash = "sha256:828757e419fb11dd6c5ed2576ec92c3efaa93a0f7c39e263586d1ee779c3d782"}, ] orjson = [ - {file = "orjson-3.7.12-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:5fbf5ec736c952e150a4399862bdd0043c1597e4d9e64adebe750855e72e2f65"}, - {file = "orjson-3.7.12-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:c09ed2e953447472c497ec682f4f40727744ed72672600e2e105ed5c373a82b1"}, - {file = "orjson-3.7.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdbbf6f8a23c66fa67661966891fd62341c5b7265e77fd6ecd7195aac26e76c0"}, - {file = "orjson-3.7.12-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a04df90f09e9c64c082d5e9af50e3e4c8cdc151b681f9d4928bb6bb17ef45c7b"}, - {file = "orjson-3.7.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:946d769d6e57e31838c8486e3f440540214690aaecca3bd2a57e31a227d27031"}, - {file = "orjson-3.7.12-cp310-cp310-manylinux_2_28_aarch64.whl", hash = 
"sha256:fff4760d3c04edcc99be0c9040b4cbb3f6c4ae5b4c4fc1ec1f70c3fe47a9ea5a"}, - {file = "orjson-3.7.12-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a7a57ab51d92235604044da31e1481e53b44b6df4688929dd8c176ff09381516"}, - {file = "orjson-3.7.12-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0966b2f6db800ed40138df80040b84ba6a180f50af9b9a4ed5f7231114f6beb8"}, - {file = "orjson-3.7.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ec3f644f1a1e3b642050ee1428311eaec2b959ffb6122ebc216143e67a939b64"}, - {file = "orjson-3.7.12-cp310-none-win_amd64.whl", hash = "sha256:75a7d1b61300e76b06767dc60ff3f38af4a6634cb8169bc8e9db2b4124c27e6d"}, - {file = "orjson-3.7.12-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8c618af13ae16e050342018a9d019365c6f7d1cba04f42fd8d8ca1d1a604a54c"}, - {file = "orjson-3.7.12-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9ef5f5c5fd1d0086f9323dafacfa902c2f4f120f319e689457ee2a66aebfc889"}, - {file = "orjson-3.7.12-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:eec6d61468ee0f251ac33d8738942390fda4e1e36f2d9c365ac271a87e78004b"}, - {file = "orjson-3.7.12-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:277ac2591570d88d5501cbf5855fc4a421cc51f3075b3be1b50ef2f8e8d2d014"}, - {file = "orjson-3.7.12-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae14ccad9b912abfee0e598a9fb57b6888ec3d2121983b757d9135702d1ab035"}, - {file = "orjson-3.7.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53b8ac02e683286e1979f1c57c026503c2433a26525adb1671142b0b13d52a7c"}, - {file = "orjson-3.7.12-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:4d76fc5708cf1a7a394b42c1c697a8635fbce73730455870127815b8d7229bcf"}, - {file = "orjson-3.7.12-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:39717add544688a3a938fcbc4122cf1b31030ba8ea1145d12fc6ee29d0eabe27"}, - {file = "orjson-3.7.12-cp37-cp37m-musllinux_1_1_aarch64.whl", 
hash = "sha256:93beb800fc35402db6c7d435fcf8b3e45822eb668d112c2def3e2851b3557bb1"}, - {file = "orjson-3.7.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:148b33d2a9f7e464e0a292f13fa11e226baf11b61495ad536977e800bc9ca845"}, - {file = "orjson-3.7.12-cp37-none-win_amd64.whl", hash = "sha256:2baefa5fb5133448f06d24b2523dfb3eda562a93bb69c33f539c7bbb8b0d61ed"}, - {file = "orjson-3.7.12-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:e6ae6d14062be5a210909f8816936e0b9b9747b8416d99ec927ab4b8d73bdce6"}, - {file = "orjson-3.7.12-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:22738105f3e926ef22702b14a9b79652f18f8dd45b798a126ee9644e0ac683d8"}, - {file = "orjson-3.7.12-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7e5aa0bf79f475c67d22eb4c085416ebb05042ce3c98abdbcfe11c1674d096d"}, - {file = "orjson-3.7.12-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8e7698b66ed751d9b887a27f5e02fb8405f06edafc47ac4542b2e10b2927f9e1"}, - {file = "orjson-3.7.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9accc4ba1cb83b70ac89f9de465b12e96bc6713158d27b655106413ed07944a"}, - {file = "orjson-3.7.12-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:5a9cc4f2231756b939f3aaa997024e748e06ac9bc5619343aa0e88b2833a567f"}, - {file = "orjson-3.7.12-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:e1082f82cfc2fd9ee42b3716900da8b13a2efd627a105438c5d98f2476ddcd54"}, - {file = "orjson-3.7.12-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a80722ed6545069d4f8fe16e02f5e9a67e09b6872c4c7501fa095d57471d96a6"}, - {file = "orjson-3.7.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b37eba028ef4f55587ac4eb6dffc5207a884cb506f79e4104f2d5587f163a676"}, - {file = "orjson-3.7.12-cp38-none-win_amd64.whl", hash = "sha256:94cc18a7d20b1fc36f6a60ad98027a27e1462fb815cf0245728285df0ea6b5cf"}, - {file = "orjson-3.7.12-cp39-cp39-macosx_10_7_x86_64.whl", hash = 
"sha256:71975ed815c929e14351cfde6d74ea892e850f74b02eaa57d2b96cc8c3fbed7b"}, - {file = "orjson-3.7.12-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:5a45baa048b462774b3b777725416006b7eec4b70b1bfc40d895cfa65c5b5eac"}, - {file = "orjson-3.7.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bffc45cd04480be9f18b790f28d716dde117de43b02e0f702935b584fada1de"}, - {file = "orjson-3.7.12-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a7122f702fe62e79ff3e8a6f975b5559440345ace5618ee1d97c49230f2839b6"}, - {file = "orjson-3.7.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6d1fd006691ea9e500ebba753dea471daef8972260e8ef48b4f356daa2fb3d1"}, - {file = "orjson-3.7.12-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:4b5851c0acc2a35173ba5fa854e15bf6f18757fafe1f7cce0fbc7fc24af3ec8a"}, - {file = "orjson-3.7.12-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:0f2ddd043450579ba35bbcf34e9217ee4de0fc52716ae3eb6cfff5e24fcc0ba3"}, - {file = "orjson-3.7.12-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ff6006857688991e800e9d2d992195451e25353c47b313f0db859016ceb811b3"}, - {file = "orjson-3.7.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:818405b65fa9d9d37330e57d87f91b40c10d2469d16914c7a819d0d494af482c"}, - {file = "orjson-3.7.12-cp39-none-win_amd64.whl", hash = "sha256:c1e4297b5dee3e14e068cc35505b3e1a626dd3fb8d357842902616564d2f713f"}, - {file = "orjson-3.7.12.tar.gz", hash = "sha256:05f20fa1a368207d16ecdf16072c3be58f85c4954cd2ed6c9704463963b9791a"}, + {file = "orjson-3.8.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:9a93850a1bdc300177b111b4b35b35299f046148ba23020f91d6efd7bf6b9d20"}, + {file = "orjson-3.8.0-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:7536a2a0b41672f824912aeab545c2467a9ff5ca73a066ff04fb81043a0a177a"}, + {file = 
"orjson-3.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66c19399bb3b058e3236af7910b57b19a4fc221459d722ed72a7dc90370ca090"}, + {file = "orjson-3.8.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b391d5c2ddc2f302d22909676b306cb6521022c3ee306c861a6935670291b2c"}, + {file = "orjson-3.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bdb1042970ca5f544a047d6c235a7eb4acdb69df75441dd1dfcbc406377ab37"}, + {file = "orjson-3.8.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:d189e2acb510e374700cb98cf11b54f0179916ee40f8453b836157ae293efa79"}, + {file = "orjson-3.8.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6a23b40c98889e9abac084ce5a1fb251664b41da9f6bdb40a4729e2288ed2ed4"}, + {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b68a42a31f8429728183c21fb440c21de1b62e5378d0d73f280e2d894ef8942e"}, + {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ff13410ddbdda5d4197a4a4c09969cb78c722a67550f0a63c02c07aadc624833"}, + {file = "orjson-3.8.0-cp310-none-win_amd64.whl", hash = "sha256:2d81e6e56bbea44be0222fb53f7b255b4e7426290516771592738ca01dbd053b"}, + {file = "orjson-3.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e2defd9527651ad39ec20ae03c812adf47ef7662bdd6bc07dabb10888d70dc62"}, + {file = "orjson-3.8.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9e6ac22cec72d5b39035b566e4b86c74b84866f12b5b0b6541506a080fb67d6d"}, + {file = "orjson-3.8.0-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e2f4a5542f50e3d336a18cb224fc757245ca66b1fd0b70b5dd4471b8ff5f2b0e"}, + {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1418feeb8b698b9224b1f024555895169d481604d5d884498c1838d7412794c"}, + {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:6e3da2e4bd27c3b796519ca74132c7b9e5348fb6746315e0f6c1592bc5cf1caf"}, + {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:896a21a07f1998648d9998e881ab2b6b80d5daac4c31188535e9d50460edfcf7"}, + {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:4065906ce3ad6195ac4d1bddde862fe811a42d7be237a1ff762666c3a4bb2151"}, + {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:5f856279872a4449fc629924e6a083b9821e366cf98b14c63c308269336f7c14"}, + {file = "orjson-3.8.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1b1cd25acfa77935bb2e791b75211cec0cfc21227fe29387e553c545c3ff87e1"}, + {file = "orjson-3.8.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3e2459d441ab8fd8b161aa305a73d5269b3cda13b5a2a39eba58b4dd3e394f49"}, + {file = "orjson-3.8.0-cp37-none-win_amd64.whl", hash = "sha256:d2b5dafbe68237a792143137cba413447f60dd5df428e05d73dcba10c1ea6fcf"}, + {file = "orjson-3.8.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:5b072ef8520cfe7bd4db4e3c9972d94336763c2253f7c4718a49e8733bada7b8"}, + {file = "orjson-3.8.0-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e68c699471ea3e2dd1b35bfd71c6a0a0e4885b64abbe2d98fce1ef11e0afaff3"}, + {file = "orjson-3.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7225e8b08996d1a0c804d3a641a53e796685e8c9a9fd52bd428980032cad9a"}, + {file = "orjson-3.8.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f687776a03c19f40b982fb5c414221b7f3d19097841571be2223d1569a59877"}, + {file = "orjson-3.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7990a9caf3b34016ac30be5e6cfc4e7efd76aa85614a1215b0eae4f0c7e3db59"}, + {file = "orjson-3.8.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:02d638d43951ba346a80f0abd5942a872cc87db443e073f6f6fc530fee81e19b"}, + {file = "orjson-3.8.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = 
"sha256:f4b46dbdda2f0bd6480c39db90b21340a19c3b0fcf34bc4c6e465332930ca539"}, + {file = "orjson-3.8.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:655d7387a1634a9a477c545eea92a1ee902ab28626d701c6de4914e2ed0fecd2"}, + {file = "orjson-3.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5edb93cdd3eb32977633fa7aaa6a34b8ab54d9c49cdcc6b0d42c247a29091b22"}, + {file = "orjson-3.8.0-cp38-none-win_amd64.whl", hash = "sha256:03ed95814140ff09f550b3a42e6821f855d981c94d25b9cc83e8cca431525d70"}, + {file = "orjson-3.8.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7b0e72974a5d3b101226899f111368ec2c9824d3e9804af0e5b31567f53ad98a"}, + {file = "orjson-3.8.0-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:6ea5fe20ef97545e14dd4d0263e4c5c3bc3d2248d39b4b0aed4b84d528dfc0af"}, + {file = "orjson-3.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6433c956f4a18112342a18281e0bec67fcd8b90be3a5271556c09226e045d805"}, + {file = "orjson-3.8.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87462791dd57de2e3e53068bf4b7169c125c50960f1bdda08ed30c797cb42a56"}, + {file = "orjson-3.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be02f6acee33bb63862eeff80548cd6b8a62e2d60ad2d8dfd5a8824cc43d8887"}, + {file = "orjson-3.8.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:a709c2249c1f2955dbf879506fd43fa08c31fdb79add9aeb891e3338b648bf60"}, + {file = "orjson-3.8.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:2065b6d280dc58f131ffd93393737961ff68ae7eb6884b68879394074cc03c13"}, + {file = "orjson-3.8.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fd6cac83136e06e538a4d17117eaeabec848c1e86f5742d4811656ad7ee475f"}, + {file = "orjson-3.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25b5e48fbb9f0b428a5e44cf740675c9281dd67816149fc33659803399adbbe8"}, + {file = "orjson-3.8.0-cp39-none-win_amd64.whl", hash = 
"sha256:2058653cc12b90e482beacb5c2d52dc3d7606f9e9f5a52c1c10ef49371e76f52"}, + {file = "orjson-3.8.0.tar.gz", hash = "sha256:fb42f7cf57d5804a9daa6b624e3490ec9e2631e042415f3aebe9f35a8492ba6c"}, ] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, @@ -1605,28 +1644,28 @@ priority = [ {file = "priority-2.0.0.tar.gz", hash = "sha256:c965d54f1b8d0d0b19479db3924c7c36cf672dbf2aec92d43fbdaf4492ba18c0"}, ] prometheus-client = [ - {file = "prometheus_client-0.14.1-py3-none-any.whl", hash = "sha256:522fded625282822a89e2773452f42df14b5a8e84a86433e3f8a189c1d54dc01"}, - {file = "prometheus_client-0.14.1.tar.gz", hash = "sha256:5459c427624961076277fdc6dc50540e2bacb98eebde99886e59ec55ed92093a"}, + {file = "prometheus_client-0.15.0-py3-none-any.whl", hash = "sha256:db7c05cbd13a0f79975592d112320f2605a325969b270a94b71dcabc47b931d2"}, + {file = "prometheus_client-0.15.0.tar.gz", hash = "sha256:be26aa452490cfcf6da953f9436e95a9f2b4d578ca80094b4458930e5f584ab1"}, ] prometheus-fastapi-instrumentator = [ - {file = "prometheus-fastapi-instrumentator-5.8.2.tar.gz", hash = "sha256:f1fa362043b974d138f5245acc973c32d1fa798bd2bd98ef2754befbf385a566"}, - {file = "prometheus_fastapi_instrumentator-5.8.2-py3-none-any.whl", hash = "sha256:5bfec239a924e1fed4ba94eb0addc73422d11821e894200b6d0e36a61c966827"}, + {file = "prometheus-fastapi-instrumentator-5.9.1.tar.gz", hash = "sha256:3651a72f73359a28e8afb0d370ebe3774147323ee2285e21236b229ce79172fc"}, + {file = "prometheus_fastapi_instrumentator-5.9.1-py3-none-any.whl", hash = "sha256:b5206ea9aa6975a0b07f3bf7376932b8a1b2983164b5abb04878e75ba336d9ed"}, ] protobuf = [ - {file = "protobuf-4.21.5-cp310-abi3-win32.whl", hash = "sha256:5310cbe761e87f0c1decce019d23f2101521d4dfff46034f8a12a53546036ec7"}, - {file = "protobuf-4.21.5-cp310-abi3-win_amd64.whl", hash = "sha256:e5c5a2886ae48d22a9d32fbb9b6636a089af3cd26b706750258ce1ca96cc0116"}, - {file = 
"protobuf-4.21.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ee04f5823ed98bb9a8c3b1dc503c49515e0172650875c3f76e225b223793a1f2"}, - {file = "protobuf-4.21.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:b04484d6f42f48c57dd2737a72692f4c6987529cdd148fb5b8e5f616862a2e37"}, - {file = "protobuf-4.21.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:5e0b272217aad8971763960238c1a1e6a65d50ef7824e23300da97569a251c55"}, - {file = "protobuf-4.21.5-cp37-cp37m-win32.whl", hash = "sha256:5eb0724615e90075f1d763983e708e1cef08e66b1891d8b8b6c33bc3b2f1a02b"}, - {file = "protobuf-4.21.5-cp37-cp37m-win_amd64.whl", hash = "sha256:011c0f267e85f5d73750b6c25f0155d5db1e9443cd3590ab669a6221dd8fcdb0"}, - {file = "protobuf-4.21.5-cp38-cp38-win32.whl", hash = "sha256:7b6f22463e2d1053d03058b7b4ceca6e4ed4c14f8c286c32824df751137bf8e7"}, - {file = "protobuf-4.21.5-cp38-cp38-win_amd64.whl", hash = "sha256:b52e7a522911a40445a5f588bd5b5e584291bfc5545e09b7060685e4b2ff814f"}, - {file = "protobuf-4.21.5-cp39-cp39-win32.whl", hash = "sha256:a7faa62b183d6a928e3daffd06af843b4287d16ef6e40f331575ecd236a7974d"}, - {file = "protobuf-4.21.5-cp39-cp39-win_amd64.whl", hash = "sha256:5e0ce02418ef03d7657a420ae8fd6fec4995ac713a3cb09164e95f694dbcf085"}, - {file = "protobuf-4.21.5-py2.py3-none-any.whl", hash = "sha256:bf711b451212dc5b0fa45ae7dada07d8e71a4b0ff0bc8e4783ee145f47ac4f82"}, - {file = "protobuf-4.21.5-py3-none-any.whl", hash = "sha256:3ec6f5b37935406bb9df9b277e79f8ed81d697146e07ef2ba8a5a272fb24b2c9"}, - {file = "protobuf-4.21.5.tar.gz", hash = "sha256:eb1106e87e095628e96884a877a51cdb90087106ee693925ec0a300468a9be3a"}, + {file = "protobuf-4.21.8-cp310-abi3-win32.whl", hash = "sha256:c252c55ee15175aa1b21b7b9896e6add5162d066d5202e75c39f96136f08cce3"}, + {file = "protobuf-4.21.8-cp310-abi3-win_amd64.whl", hash = "sha256:809ca0b225d3df42655a12f311dd0f4148a943c51f1ad63c38343e457492b689"}, + {file = "protobuf-4.21.8-cp37-abi3-macosx_10_9_universal2.whl", hash = 
"sha256:bbececaf3cfea9ea65ebb7974e6242d310d2a7772a6f015477e0d79993af4511"}, + {file = "protobuf-4.21.8-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:b02eabb9ebb1a089ed20626a90ad7a69cee6bcd62c227692466054b19c38dd1f"}, + {file = "protobuf-4.21.8-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:4761201b93e024bb70ee3a6a6425d61f3152ca851f403ba946fb0cde88872661"}, + {file = "protobuf-4.21.8-cp37-cp37m-win32.whl", hash = "sha256:f2d55ff22ec300c4d954d3b0d1eeb185681ec8ad4fbecff8a5aee6a1cdd345ba"}, + {file = "protobuf-4.21.8-cp37-cp37m-win_amd64.whl", hash = "sha256:c5f94911dd8feb3cd3786fc90f7565c9aba7ce45d0f254afd625b9628f578c3f"}, + {file = "protobuf-4.21.8-cp38-cp38-win32.whl", hash = "sha256:b37b76efe84d539f16cba55ee0036a11ad91300333abd213849cbbbb284b878e"}, + {file = "protobuf-4.21.8-cp38-cp38-win_amd64.whl", hash = "sha256:2c92a7bfcf4ae76a8ac72e545e99a7407e96ffe52934d690eb29a8809ee44d7b"}, + {file = "protobuf-4.21.8-cp39-cp39-win32.whl", hash = "sha256:89d641be4b5061823fa0e463c50a2607a97833e9f8cfb36c2f91ef5ccfcc3861"}, + {file = "protobuf-4.21.8-cp39-cp39-win_amd64.whl", hash = "sha256:bc471cf70a0f53892fdd62f8cd4215f0af8b3f132eeee002c34302dff9edd9b6"}, + {file = "protobuf-4.21.8-py2.py3-none-any.whl", hash = "sha256:a55545ce9eec4030cf100fcb93e861c622d927ef94070c1a3c01922902464278"}, + {file = "protobuf-4.21.8-py3-none-any.whl", hash = "sha256:0f236ce5016becd989bf39bd20761593e6d8298eccd2d878eda33012645dc369"}, + {file = "protobuf-4.21.8.tar.gz", hash = "sha256:427426593b55ff106c84e4a88cac855175330cb6eb7e889e85aaa7b5652b686d"}, ] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, @@ -1640,76 +1679,81 @@ pycparser = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] pydantic = [ - {file = "pydantic-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:9c9e04a6cdb7a363d7cb3ccf0efea51e0abb48e180c0d31dca8d247967d85c6e"}, - {file = "pydantic-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fafe841be1103f340a24977f61dee76172e4ae5f647ab9e7fd1e1fca51524f08"}, - {file = "pydantic-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afacf6d2a41ed91fc631bade88b1d319c51ab5418870802cedb590b709c5ae3c"}, - {file = "pydantic-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ee0d69b2a5b341fc7927e92cae7ddcfd95e624dfc4870b32a85568bd65e6131"}, - {file = "pydantic-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ff68fc85355532ea77559ede81f35fff79a6a5543477e168ab3a381887caea76"}, - {file = "pydantic-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c0f5e142ef8217019e3eef6ae1b6b55f09a7a15972958d44fbd228214cede567"}, - {file = "pydantic-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:615661bfc37e82ac677543704437ff737418e4ea04bef9cf11c6d27346606044"}, - {file = "pydantic-1.9.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:328558c9f2eed77bd8fffad3cef39dbbe3edc7044517f4625a769d45d4cf7555"}, - {file = "pydantic-1.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bd446bdb7755c3a94e56d7bdfd3ee92396070efa8ef3a34fab9579fe6aa1d84"}, - {file = "pydantic-1.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0b214e57623a535936005797567231a12d0da0c29711eb3514bc2b3cd008d0f"}, - {file = "pydantic-1.9.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d8ce3fb0841763a89322ea0432f1f59a2d3feae07a63ea2c958b2315e1ae8adb"}, - {file = "pydantic-1.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b34ba24f3e2d0b39b43f0ca62008f7ba962cff51efa56e64ee25c4af6eed987b"}, - {file = "pydantic-1.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:84d76ecc908d917f4684b354a39fd885d69dd0491be175f3465fe4b59811c001"}, - {file = 
"pydantic-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4de71c718c9756d679420c69f216776c2e977459f77e8f679a4a961dc7304a56"}, - {file = "pydantic-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5803ad846cdd1ed0d97eb00292b870c29c1f03732a010e66908ff48a762f20e4"}, - {file = "pydantic-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8c5360a0297a713b4123608a7909e6869e1b56d0e96eb0d792c27585d40757f"}, - {file = "pydantic-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:cdb4272678db803ddf94caa4f94f8672e9a46bae4a44f167095e4d06fec12979"}, - {file = "pydantic-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:19b5686387ea0d1ea52ecc4cffb71abb21702c5e5b2ac626fd4dbaa0834aa49d"}, - {file = "pydantic-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:32e0b4fb13ad4db4058a7c3c80e2569adbd810c25e6ca3bbd8b2a9cc2cc871d7"}, - {file = "pydantic-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91089b2e281713f3893cd01d8e576771cd5bfdfbff5d0ed95969f47ef6d676c3"}, - {file = "pydantic-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e631c70c9280e3129f071635b81207cad85e6c08e253539467e4ead0e5b219aa"}, - {file = "pydantic-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b3946f87e5cef3ba2e7bd3a4eb5a20385fe36521d6cc1ebf3c08a6697c6cfb3"}, - {file = "pydantic-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5565a49effe38d51882cb7bac18bda013cdb34d80ac336428e8908f0b72499b0"}, - {file = "pydantic-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bd67cb2c2d9602ad159389c29e4ca964b86fa2f35c2faef54c3eb28b4efd36c8"}, - {file = "pydantic-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4aafd4e55e8ad5bd1b19572ea2df546ccace7945853832bb99422a79c70ce9b8"}, - {file = "pydantic-1.9.2-cp38-cp38-win_amd64.whl", hash = 
"sha256:d70916235d478404a3fa8c997b003b5f33aeac4686ac1baa767234a0f8ac2326"}, - {file = "pydantic-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0ca86b525264daa5f6b192f216a0d1e860b7383e3da1c65a1908f9c02f42801"}, - {file = "pydantic-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1061c6ee6204f4f5a27133126854948e3b3d51fcc16ead2e5d04378c199b2f44"}, - {file = "pydantic-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e78578f0c7481c850d1c969aca9a65405887003484d24f6110458fb02cca7747"}, - {file = "pydantic-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5da164119602212a3fe7e3bc08911a89db4710ae51444b4224c2382fd09ad453"}, - {file = "pydantic-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ead3cd020d526f75b4188e0a8d71c0dbbe1b4b6b5dc0ea775a93aca16256aeb"}, - {file = "pydantic-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7d0f183b305629765910eaad707800d2f47c6ac5bcfb8c6397abdc30b69eeb15"}, - {file = "pydantic-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1a68f4f65a9ee64b6ccccb5bf7e17db07caebd2730109cb8a95863cfa9c4e55"}, - {file = "pydantic-1.9.2-py3-none-any.whl", hash = "sha256:78a4d6bdfd116a559aeec9a4cfe77dda62acc6233f8b56a716edad2651023e5e"}, - {file = "pydantic-1.9.2.tar.gz", hash = "sha256:8cb0bc509bfb71305d7a59d00163d5f9fc4530f0881ea32c74ff4f74c85f3d3d"}, + {file = "pydantic-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd"}, + {file = "pydantic-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98"}, + {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912"}, + {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559"}, + {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236"}, + {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c"}, + {file = "pydantic-1.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644"}, + {file = "pydantic-1.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f"}, + {file = "pydantic-1.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a"}, + {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525"}, + {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283"}, + {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42"}, + {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52"}, + {file = "pydantic-1.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c"}, + {file = "pydantic-1.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5"}, + {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c"}, + {file = 
"pydantic-1.10.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254"}, + {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5"}, + {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d"}, + {file = "pydantic-1.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2"}, + {file = "pydantic-1.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13"}, + {file = "pydantic-1.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116"}, + {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624"}, + {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1"}, + {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9"}, + {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965"}, + {file = "pydantic-1.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e"}, + {file = "pydantic-1.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488"}, + {file = "pydantic-1.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41"}, + 
{file = "pydantic-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b"}, + {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe"}, + {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d"}, + {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda"}, + {file = "pydantic-1.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6"}, + {file = "pydantic-1.10.2-py3-none-any.whl", hash = "sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709"}, + {file = "pydantic-1.10.2.tar.gz", hash = "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410"}, ] pygit2 = [ - {file = "pygit2-1.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:493a0ce9cbc580855942cdcb2bf3b674f3295c26233e990bfa98058c321313f1"}, - {file = "pygit2-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:73a5fd0304252c84f5f9f1b5b0eadfa3641a04d11f96d89fbd77ffade52adc37"}, - {file = "pygit2-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6f812ec8ea10e83b05a770f4f95808f729bc821e0548af69fd0a80e17876003"}, - {file = "pygit2-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71a6e0866bb033b1dcfc62fedfe44b12dee92f619c6cfed7ca1de6867fba31f9"}, - {file = "pygit2-1.10.0-cp310-cp310-win32.whl", hash = "sha256:9b3b328ad53420a16908a5bba4923d3b26eef27a570802e68c5ed5afb0eca0f3"}, - {file = "pygit2-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:21021a48aa2151e5f0504d56099a194cffc0fede688703f8d0764edf186d802b"}, - {file = 
"pygit2-1.10.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5717cd2fd1a0d23a2bbdf8ce4271aa72e1d283d258c88b341d9d9c4673707e73"}, - {file = "pygit2-1.10.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0c4506581e816e2357adf4e9b642de8b386778cbf09bd870a9843ef9c9a5379"}, - {file = "pygit2-1.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c514d6d9b051f2f5f0d8e277ccefda3380bfbf38047e12c92f8e3f110d27314"}, - {file = "pygit2-1.10.0-cp37-cp37m-win32.whl", hash = "sha256:2a23e157251a77f2cfd944ae119a730ef5fa66132eb15119b01b016650a1dbae"}, - {file = "pygit2-1.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c380f0a8c669aeaf71d9d73f8de16502dc050a6022f0571c77bd5efecf88492c"}, - {file = "pygit2-1.10.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a0f30d90e52a664a8b1a6ae30067e503a576fc53d40c6a1bc533dc67a70b1410"}, - {file = "pygit2-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9308e78f83e46c95db59128161a5dfe5f6a1652342238224142474d41a0d7011"}, - {file = "pygit2-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e64db8ec4ee0aaf6e726fa4655ea9cbde7a7f2cf34f134f25f6faa52a27d618"}, - {file = "pygit2-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f702aacea8ace3422e02ac20161a4f1afcf13bd0d20edd18726ff386165bbb"}, - {file = "pygit2-1.10.0-cp38-cp38-win32.whl", hash = "sha256:e86bc4c74c40fb46156158c1dd774c1f0e50ee3a860af4131ce2ac1dfff4fc34"}, - {file = "pygit2-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:f1785ba78690b06581694b2e898b68cd1bc344417475c0b994d574b0d2010160"}, - {file = "pygit2-1.10.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7e9d7db4fe6ddf8f7ab29c6a24a8a9bd0af92a214ad0e812b49eb7c411cddf3e"}, - {file = "pygit2-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:56d44a3437a6d642c98a33830d8e3d2556e608abba412e451fec514702fa9a76"}, - {file = 
"pygit2-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8eb687e4bf7b46ca545f50eb25c5e9c41a86be59ae51e83ce42f7793658b560"}, - {file = "pygit2-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71e084dd1c075c0ca3f4b8015ce3cc2dd73fc1c0ead52b6a79990ec5ab7f67d9"}, - {file = "pygit2-1.10.0-cp39-cp39-win32.whl", hash = "sha256:e1f4d7e981c9240912cd587e9b5f1d00a03b79248fcf15add5e3944d11d21884"}, - {file = "pygit2-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2ee457af2d6ca47838d5ddd0c558af829e7db8d1402f61e4695024d3ce54301d"}, - {file = "pygit2-1.10.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:55f05d86b5d635f98816183b1eadbc0349dc26451a58b1920051b2f7593b9d0a"}, - {file = "pygit2-1.10.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd17ebbbcf5c7f10e2dfbd2b7b6abdf5686069a0a1a84c72c1c6bf17c26c72dd"}, - {file = "pygit2-1.10.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2098086b479b6b744d5fb2822fd2d01d14d05ac84c34d911c46b609bd5435c18"}, - {file = "pygit2-1.10.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:01b66a9de0a753ccd2e835b8598f119bccb587bcde9f78adc24a73ead456b083"}, - {file = "pygit2-1.10.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ca00acb3d117d736d9dc1144092fdb899f95e1b1aba6d2c3b6df58b80b24dfb"}, - {file = "pygit2-1.10.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c61db929b4bc52796f22516199ae697a594bbc205e97275f61365c0225ac1130"}, - {file = "pygit2-1.10.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:33cf14c6188e6494231547e581790e73e66114b5d5e6ef8617487b8a5e13e987"}, - {file = "pygit2-1.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23de3170eb76bcaa207fe23caea939bdfefeabdefc3f09191acf0d0b461ce87b"}, - {file = 
"pygit2-1.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a70e219feee75b18cfc78bd2cee755760be8ec4bedd40aa10d6cd257567a44a"}, - {file = "pygit2-1.10.0.tar.gz", hash = "sha256:7c751eee88c731b922e4e487ee287e2e40906b2bd32d0bfd2105947f63e867de"}, + {file = "pygit2-1.10.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e3f60e47c6a7a87f18a112753eb98848f4c5333986bec1940558ce09cdaf53bf"}, + {file = "pygit2-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f0f69ea42231bebf08006c85cd5aa233c9c047c5a88b7fcfb4b639476b70e31b"}, + {file = "pygit2-1.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0097b6631ef05c837c4800fad559d0865a90c55475a18f38c6f2f5a12750e914"}, + {file = "pygit2-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddb3b5bdcdfae205d9cc0c80bc53fad222a5ba67e66fd336ef223f86b0ac5835"}, + {file = "pygit2-1.10.1-cp310-cp310-win32.whl", hash = "sha256:3efd2a2ab2bb443e1b758525546d74a5a12fe27006194d3c02b3e6ecc1e101e6"}, + {file = "pygit2-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:11225811194ae6b9dbb34c2e8900e0eba6eacc180d82766e3dbddcbd2c6e6454"}, + {file = "pygit2-1.10.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:73e251d0b73f1010ad28c20bcdcf33e312fb363f10b7268ad2bcfa09770f9ac2"}, + {file = "pygit2-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cb73f7967207a9ac485722ef0e517e5ca482f3c1308a0ac934707cb267b0ac7a"}, + {file = "pygit2-1.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b115bef251af4daf18f2f967287b56da2eae2941d5389dc1666bd0160892d769"}, + {file = "pygit2-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd55a6cf7ad6276fb5772e5c60c51fca2d9a5e68ea3e7237847421c10080a68"}, + {file = "pygit2-1.10.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:33138c256ad0ff084f5d8a82ab7d280f9ed6706ebb000ac82e3d133e2d82b366"}, + {file = 
"pygit2-1.10.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f4f507e5cd775f6d5d95ec65761af4cdb33b2f859af15bf10a06d11efd0d3b2"}, + {file = "pygit2-1.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:752f844d5379081fae5ef78e3bf6f0f35ae9b11aafc37e5e03e1c3607b196806"}, + {file = "pygit2-1.10.1-cp37-cp37m-win32.whl", hash = "sha256:b31ffdbc87629613ae03a533e01eee79112a12f66faf375fa08934074044a664"}, + {file = "pygit2-1.10.1-cp37-cp37m-win_amd64.whl", hash = "sha256:e09386b71ad474f2c2c02b6b251fa904b1145dabfe9095955ab30a789aaf84c0"}, + {file = "pygit2-1.10.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:564e832e750f889aea3bb3e82674e1c860c9b89a141404530271e1341723a258"}, + {file = "pygit2-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:43bb910272866eb822e930dbd0feecc340e0c24934143aab651fa180cc5ebfb0"}, + {file = "pygit2-1.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e4905cbb87db598b1cb38800ff995c0ba1f58745e2f52af4d54dbc93b9bda8"}, + {file = "pygit2-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb1f4689ce38cd62a7000d38602ba4d704df5cec708e5d98dadaffcf510f3317"}, + {file = "pygit2-1.10.1-cp38-cp38-win32.whl", hash = "sha256:b67ef30f3c022be1d6da9ef0188f60fc2d20639bff44693ef5653818e887001b"}, + {file = "pygit2-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:dcd849c44bd743d829dbd9dc9d7e13c14cf31a47c22e2e3f9e98fa845a8b8b28"}, + {file = "pygit2-1.10.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e8bb9002924975271d64e8869b44ea97f068e85b5edd03e802e4917b770aaf2d"}, + {file = "pygit2-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:889ca83528c0649afd970da700cc6ed47dc340481f146a39ba5bfbeca1ddd6f8"}, + {file = "pygit2-1.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5465db21c6fd481ec29aa7afcca9a85b1fdb19b2f2d09a31b4bdba2f1bd0e75"}, + {file = 
"pygit2-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ceecd5d30583f9db56aadcd7238bb3c76a2934d8a932de47aed77fe3c188e7"}, + {file = "pygit2-1.10.1-cp39-cp39-win32.whl", hash = "sha256:9d6e1270b91e7bf70185bb4c3686e04cca87a385c8a2d5c74eec8770091531be"}, + {file = "pygit2-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:d4251830276018c2346ddccad4b4ce06ed1d983b002a633c4d894b13669052d0"}, + {file = "pygit2-1.10.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7eb2cee54a1cb468b5502493ee4f3ec2f1f82db9c46fab7dacaa37afc4fcde8e"}, + {file = "pygit2-1.10.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:411dc8af5f25c30a0c3d79ee1e22fb892d6fd6ccb54d4c1fb7746e6274e36426"}, + {file = "pygit2-1.10.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe41da630f4e7cb290dc7e97edf30a59d634426af52a89d4ab5c0fb1ea9ccfe4"}, + {file = "pygit2-1.10.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9da53c6f5c08308450059d7dfb3067d59c45f14bee99743e536c5f9d9823f154"}, + {file = "pygit2-1.10.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb49f9469a893f75f105cdf2c79254859aaf2fdce1078c38514ca12fe185a759"}, + {file = "pygit2-1.10.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff838665d6410b5a605f53c1ccd2d2f87ca30de59e89773e7cb5e10211426f90"}, + {file = "pygit2-1.10.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9d23bb613f5692da78c09a79ae40d6ced57b772ae9153aed23a9aa1889a16c85"}, + {file = "pygit2-1.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3a3cc867fa6907bfc78d7d1322f3dabd4107b16238205df7e2dec9ee265f0c0"}, + {file = "pygit2-1.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb3eb2f1d437db6e115d5f56d122f2f3737fa2e6063aa42e4d856ca76d785ce6"}, + {file = "pygit2-1.10.1.tar.gz", hash = 
"sha256:354651bf062c02d1f08041d6fbf1a9b4bf7a93afce65979bdc08bdc65653aa2e"}, ] pyparsing = [ {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, @@ -1720,12 +1764,12 @@ pytest = [ {file = "pytest-7.1.3.tar.gz", hash = "sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39"}, ] pytest-asyncio = [ - {file = "pytest-asyncio-0.19.0.tar.gz", hash = "sha256:ac4ebf3b6207259750bc32f4c1d8fcd7e79739edbc67ad0c58dd150b1d072fed"}, - {file = "pytest_asyncio-0.19.0-py3-none-any.whl", hash = "sha256:7a97e37cfe1ed296e2e84941384bdd37c376453912d397ed39293e0916f521fa"}, + {file = "pytest-asyncio-0.20.1.tar.gz", hash = "sha256:626699de2a747611f3eeb64168b3575f70439b06c3d0206e6ceaeeb956e65519"}, + {file = "pytest_asyncio-0.20.1-py3-none-any.whl", hash = "sha256:2c85a835df33fda40fe3973b451e0c194ca11bc2c007eabff90bb3d156fc172b"}, ] pytest-cov = [ - {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, - {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, + {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, + {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, ] pytest-forked = [ {file = "pytest-forked-1.4.0.tar.gz", hash = "sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e"}, @@ -1758,65 +1802,74 @@ rfc3986 = [ {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, ] +setuptools = [ + {file = "setuptools-65.5.0-py3-none-any.whl", hash = "sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356"}, + 
{file = "setuptools-65.5.0.tar.gz", hash = "sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17"}, +] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] sniffio = [ - {file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"}, - {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"}, + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, ] sortedcontainers = [ {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, ] sqlalchemy = [ - {file = "SQLAlchemy-1.4.40-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:b07fc38e6392a65935dc8b486229679142b2ea33c94059366b4d8b56f1e35a97"}, - {file = "SQLAlchemy-1.4.40-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fb4edb6c354eac0fcc07cb91797e142f702532dbb16c1d62839d6eec35f814cf"}, - {file = "SQLAlchemy-1.4.40-cp27-cp27m-win32.whl", hash = "sha256:2026632051a93997cf8f6fda14360f99230be1725b7ab2ef15be205a4b8a5430"}, - {file = "SQLAlchemy-1.4.40-cp27-cp27m-win_amd64.whl", hash = "sha256:f2aa85aebc0ef6b342d5d3542f969caa8c6a63c8d36cf5098769158a9fa2123c"}, - {file = "SQLAlchemy-1.4.40-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a0b9e3d81f86ba04007f0349e373a5b8c81ec2047aadb8d669caf8c54a092461"}, - {file = "SQLAlchemy-1.4.40-cp310-cp310-macosx_10_15_x86_64.whl", 
hash = "sha256:1ab08141d93de83559f6a7d9a962830f918623a885b3759ec2b9d1a531ff28fe"}, - {file = "SQLAlchemy-1.4.40-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00dd998b43b282c71de46b061627b5edb9332510eb1edfc5017b9e4356ed44ea"}, - {file = "SQLAlchemy-1.4.40-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bb342c0e25cc8f78a0e7c692da3b984f072666b316fbbec2a0e371cb4dfef5f0"}, - {file = "SQLAlchemy-1.4.40-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23b693876ac7963b6bc7b1a5f3a2642f38d2624af834faad5933913928089d1b"}, - {file = "SQLAlchemy-1.4.40-cp310-cp310-win32.whl", hash = "sha256:2cf50611ef4221ad587fb7a1708e61ff72966f84330c6317642e08d6db4138fd"}, - {file = "SQLAlchemy-1.4.40-cp310-cp310-win_amd64.whl", hash = "sha256:26ee4dbac5dd7abf18bf3cd8f04e51f72c339caf702f68172d308888cd26c6c9"}, - {file = "SQLAlchemy-1.4.40-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:b41b87b929118838bafc4bb18cf3c5cd1b3be4b61cd9042e75174df79e8ac7a2"}, - {file = "SQLAlchemy-1.4.40-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:885e11638946472b4a0a7db8e6df604b2cf64d23dc40eedc3806d869fcb18fae"}, - {file = "SQLAlchemy-1.4.40-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b7ff0a8bf0aec1908b92b8dfa1246128bf4f94adbdd3da6730e9c542e112542d"}, - {file = "SQLAlchemy-1.4.40-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfa8ab4ba0c97ab6bcae1f0948497d14c11b6c6ecd1b32b8a79546a0823d8211"}, - {file = "SQLAlchemy-1.4.40-cp36-cp36m-win32.whl", hash = "sha256:d259fa08e4b3ed952c01711268bcf6cd2442b0c54866d64aece122f83da77c6d"}, - {file = "SQLAlchemy-1.4.40-cp36-cp36m-win_amd64.whl", hash = "sha256:c8d974c991eef0cd29418a5957ae544559dc326685a6f26b3a914c87759bf2f4"}, - {file = 
"SQLAlchemy-1.4.40-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:28b1791a30d62fc104070965f1a2866699c45bbf5adc0be0cf5f22935edcac58"}, - {file = "SQLAlchemy-1.4.40-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7ccdca6cd167611f4a62a8c2c0c4285c2535640d77108f782ce3f3cccb70f3a"}, - {file = "SQLAlchemy-1.4.40-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:69deec3a94de10062080d91e1ba69595efeafeafe68b996426dec9720031fb25"}, - {file = "SQLAlchemy-1.4.40-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ad778f4e80913fb171247e4fa82123d0068615ae1d51a9791fc4284cb81748"}, - {file = "SQLAlchemy-1.4.40-cp37-cp37m-win32.whl", hash = "sha256:9ced2450c9fd016f9232d976661623e54c450679eeefc7aa48a3d29924a63189"}, - {file = "SQLAlchemy-1.4.40-cp37-cp37m-win_amd64.whl", hash = "sha256:cdee4d475e35684d210dc6b430ff8ca2ed0636378ac19b457e2f6f350d1f5acc"}, - {file = "SQLAlchemy-1.4.40-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:08b47c971327e733ffd6bae2d4f50a7b761793efe69d41067fcba86282819eea"}, - {file = "SQLAlchemy-1.4.40-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cf03d37819dc17a388d313919daf32058d19ba1e592efdf14ce8cbd997e6023"}, - {file = "SQLAlchemy-1.4.40-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a62c0ecbb9976550f26f7bf75569f425e661e7249349487f1483115e5fc893a6"}, - {file = "SQLAlchemy-1.4.40-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ec440990ab00650d0c7ea2c75bc225087afdd7ddcb248e3d934def4dff62762"}, - {file = "SQLAlchemy-1.4.40-cp38-cp38-win32.whl", hash = "sha256:2b64955850a14b9d481c17becf0d3f62fb1bb31ac2c45c2caf5ad06d9e811187"}, - {file = "SQLAlchemy-1.4.40-cp38-cp38-win_amd64.whl", hash = 
"sha256:959bf4390766a8696aa01285016c766b4eb676f712878aac5fce956dd49695d9"}, - {file = "SQLAlchemy-1.4.40-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:0992f3cc640ec0f88f721e426da884c34ff0a60eb73d3d64172e23dfadfc8a0b"}, - {file = "SQLAlchemy-1.4.40-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa9e0d7832b7511b3b3fd0e67fac85ff11fd752834c143ca2364c9b778c0485a"}, - {file = "SQLAlchemy-1.4.40-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c9d0f1a9538cc5e75f2ea0cb6c3d70155a1b7f18092c052e0d84105622a41b63"}, - {file = "SQLAlchemy-1.4.40-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c956a5d1adb49a35d78ef0fae26717afc48a36262359bb5b0cbd7a3a247c26f"}, - {file = "SQLAlchemy-1.4.40-cp39-cp39-win32.whl", hash = "sha256:6b70d02bbe1adbbf715d2249cacf9ac17c6f8d22dfcb3f1a4fbc5bf64364da8a"}, - {file = "SQLAlchemy-1.4.40-cp39-cp39-win_amd64.whl", hash = "sha256:bf073c619b5a7f7cd731507d0fdc7329bee14b247a63b0419929e4acd24afea8"}, - {file = "SQLAlchemy-1.4.40.tar.gz", hash = "sha256:44a660506080cc975e1dfa5776fe5f6315ddc626a77b50bf0eee18b0389ea265"}, + {file = "SQLAlchemy-1.4.42-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:28e881266a172a4d3c5929182fde6bb6fba22ac93f137d5380cc78a11a9dd124"}, + {file = "SQLAlchemy-1.4.42-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ca9389a00f639383c93ed00333ed763812f80b5ae9e772ea32f627043f8c9c88"}, + {file = "SQLAlchemy-1.4.42-cp27-cp27m-win32.whl", hash = "sha256:1d0c23ecf7b3bc81e29459c34a3f4c68ca538de01254e24718a7926810dc39a6"}, + {file = "SQLAlchemy-1.4.42-cp27-cp27m-win_amd64.whl", hash = "sha256:6c9d004eb78c71dd4d3ce625b80c96a827d2e67af9c0d32b1c1e75992a7916cc"}, + {file = "SQLAlchemy-1.4.42-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9e3a65ce9ed250b2f096f7b559fe3ee92e6605fab3099b661f0397a9ac7c8d95"}, + {file = 
"SQLAlchemy-1.4.42-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:2e56dfed0cc3e57b2f5c35719d64f4682ef26836b81067ee6cfad062290fd9e2"}, + {file = "SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b42c59ffd2d625b28cdb2ae4cde8488543d428cba17ff672a543062f7caee525"}, + {file = "SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:22459fc1718785d8a86171bbe7f01b5c9d7297301ac150f508d06e62a2b4e8d2"}, + {file = "SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df76e9c60879fdc785a34a82bf1e8691716ffac32e7790d31a98d7dec6e81545"}, + {file = "SQLAlchemy-1.4.42-cp310-cp310-win32.whl", hash = "sha256:e7e740453f0149437c101ea4fdc7eea2689938c5760d7dcc436c863a12f1f565"}, + {file = "SQLAlchemy-1.4.42-cp310-cp310-win_amd64.whl", hash = "sha256:effc89e606165ca55f04f3f24b86d3e1c605e534bf1a96e4e077ce1b027d0b71"}, + {file = "SQLAlchemy-1.4.42-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:97ff50cd85bb907c2a14afb50157d0d5486a4b4639976b4a3346f34b6d1b5272"}, + {file = "SQLAlchemy-1.4.42-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12c6949bae10f1012ab5c0ea52ab8db99adcb8c7b717938252137cdf694c775"}, + {file = "SQLAlchemy-1.4.42-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11b2ec26c5d2eefbc3e6dca4ec3d3d95028be62320b96d687b6e740424f83b7d"}, + {file = "SQLAlchemy-1.4.42-cp311-cp311-win32.whl", hash = "sha256:6045b3089195bc008aee5c273ec3ba9a93f6a55bc1b288841bd4cfac729b6516"}, + {file = "SQLAlchemy-1.4.42-cp311-cp311-win_amd64.whl", hash = "sha256:0501f74dd2745ec38f44c3a3900fb38b9db1ce21586b691482a19134062bf049"}, + {file = "SQLAlchemy-1.4.42-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:6e39e97102f8e26c6c8550cb368c724028c575ec8bc71afbbf8faaffe2b2092a"}, + {file = 
"SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15d878929c30e41fb3d757a5853b680a561974a0168cd33a750be4ab93181628"}, + {file = "SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fa5b7eb2051e857bf83bade0641628efe5a88de189390725d3e6033a1fff4257"}, + {file = "SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e1c5f8182b4f89628d782a183d44db51b5af84abd6ce17ebb9804355c88a7b5"}, + {file = "SQLAlchemy-1.4.42-cp36-cp36m-win32.whl", hash = "sha256:a7dd5b7b34a8ba8d181402d824b87c5cee8963cb2e23aa03dbfe8b1f1e417cde"}, + {file = "SQLAlchemy-1.4.42-cp36-cp36m-win_amd64.whl", hash = "sha256:5ede1495174e69e273fad68ad45b6d25c135c1ce67723e40f6cf536cb515e20b"}, + {file = "SQLAlchemy-1.4.42-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:9256563506e040daddccaa948d055e006e971771768df3bb01feeb4386c242b0"}, + {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4948b6c5f4e56693bbeff52f574279e4ff972ea3353f45967a14c30fb7ae2beb"}, + {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1811a0b19a08af7750c0b69e38dec3d46e47c4ec1d74b6184d69f12e1c99a5e0"}, + {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b01d9cd2f9096f688c71a3d0f33f3cd0af8549014e66a7a7dee6fc214a7277d"}, + {file = "SQLAlchemy-1.4.42-cp37-cp37m-win32.whl", hash = "sha256:bd448b262544b47a2766c34c0364de830f7fb0772d9959c1c42ad61d91ab6565"}, + {file = "SQLAlchemy-1.4.42-cp37-cp37m-win_amd64.whl", hash = "sha256:04f2598c70ea4a29b12d429a80fad3a5202d56dce19dd4916cc46a965a5ca2e9"}, + {file = "SQLAlchemy-1.4.42-cp38-cp38-macosx_10_15_x86_64.whl", hash = 
"sha256:3ab7c158f98de6cb4f1faab2d12973b330c2878d0c6b689a8ca424c02d66e1b3"}, + {file = "SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ee377eb5c878f7cefd633ab23c09e99d97c449dd999df639600f49b74725b80"}, + {file = "SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:934472bb7d8666727746a75670a1f8d91a9cae8c464bba79da30a0f6faccd9e1"}, + {file = "SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb94a3d1ba77ff2ef11912192c066f01e68416f554c194d769391638c8ad09a"}, + {file = "SQLAlchemy-1.4.42-cp38-cp38-win32.whl", hash = "sha256:f0f574465b78f29f533976c06b913e54ab4980b9931b69aa9d306afff13a9471"}, + {file = "SQLAlchemy-1.4.42-cp38-cp38-win_amd64.whl", hash = "sha256:a85723c00a636eed863adb11f1e8aaa36ad1c10089537823b4540948a8429798"}, + {file = "SQLAlchemy-1.4.42-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5ce6929417d5dce5ad1d3f147db81735a4a0573b8fb36e3f95500a06eaddd93e"}, + {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723e3b9374c1ce1b53564c863d1a6b2f1dc4e97b1c178d9b643b191d8b1be738"}, + {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:876eb185911c8b95342b50a8c4435e1c625944b698a5b4a978ad2ffe74502908"}, + {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fd49af453e590884d9cdad3586415922a8e9bb669d874ee1dc55d2bc425aacd"}, + {file = "SQLAlchemy-1.4.42-cp39-cp39-win32.whl", hash = "sha256:e4ef8cb3c5b326f839bfeb6af5f406ba02ad69a78c7aac0fbeeba994ad9bb48a"}, + {file = "SQLAlchemy-1.4.42-cp39-cp39-win_amd64.whl", hash = "sha256:5f966b64c852592469a7eb759615bbd351571340b8b344f1d3fa2478b5a4c934"}, + {file = "SQLAlchemy-1.4.42.tar.gz", hash = 
"sha256:177e41914c476ed1e1b77fd05966ea88c094053e17a85303c4ce007f88eff363"}, ] srcinfo = [ {file = "srcinfo-0.0.8-py3-none-any.whl", hash = "sha256:0922ee4302b927d7ddea74c47e539b226a0a7738dc89f95b66404a28d07f3f6b"}, {file = "srcinfo-0.0.8.tar.gz", hash = "sha256:5ac610cf8b15d4b0a0374bd1f7ad301675c2938f0414addf3ef7d7e3fcaf5c65"}, ] starlette = [ - {file = "starlette-0.19.1-py3-none-any.whl", hash = "sha256:5a60c5c2d051f3a8eb546136aa0c9399773a689595e099e0877704d5888279bf"}, - {file = "starlette-0.19.1.tar.gz", hash = "sha256:c6d21096774ecb9639acad41b86b7706e52ba3bf1dc13ea4ed9ad593d47e24c7"}, + {file = "starlette-0.20.4-py3-none-any.whl", hash = "sha256:c0414d5a56297d37f3db96a84034d61ce29889b9eaccf65eb98a0b39441fcaa3"}, + {file = "starlette-0.20.4.tar.gz", hash = "sha256:42fcf3122f998fefce3e2c5ad7e5edbf0f02cf685d646a83a08d404726af5084"}, ] -"tap.py" = [ +tap-py = [ {file = "tap.py-3.1-py3-none-any.whl", hash = "sha256:928c852f3361707b796c93730cc5402c6378660b161114461066acf53d65bf5d"}, {file = "tap.py-3.1.tar.gz", hash = "sha256:3c0cd45212ad5a25b35445964e2517efa000a118a1bfc3437dae828892eaf1e1"}, ] @@ -1829,16 +1882,16 @@ tomli = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] typing-extensions = [ - {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, - {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, + {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, + {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, ] urllib3 = [ - {file = "urllib3-1.26.11-py2.py3-none-any.whl", hash = "sha256:c33ccba33c819596124764c23a97d25f32b28433ba0dedeb77d873a38722c9bc"}, - {file = "urllib3-1.26.11.tar.gz", hash = 
"sha256:ea6e8fb210b19d950fab93b60c9009226c63a28808bc8386e05301e25883ac0a"}, + {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, + {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, ] uvicorn = [ - {file = "uvicorn-0.18.3-py3-none-any.whl", hash = "sha256:0abd429ebb41e604ed8d2be6c60530de3408f250e8d2d84967d85ba9e86fe3af"}, - {file = "uvicorn-0.18.3.tar.gz", hash = "sha256:9a66e7c42a2a95222f76ec24a4b754c158261c4696e683b9dadc72b590e0311b"}, + {file = "uvicorn-0.19.0-py3-none-any.whl", hash = "sha256:cc277f7e73435748e69e075a721841f7c4a95dba06d12a72fe9874acced16f6f"}, + {file = "uvicorn-0.19.0.tar.gz", hash = "sha256:cf538f3018536edb1f4a826311137ab4944ed741d52aeb98846f52215de57f25"}, ] webencodings = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, @@ -1915,10 +1968,10 @@ wrapt = [ {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, ] wsproto = [ - {file = "wsproto-1.1.0-py3-none-any.whl", hash = "sha256:2218cb57952d90b9fca325c0dcfb08c3bda93e8fd8070b0a17f048e2e47a521b"}, - {file = "wsproto-1.1.0.tar.gz", hash = "sha256:a2e56bfd5c7cd83c1369d83b5feccd6d37798b74872866e62616e0ecf111bda8"}, + {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, + {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, ] zipp = [ - {file = "zipp-3.8.1-py3-none-any.whl", hash = "sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009"}, - {file = "zipp-3.8.1.tar.gz", hash = "sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2"}, + {file = "zipp-3.9.0-py3-none-any.whl", hash = 
"sha256:972cfa31bc2fedd3fa838a51e9bc7e64b7fb725a8c00e7431554311f180e9980"}, + {file = "zipp-3.9.0.tar.gz", hash = "sha256:3a7af91c3db40ec72dd9d154ae18e008c69efe8ca88dde4f9a731bb82fe2f9eb"}, ] diff --git a/pyproject.toml b/pyproject.toml index fea2f922..3b615c73 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,7 +61,7 @@ aiofiles = "^22.0.0" asgiref = "^3.4.1" bcrypt = "^4.0.0" bleach = "^5.0.0" -email-validator = "1.2.1" +email-validator = "^1.3.0" fakeredis = "^1.6.1" feedgen = "^0.9.0" httpx = "^0.23.0" @@ -85,7 +85,7 @@ Werkzeug = "^2.0.2" SQLAlchemy = "^1.4.26" # ASGI -uvicorn = "^0.18.0" +uvicorn = "^0.19.0" gunicorn = "^20.1.0" Hypercorn = "^0.14.0" prometheus-fastapi-instrumentator = "^5.7.1" @@ -93,14 +93,14 @@ pytest-xdist = "^2.4.0" filelock = "^3.3.2" posix-ipc = "^1.0.5" pyalpm = "^0.10.6" -fastapi = "^0.83.0" +fastapi = "^0.85.1" srcinfo = "^0.0.8" [tool.poetry.dev-dependencies] coverage = "^6.0.2" pytest = "^7.0.0" -pytest-asyncio = "^0.19.0" -pytest-cov = "^3.0.0" +pytest-asyncio = "^0.20.1" +pytest-cov = "^4.0.0" pytest-tap = "^3.2" [tool.poetry.scripts] From 524334409a1744e8caf6fb4b2f0d42ec189bca27 Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Sat, 22 Oct 2022 21:58:30 +0100 Subject: [PATCH 136/415] fix: add production logging.prod.conf to be less verbose Signed-off-by: Leonidas Spyropoulos --- logging.prod.conf | 58 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) create mode 100644 logging.prod.conf diff --git a/logging.prod.conf b/logging.prod.conf new file mode 100644 index 00000000..63692a28 --- /dev/null +++ b/logging.prod.conf @@ -0,0 +1,58 @@ +[loggers] +keys=root,aurweb,uvicorn,hypercorn,alembic + +[handlers] +keys=simpleHandler,detailedHandler + +[formatters] +keys=simpleFormatter,detailedFormatter + +[logger_root] +level=INFO +; We add NullHandler programmatically. 
+handlers= +propogate=0 + +[logger_aurweb] +level=INFO +handlers=simpleHandler +qualname=aurweb +propagate=1 + +[logger_uvicorn] +level=WARN +handlers=simpleHandler +qualname=uvicorn +propagate=0 + +[logger_hypercorn] +level=WARN +handlers=simpleHandler +qualname=hypercorn +propagate=0 + +[logger_alembic] +level=WARN +handlers=simpleHandler +qualname=alembic +propagate=0 + +[handler_simpleHandler] +class=StreamHandler +level=INFO +formatter=simpleFormatter +args=(sys.stdout,) + +[handler_detailedHandler] +class=StreamHandler +level=DEBUG +formatter=detailedFormatter +args=(sys.stdout,) + +[formatter_simpleFormatter] +format=%(asctime)s %(levelname)-8s | %(name)s @ (%(filename)s:%(lineno)d): %(message)s +datefmt=%H:%M:%S + +[formatter_detailedFormatter] +format=%(asctime)s %(levelname)-8s | [%(name)s.%(funcName)s() @ %(filename)s:%(lineno)d]: %(message)s +datefmt=%H:%M:%S From 3dcbee5a4f035777b5b65d124bc3c46240b661c8 Mon Sep 17 00:00:00 2001 From: Mario Oenning Date: Fri, 28 Oct 2022 12:42:50 +0000 Subject: [PATCH 137/415] fix: make overwriting of archive files atomic --- aurweb/scripts/mkpkglists.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/aurweb/scripts/mkpkglists.py b/aurweb/scripts/mkpkglists.py index e74bbf25..67cc7fab 100755 --- a/aurweb/scripts/mkpkglists.py +++ b/aurweb/scripts/mkpkglists.py @@ -24,7 +24,6 @@ import io import os import shutil import sys -import tempfile from collections import defaultdict from typing import Any @@ -219,10 +218,9 @@ def _main(): output = list() snapshot_uri = aurweb.config.get("options", "snapshot_uri") - tmpdir = tempfile.mkdtemp() - tmp_packages = os.path.join(tmpdir, os.path.basename(PACKAGES)) - tmp_meta = os.path.join(tmpdir, os.path.basename(META)) - tmp_metaext = os.path.join(tmpdir, os.path.basename(META_EXT)) + tmp_packages = f"{PACKAGES}.tmp" + tmp_meta = f"{META}.tmp" + tmp_metaext = f"{META_EXT}.tmp" gzips = { "packages": gzip.open(tmp_packages, "wt"), "meta": 
gzip.open(tmp_meta, "wb"), @@ -276,13 +274,13 @@ def _main(): # Produce pkgbase.gz query = db.query(PackageBase.Name).filter(PackageBase.PackagerUID.isnot(None)).all() - tmp_pkgbase = os.path.join(tmpdir, os.path.basename(PKGBASE)) + tmp_pkgbase = f"{PKGBASE}.tmp" with gzip.open(tmp_pkgbase, "wt") as f: f.writelines([f"{base.Name}\n" for i, base in enumerate(query)]) # Produce users.gz query = db.query(User.Username).all() - tmp_users = os.path.join(tmpdir, os.path.basename(USERS)) + tmp_users = f"{USERS}.tmp" with gzip.open(tmp_users, "wt") as f: f.writelines([f"{user.Username}\n" for i, user in enumerate(query)]) @@ -297,7 +295,7 @@ def _main(): for src, dst in files: checksum = sha256sum(src) - base = os.path.basename(src) + base = os.path.basename(dst) checksum_formatted = f"SHA256 ({base}) = {checksum}" checksum_file = f"{dst}.sha256" @@ -307,7 +305,6 @@ def _main(): # Move the new archive into its rightful place. shutil.move(src, dst) - os.removedirs(tmpdir) seconds = filters.number_format(bench.end(), 4) logger.info(f"Completed in {seconds} seconds.") From d793193fdfc9d8369a89a932b5dc719ab1153985 Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Thu, 27 Oct 2022 15:11:37 +0100 Subject: [PATCH 138/415] style: make logging easier to read Signed-off-by: Leonidas Spyropoulos --- logging.conf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/logging.conf b/logging.conf index 7dfd30f0..d27b0153 100644 --- a/logging.conf +++ b/logging.conf @@ -50,9 +50,9 @@ formatter=detailedFormatter args=(sys.stdout,) [formatter_simpleFormatter] -format=%(asctime)s %(levelname)-5s | %(name)s: %(message)s +format=%(asctime)s %(levelname)-8s | %(name)s @ (%(filename)s:%(lineno)d): %(message)s datefmt=%H:%M:%S [formatter_detailedFormatter] -format=%(asctime)s %(levelname)-5s | %(name)s.%(funcName)s() @ L%(lineno)d: %(message)s +format=%(asctime)s %(levelname)-8s | [%(name)s.%(funcName)s() @ %(filename)s:%(lineno)d]: %(message)s datefmt=%H:%M:%S From 
7e06823e580942cc11b8164a559386e766d94470 Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Thu, 27 Oct 2022 15:34:52 +0100 Subject: [PATCH 139/415] refactor: remove redundand parenthesis when return tuple Signed-off-by: Leonidas Spyropoulos --- aurweb/auth/__init__.py | 2 +- aurweb/git/update.py | 2 +- aurweb/packages/util.py | 6 +++--- aurweb/routers/accounts.py | 4 ++-- aurweb/routers/packages.py | 30 +++++++++++++++--------------- aurweb/scripts/rendercomment.py | 6 +++--- aurweb/util.py | 6 +++--- test/test_packages_routes.py | 4 ++-- test/test_pkgbase_routes.py | 4 ++-- test/test_spawn.py | 2 +- test/test_tuvotereminder.py | 2 +- 11 files changed, 34 insertions(+), 34 deletions(-) diff --git a/aurweb/auth/__init__.py b/aurweb/auth/__init__.py index b8056f91..5a1fc8d0 100644 --- a/aurweb/auth/__init__.py +++ b/aurweb/auth/__init__.py @@ -127,7 +127,7 @@ class BasicAuthBackend(AuthenticationBackend): user.nonce = util.make_nonce() user.authenticated = True - return (AuthCredentials(["authenticated"]), user) + return AuthCredentials(["authenticated"]), user def _auth_required(auth_goal: bool = True): diff --git a/aurweb/git/update.py b/aurweb/git/update.py index 94a8d623..b1256fdb 100755 --- a/aurweb/git/update.py +++ b/aurweb/git/update.py @@ -52,7 +52,7 @@ def parse_dep(depstring): depname = re.sub(r"(<|=|>).*", "", dep) depcond = dep[len(depname) :] - return (depname, desc, depcond) + return depname, desc, depcond def create_pkgbase(conn, pkgbase, user): diff --git a/aurweb/packages/util.py b/aurweb/packages/util.py index cddec0ac..25671028 100644 --- a/aurweb/packages/util.py +++ b/aurweb/packages/util.py @@ -239,12 +239,12 @@ def source_uri(pkgsrc: models.PackageSource) -> Tuple[str, str]: the package base name. 
:param pkgsrc: PackageSource instance - :return (text, uri) tuple + :return text, uri)tuple """ if "::" in pkgsrc.Source: return pkgsrc.Source.split("::", 1) elif "://" in pkgsrc.Source: - return (pkgsrc.Source, pkgsrc.Source) + return pkgsrc.Source, pkgsrc.Source path = config.get("options", "source_file_uri") pkgbasename = pkgsrc.Package.PackageBase.Name - return (pkgsrc.Source, path % (pkgsrc.Source, pkgbasename)) + return pkgsrc.Source, path % (pkgsrc.Source, pkgbasename) diff --git a/aurweb/routers/accounts.py b/aurweb/routers/accounts.py index 24aacdf7..07962c37 100644 --- a/aurweb/routers/accounts.py +++ b/aurweb/routers/accounts.py @@ -160,9 +160,9 @@ def process_account_form(request: Request, user: models.User, args: dict[str, An for check in checks: check(**args, request=request, user=user, _=_) except ValidationError as exc: - return (False, exc.data) + return False, exc.data - return (True, []) + return True, [] def make_account_form_context( diff --git a/aurweb/routers/packages.py b/aurweb/routers/packages.py index 0d482521..a4aac496 100644 --- a/aurweb/routers/packages.py +++ b/aurweb/routers/packages.py @@ -213,7 +213,7 @@ async def package( async def packages_unflag(request: Request, package_ids: list[int] = [], **kwargs): if not package_ids: - return (False, ["You did not select any packages to unflag."]) + return False, ["You did not select any packages to unflag."] # Holds the set of package bases we're looking to unflag. # Constructed below via looping through the packages query. 
@@ -226,14 +226,14 @@ async def packages_unflag(request: Request, package_ids: list[int] = [], **kwarg creds.PKGBASE_UNFLAG, approved=[pkg.PackageBase.Flagger] ) if not has_cred: - return (False, ["You did not select any packages to unflag."]) + return False, ["You did not select any packages to unflag."] if pkg.PackageBase not in bases: bases.update({pkg.PackageBase}) for pkgbase in bases: pkgbase_actions.pkgbase_unflag_instance(request, pkgbase) - return (True, ["The selected packages have been unflagged."]) + return True, ["The selected packages have been unflagged."] async def packages_notify(request: Request, package_ids: list[int] = [], **kwargs): @@ -271,13 +271,13 @@ async def packages_notify(request: Request, package_ids: list[int] = [], **kwarg pkgbase_actions.pkgbase_notify_instance(request, pkgbase) # TODO: This message does not yet have a translation. - return (True, ["The selected packages' notifications have been enabled."]) + return True, ["The selected packages' notifications have been enabled."] async def packages_unnotify(request: Request, package_ids: list[int] = [], **kwargs): if not package_ids: # TODO: This error does not yet have a translation. - return (False, ["You did not select any packages for notification removal."]) + return False, ["You did not select any packages for notification removal."] # TODO: This error does not yet have a translation. error_tuple = ( @@ -307,14 +307,14 @@ async def packages_unnotify(request: Request, package_ids: list[int] = [], **kwa pkgbase_actions.pkgbase_unnotify_instance(request, pkgbase) # TODO: This message does not yet have a translation. 
- return (True, ["The selected packages' notifications have been removed."]) + return True, ["The selected packages' notifications have been removed."] async def packages_adopt( request: Request, package_ids: list[int] = [], confirm: bool = False, **kwargs ): if not package_ids: - return (False, ["You did not select any packages to adopt."]) + return False, ["You did not select any packages to adopt."] if not confirm: return ( @@ -347,7 +347,7 @@ async def packages_adopt( for pkgbase in bases: pkgbase_actions.pkgbase_adopt_instance(request, pkgbase) - return (True, ["The selected packages have been adopted."]) + return True, ["The selected packages have been adopted."] def disown_all(request: Request, pkgbases: list[models.PackageBase]) -> list[str]: @@ -364,7 +364,7 @@ async def packages_disown( request: Request, package_ids: list[int] = [], confirm: bool = False, **kwargs ): if not package_ids: - return (False, ["You did not select any packages to disown."]) + return False, ["You did not select any packages to disown."] if not confirm: return ( @@ -397,9 +397,9 @@ async def packages_disown( # Now, disown all the bases if we can. if errors := disown_all(request, bases): - return (False, errors) + return False, errors - return (True, ["The selected packages have been disowned."]) + return True, ["The selected packages have been disowned."] async def packages_delete( @@ -410,7 +410,7 @@ async def packages_delete( **kwargs, ): if not package_ids: - return (False, ["You did not select any packages to delete."]) + return False, ["You did not select any packages to delete."] if not confirm: return ( @@ -422,7 +422,7 @@ async def packages_delete( ) if not request.user.has_credential(creds.PKGBASE_DELETE): - return (False, ["You do not have permission to delete packages."]) + return False, ["You do not have permission to delete packages."] # set-ify package_ids and query the database for related records. 
package_ids = set(package_ids) @@ -432,7 +432,7 @@ async def packages_delete( # Let the user know there was an issue with their input: they have # provided at least one package_id which does not exist in the DB. # TODO: This error has not yet been translated. - return (False, ["One of the packages you selected does not exist."]) + return False, ["One of the packages you selected does not exist."] # Make a set out of all package bases related to `packages`. bases = {pkg.PackageBase for pkg in packages} @@ -448,7 +448,7 @@ async def packages_delete( ) util.apply_all(notifs, lambda n: n.send()) - return (True, ["The selected packages have been deleted."]) + return True, ["The selected packages have been deleted."] # A mapping of action string -> callback functions used within the diff --git a/aurweb/scripts/rendercomment.py b/aurweb/scripts/rendercomment.py index 4a2c84bd..643b0370 100755 --- a/aurweb/scripts/rendercomment.py +++ b/aurweb/scripts/rendercomment.py @@ -46,7 +46,7 @@ class FlysprayLinksInlineProcessor(markdown.inlinepatterns.InlineProcessor): el = Element("a") el.set("href", f"https://bugs.archlinux.org/task/{m.group(1)}") el.text = markdown.util.AtomicString(m.group(0)) - return (el, m.start(0), m.end(0)) + return el, m.start(0), m.end(0) class FlysprayLinksExtension(markdown.extensions.Extension): @@ -74,7 +74,7 @@ class GitCommitsInlineProcessor(markdown.inlinepatterns.InlineProcessor): oid = m.group(1) if oid not in self._repo: # Unknown OID; preserve the orginal text. 
- return (None, None, None) + return None, None, None el = Element("a") commit_uri = aurweb.config.get("options", "commit_uri") @@ -83,7 +83,7 @@ class GitCommitsInlineProcessor(markdown.inlinepatterns.InlineProcessor): "href", commit_uri % (quote_plus(self._head), quote_plus(oid[:prefixlen])) ) el.text = markdown.util.AtomicString(oid[:prefixlen]) - return (el, m.start(0), m.end(0)) + return el, m.start(0), m.end(0) class GitCommitsExtension(markdown.extensions.Extension): diff --git a/aurweb/util.py b/aurweb/util.py index cda12af1..0a39cd3d 100644 --- a/aurweb/util.py +++ b/aurweb/util.py @@ -107,7 +107,7 @@ def sanitize_params(offset: str, per_page: str) -> Tuple[int, int]: except ValueError: per_page = defaults.PP - return (offset, per_page) + return offset, per_page def strtobool(value: Union[str, bool]) -> bool: @@ -187,7 +187,7 @@ def parse_ssh_key(string: str) -> Tuple[str, str]: if proc.returncode: raise invalid_exc - return (prefix, key) + return prefix, key def parse_ssh_keys(string: str) -> list[Tuple[str, str]]: @@ -199,4 +199,4 @@ def shell_exec(cmdline: str, cwd: str) -> Tuple[int, str, str]: args = shlex.split(cmdline) proc = Popen(args, cwd=cwd, stdout=PIPE, stderr=PIPE) out, err = proc.communicate() - return (proc.returncode, out.decode().strip(), err.decode().strip()) + return proc.returncode, out.decode().strip(), err.decode().strip() diff --git a/test/test_packages_routes.py b/test/test_packages_routes.py index 6e92eeff..3b717783 100644 --- a/test/test_packages_routes.py +++ b/test/test_packages_routes.py @@ -1149,7 +1149,7 @@ def test_packages_post_unknown_action(client: TestClient, user: User, package: P def test_packages_post_error(client: TestClient, user: User, package: Package): async def stub_action(request: Request, **kwargs): - return (False, ["Some error."]) + return False, ["Some error."] actions = {"stub": stub_action} with mock.patch.dict("aurweb.routers.packages.PACKAGE_ACTIONS", actions): @@ -1170,7 +1170,7 @@ def 
test_packages_post_error(client: TestClient, user: User, package: Package): def test_packages_post(client: TestClient, user: User, package: Package): async def stub_action(request: Request, **kwargs): - return (True, ["Some success."]) + return True, ["Some success."] actions = {"stub": stub_action} with mock.patch.dict("aurweb.routers.packages.PACKAGE_ACTIONS", actions): diff --git a/test/test_pkgbase_routes.py b/test/test_pkgbase_routes.py index bfdb0c37..18c11626 100644 --- a/test/test_pkgbase_routes.py +++ b/test/test_pkgbase_routes.py @@ -1315,7 +1315,7 @@ def test_packages_post_unknown_action(client: TestClient, user: User, package: P def test_packages_post_error(client: TestClient, user: User, package: Package): async def stub_action(request: Request, **kwargs): - return (False, ["Some error."]) + return False, ["Some error."] actions = {"stub": stub_action} with mock.patch.dict("aurweb.routers.packages.PACKAGE_ACTIONS", actions): @@ -1336,7 +1336,7 @@ def test_packages_post_error(client: TestClient, user: User, package: Package): def test_packages_post(client: TestClient, user: User, package: Package): async def stub_action(request: Request, **kwargs): - return (True, ["Some success."]) + return True, ["Some success."] actions = {"stub": stub_action} with mock.patch.dict("aurweb.routers.packages.PACKAGE_ACTIONS", actions): diff --git a/test/test_spawn.py b/test/test_spawn.py index be1c5e7c..25b9ebfc 100644 --- a/test/test_spawn.py +++ b/test/test_spawn.py @@ -24,7 +24,7 @@ class FakeProcess: """We need this constructor to remain compatible with Popen.""" def communicate(self) -> Tuple[bytes, bytes]: - return (self.stdout, self.stderr) + return self.stdout, self.stderr def terminate(self) -> None: raise Exception("Fake termination.") diff --git a/test/test_tuvotereminder.py b/test/test_tuvotereminder.py index 0233c8b2..5f2ae3a1 100644 --- a/test/test_tuvotereminder.py +++ b/test/test_tuvotereminder.py @@ -42,7 +42,7 @@ def email_pieces(voteinfo: TUVoteInfo) 
-> Tuple[str, str]: f"[1]. The voting period\nends in less than 48 hours.\n\n" f"[1] {aur_location}/tu/?id={voteinfo.ID}" ) - return (subject, content) + return subject, content @pytest.fixture From 48e5dc6763b664fb307d2894cedab0a9aaf09630 Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Thu, 27 Oct 2022 15:49:48 +0100 Subject: [PATCH 140/415] feat: remove empty lines from ssh_keys text area, and show helpful message Signed-off-by: Leonidas Spyropoulos --- aurweb/util.py | 2 +- po/aurweb.pot | 4 ++++ templates/partials/account_form.html | 7 +++++++ test/test_util.py | 29 +++++++++++++++++++++++++--- 4 files changed, 38 insertions(+), 4 deletions(-) diff --git a/aurweb/util.py b/aurweb/util.py index 0a39cd3d..7b997609 100644 --- a/aurweb/util.py +++ b/aurweb/util.py @@ -192,7 +192,7 @@ def parse_ssh_key(string: str) -> Tuple[str, str]: def parse_ssh_keys(string: str) -> list[Tuple[str, str]]: """Parse a list of SSH public keys.""" - return [parse_ssh_key(e) for e in string.splitlines()] + return [parse_ssh_key(e) for e in string.strip().splitlines(True) if e.strip()] def shell_exec(cmdline: str, cwd: str) -> Tuple[int, str, str]: diff --git a/po/aurweb.pot b/po/aurweb.pot index 1838fae5..ff1bde8b 100644 --- a/po/aurweb.pot +++ b/po/aurweb.pot @@ -1398,6 +1398,10 @@ msgid "" "the Arch User Repository." msgstr "" +#: templates/partials/account_form.html +msgid "Specify multiple SSH Keys separated by new line, empty lines are ignored." +msgstr "" + #: template/account_edit_form.php msgid "SSH Public Key" msgstr "" diff --git a/templates/partials/account_form.html b/templates/partials/account_form.html index 007fb389..a433a57d 100644 --- a/templates/partials/account_form.html +++ b/templates/partials/account_form.html @@ -264,6 +264,13 @@

    +

    + + {{ + "Specify multiple SSH Keys separated by new line, empty lines are ignored." | tr + }} + +

    diff --git a/test/test_util.py b/test/test_util.py index 2e8b2e4e..fd7d8655 100644 --- a/test/test_util.py +++ b/test/test_util.py @@ -96,14 +96,37 @@ YbxDwGimZZslg0OZu9UzoAT6xEGyiZsqJkTMbRp1ZYIOv9jHCJxRuxxuN3fzxyT3xE69+vhq2/NJX\ vTNJCD6JtMClxbIXW9q74nNqG+2SD/VQNMUz/505TK1PbY/4uyFfq5HquHJXQVCBll03FRerNHH2N\ schFne6BFHpa48PCoZNH45wLjFXwUyrGU1HrNqh6ZPdRfBTrTOkgs+BKBxGNeV45aYUPu/cFBSPcB\ fRSo6OFcejKc=""" + assert_multiple_keys(pks) + + +def test_parse_ssh_keys_with_extra_lines(): + pks = """ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyN\ +TYAAABBBEURnkiY6JoLyqDE8Li1XuAW+LHmkmLDMW/GL5wY7k4/A+Ta7bjA3MOKrF9j4EuUTvCuNX\ +ULxvpfSqheTFWZc+g= + + + + +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDmqEapFMh/ajPHnm1dBweYPeLOUjC0Ydp6uw7rB\ +S5KCggUVQR8WfIm+sRYTj2+smGsK6zHMBjFnbzvV11vnMqcnY+Sa4LhIAdwkbt/b8HlGaLj1hCWSh\ +a5b5/noeK7L+CECGHdvfJhpxBbhq38YEdFnCGbslk/4NriNcUp/DO81CXb1RzJ9GBFH8ivPW1mbe9\ +YbxDwGimZZslg0OZu9UzoAT6xEGyiZsqJkTMbRp1ZYIOv9jHCJxRuxxuN3fzxyT3xE69+vhq2/NJX\ +8aRsxGPL9G/XKcaYGS6y6LW4quIBCz/XsTZfx1GmkQeZPYHH8FeE+XC/+toXL/kamxdOQKFYEEpWK\ +vTNJCD6JtMClxbIXW9q74nNqG+2SD/VQNMUz/505TK1PbY/4uyFfq5HquHJXQVCBll03FRerNHH2N\ +schFne6BFHpa48PCoZNH45wLjFXwUyrGU1HrNqh6ZPdRfBTrTOkgs+BKBxGNeV45aYUPu/cFBSPcB\ +fRSo6OFcejKc= + + +""" + assert_multiple_keys(pks) + + +def assert_multiple_keys(pks): keys = util.parse_ssh_keys(pks) assert len(keys) == 2 - pfx1, key1, pfx2, key2 = pks.split() k1, k2 = keys - assert pfx1 == k1[0] assert key1 == k1[1] - assert pfx2 == k2[0] assert key2 == k2[1] From 333051ab1f65d28fce7ecbae8ada50a75564303d Mon Sep 17 00:00:00 2001 From: Mario Oenning Date: Fri, 28 Oct 2022 16:55:16 +0000 Subject: [PATCH 141/415] feat: add field "Submitter" to metadata-archives --- aurweb/scripts/mkpkglists.py | 5 +++++ test/test_mkpkglists.py | 8 +++++++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/aurweb/scripts/mkpkglists.py b/aurweb/scripts/mkpkglists.py index 67cc7fab..1ff6fbb2 100755 --- a/aurweb/scripts/mkpkglists.py +++ 
b/aurweb/scripts/mkpkglists.py @@ -163,6 +163,7 @@ def as_dict(package: Package) -> dict[str, Any]: "Popularity": float(package.Popularity), "OutOfDate": package.OutOfDate, "Maintainer": package.Maintainer, + "Submitter": package.Submitter, "FirstSubmitted": package.FirstSubmitted, "LastModified": package.LastModified, } @@ -190,10 +191,13 @@ def _main(): logger.warning(f"{sys.argv[0]} is deprecated and will be soon be removed") logger.info("Started re-creating archives, wait a while...") + Submitter = orm.aliased(User) + query = ( db.query(Package) .join(PackageBase, PackageBase.ID == Package.PackageBaseID) .join(User, PackageBase.MaintainerUID == User.ID, isouter=True) + .join(Submitter, PackageBase.SubmitterUID == Submitter.ID, isouter=True) .filter(PackageBase.PackagerUID.isnot(None)) .with_entities( Package.ID, @@ -207,6 +211,7 @@ def _main(): PackageBase.Popularity, PackageBase.OutOfDateTS.label("OutOfDate"), User.Username.label("Maintainer"), + Submitter.Username.label("Submitter"), PackageBase.SubmittedTS.label("FirstSubmitted"), PackageBase.ModifiedTS.label("LastModified"), ) diff --git a/test/test_mkpkglists.py b/test/test_mkpkglists.py index 3c105817..e7800ffe 100644 --- a/test/test_mkpkglists.py +++ b/test/test_mkpkglists.py @@ -30,6 +30,7 @@ META_KEYS = [ "Popularity", "OutOfDate", "Maintainer", + "Submitter", "FirstSubmitted", "LastModified", "URLPath", @@ -61,7 +62,12 @@ def packages(user: User) -> list[Package]: lic = db.create(License, Name="GPL") for i in range(5): # Create the package. - pkgbase = db.create(PackageBase, Name=f"pkgbase_{i}", Packager=user) + pkgbase = db.create( + PackageBase, + Name=f"pkgbase_{i}", + Packager=user, + Submitter=user, + ) pkg = db.create(Package, PackageBase=pkgbase, Name=f"pkg_{i}") # Create some related records. 
From 6ee34ab3cb14ada09d991141779ae9c5f9f50698 Mon Sep 17 00:00:00 2001 From: Mario Oenning Date: Mon, 31 Oct 2022 09:42:56 +0000 Subject: [PATCH 142/415] feat: add field "CoMaintainers" to metadata-archives --- aurweb/scripts/mkpkglists.py | 15 +++++++++++++++ test/test_mkpkglists.py | 3 +++ 2 files changed, 18 insertions(+) diff --git a/aurweb/scripts/mkpkglists.py b/aurweb/scripts/mkpkglists.py index 1ff6fbb2..903d96ae 100755 --- a/aurweb/scripts/mkpkglists.py +++ b/aurweb/scripts/mkpkglists.py @@ -142,6 +142,21 @@ def get_extended_fields(): ) .distinct() .order_by("Name"), + # Co-Maintainer + db.query(models.PackageComaintainer) + .join(models.User, models.User.ID == models.PackageComaintainer.UsersID) + .join( + models.Package, + models.Package.PackageBaseID == models.PackageComaintainer.PackageBaseID, + ) + .with_entities( + models.Package.ID, + literal("CoMaintainers").label("Type"), + models.User.Username.label("Name"), + literal(str()).label("Cond"), + ) + .distinct() + .order_by("Name"), ] query = subqueries[0].union_all(*subqueries[1:]) return get_extended_dict(query) diff --git a/test/test_mkpkglists.py b/test/test_mkpkglists.py index e7800ffe..8edbcd81 100644 --- a/test/test_mkpkglists.py +++ b/test/test_mkpkglists.py @@ -11,6 +11,7 @@ from aurweb.models import ( License, Package, PackageBase, + PackageComaintainer, PackageDependency, PackageLicense, User, @@ -79,6 +80,7 @@ def packages(user: User) -> list[Package]: DepName=f"dep_{i}", DepCondition=">=1.0", ) + db.create(PackageComaintainer, User=user, PackageBase=pkgbase, Priority=1) # Add the package to our output list. 
output.append(pkg) @@ -229,6 +231,7 @@ def test_mkpkglists_extended(config_mock: None, user: User, packages: list[Packa assert key in pkg, f"{pkg=} record does not have {key=}" assert isinstance(pkg["Depends"], list) assert isinstance(pkg["License"], list) + assert isinstance(pkg["CoMaintainers"], list) for file in (PACKAGES, PKGBASE, USERS, META, META_EXT): with open(f"{file}.sha256") as f: From 286834bab1e184d2f92b6c03440e8dc85c2b8d0c Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Mon, 31 Oct 2022 14:43:31 +0000 Subject: [PATCH 143/415] fix: regression on gzipped filenames from 3dcbee5a With the 3dcbee5a the filenames inside the .gz archives contained .tmp at the end. This fixes those by using Gzip Class constructor instead of the gzip.open method. Signed-off-by: Leonidas Spyropoulos --- aurweb/scripts/mkpkglists.py | 55 +++++++++++++++++++++--------------- 1 file changed, 32 insertions(+), 23 deletions(-) diff --git a/aurweb/scripts/mkpkglists.py b/aurweb/scripts/mkpkglists.py index 903d96ae..7ff2690b 100755 --- a/aurweb/scripts/mkpkglists.py +++ b/aurweb/scripts/mkpkglists.py @@ -242,8 +242,10 @@ def _main(): tmp_meta = f"{META}.tmp" tmp_metaext = f"{META_EXT}.tmp" gzips = { - "packages": gzip.open(tmp_packages, "wt"), - "meta": gzip.open(tmp_meta, "wb"), + "packages": gzip.GzipFile( + filename=PACKAGES, mode="wb", fileobj=open(tmp_packages, "wb") + ), + "meta": gzip.GzipFile(filename=META, mode="wb", fileobj=open(tmp_meta, "wb")), } # Append list opening to the metafile. @@ -252,7 +254,9 @@ def _main(): # Produce packages.gz + packages-meta-ext-v1.json.gz extended = False if len(sys.argv) > 1 and sys.argv[1] in EXTENDED_FIELD_HANDLERS: - gzips["meta_ext"] = gzip.open(tmp_metaext, "wb") + gzips["meta_ext"] = gzip.GzipFile( + filename=META_EXT, mode="wb", fileobj=open(tmp_metaext, "wb") + ) # Append list opening to the meta_ext file. 
gzips.get("meta_ext").write(b"[\n") f = EXTENDED_FIELD_HANDLERS.get(sys.argv[1]) @@ -261,28 +265,29 @@ def _main(): results = query.all() n = len(results) - 1 - for i, result in enumerate(results): - # Append to packages.gz. - gzips.get("packages").write(f"{result.Name}\n") + with io.TextIOWrapper(gzips.get("packages")) as p: + for i, result in enumerate(results): + # Append to packages.gz. + p.write(f"{result.Name}\n") - # Construct our result JSON dictionary. - item = as_dict(result) - item["URLPath"] = snapshot_uri % result.Name + # Construct our result JSON dictionary. + item = as_dict(result) + item["URLPath"] = snapshot_uri % result.Name - # We stream out package json objects line per line, so - # we also need to include the ',' character at the end - # of package lines (excluding the last package). - suffix = b",\n" if i < n else b"\n" + # We stream out package json objects line per line, so + # we also need to include the ',' character at the end + # of package lines (excluding the last package). + suffix = b",\n" if i < n else b"\n" - # Write out to packagesmetafile - output.append(item) - gzips.get("meta").write(orjson.dumps(output[-1]) + suffix) + # Write out to packagesmetafile + output.append(item) + gzips.get("meta").write(orjson.dumps(output[-1]) + suffix) - if extended: - # Write out to packagesmetaextfile. - data_ = data.get(result.ID, {}) - output[-1].update(data_) - gzips.get("meta_ext").write(orjson.dumps(output[-1]) + suffix) + if extended: + # Write out to packagesmetaextfile. + data_ = data.get(result.ID, {}) + output[-1].update(data_) + gzips.get("meta_ext").write(orjson.dumps(output[-1]) + suffix) # Append the list closing to meta/meta_ext. 
gzips.get("meta").write(b"]") @@ -295,13 +300,17 @@ def _main(): # Produce pkgbase.gz query = db.query(PackageBase.Name).filter(PackageBase.PackagerUID.isnot(None)).all() tmp_pkgbase = f"{PKGBASE}.tmp" - with gzip.open(tmp_pkgbase, "wt") as f: + pkgbase_gzip = gzip.GzipFile( + filename=PKGBASE, mode="wb", fileobj=open(tmp_pkgbase, "wb") + ) + with io.TextIOWrapper(pkgbase_gzip) as f: f.writelines([f"{base.Name}\n" for i, base in enumerate(query)]) # Produce users.gz query = db.query(User.Username).all() tmp_users = f"{USERS}.tmp" - with gzip.open(tmp_users, "wt") as f: + users_gzip = gzip.GzipFile(filename=USERS, mode="wb", fileobj=open(tmp_users, "wb")) + with io.TextIOWrapper(users_gzip) as f: f.writelines([f"{user.Username}\n" for i, user in enumerate(query)]) files = [ From 5669821b299427081f32de7d9d6712dff8b793dc Mon Sep 17 00:00:00 2001 From: moson-mo Date: Mon, 31 Oct 2022 18:00:39 +0100 Subject: [PATCH 144/415] perf: tweak some queries in mkpkglists We can omit the "distinct" from some queries because constraints in the DB ensure uniqueness: * Groups sub-query PackageGroup: Primary key makes "PackageID" + "GroupID" unique Groups: Unique index on "Name" column -> Technically we can't have a package with the same group-name twice * Licenses sub-query: PackageLicense -> Primary key makes "PackageID" + "LicenseID" unique Licenses -> Unique index on "Name" column -> Technically we can't have a package with the same license-name twice * Keywords sub-query: PackageKeywords -> Primary key makes "PackageBaseID" + "KeywordID" unique (And a Package can only have one PackageBase) Keywords -> Unique index on "Name" column -> Technically we can't have a package with the same Keyword twice * Packages main-query: We join PackageBases and Users on their primary key columns (which are guaranteed to be unique) -> There is no way we could end up with more than one record for a Package Signed-off-by: moson-mo --- aurweb/scripts/mkpkglists.py | 10 +++------- 1 file changed, 3 
insertions(+), 7 deletions(-) diff --git a/aurweb/scripts/mkpkglists.py b/aurweb/scripts/mkpkglists.py index 7ff2690b..d2d11c5e 100755 --- a/aurweb/scripts/mkpkglists.py +++ b/aurweb/scripts/mkpkglists.py @@ -94,7 +94,7 @@ def get_extended_fields(): models.PackageDependency.DepName.label("Name"), models.PackageDependency.DepCondition.label("Cond"), ) - .distinct() + .distinct() # A package could have the same dependency multiple times .order_by("Name"), # PackageRelation db.query(models.PackageRelation) @@ -105,7 +105,7 @@ def get_extended_fields(): models.PackageRelation.RelName.label("Name"), models.PackageRelation.RelCondition.label("Cond"), ) - .distinct() + .distinct() # A package could have the same relation multiple times .order_by("Name"), # Groups db.query(models.PackageGroup) @@ -116,7 +116,6 @@ def get_extended_fields(): models.Group.Name.label("Name"), literal(str()).label("Cond"), ) - .distinct() .order_by("Name"), # Licenses db.query(models.PackageLicense) @@ -127,7 +126,6 @@ def get_extended_fields(): models.License.Name.label("Name"), literal(str()).label("Cond"), ) - .distinct() .order_by("Name"), # Keywords db.query(models.PackageKeyword) @@ -140,7 +138,6 @@ def get_extended_fields(): models.PackageKeyword.Keyword.label("Name"), literal(str()).label("Cond"), ) - .distinct() .order_by("Name"), # Co-Maintainer db.query(models.PackageComaintainer) @@ -155,7 +152,7 @@ def get_extended_fields(): models.User.Username.label("Name"), literal(str()).label("Cond"), ) - .distinct() + .distinct() # A package could have the same co-maintainer multiple times .order_by("Name"), ] query = subqueries[0].union_all(*subqueries[1:]) @@ -230,7 +227,6 @@ def _main(): PackageBase.SubmittedTS.label("FirstSubmitted"), PackageBase.ModifiedTS.label("LastModified"), ) - .distinct() .order_by("Name") ) From f10c1a0505d446dfd0f78fc3a03f842d62be82f7 Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Sun, 23 Oct 2022 10:28:30 +0100 Subject: [PATCH 145/415] perf: add 
PackageKeywords.PackageBaseID index This is used on the export for package-meta.v1.gz generation Signed-off-by: Leonidas Spyropoulos --- aurweb/schema.py | 1 + ...57fd7_add_packagekeyword_packagebaseuid.py | 24 +++++++++++++++++++ 2 files changed, 25 insertions(+) create mode 100644 migrations/versions/9e3158957fd7_add_packagekeyword_packagebaseuid.py diff --git a/aurweb/schema.py b/aurweb/schema.py index 5f998ed9..0ba3e9c2 100644 --- a/aurweb/schema.py +++ b/aurweb/schema.py @@ -201,6 +201,7 @@ PackageKeywords = Table( nullable=False, server_default=text("''"), ), + Index("KeywordsPackageBaseID", "PackageBaseID"), mysql_engine="InnoDB", mysql_charset="utf8mb4", mysql_collate="utf8mb4_general_ci", diff --git a/migrations/versions/9e3158957fd7_add_packagekeyword_packagebaseuid.py b/migrations/versions/9e3158957fd7_add_packagekeyword_packagebaseuid.py new file mode 100644 index 00000000..03291152 --- /dev/null +++ b/migrations/versions/9e3158957fd7_add_packagekeyword_packagebaseuid.py @@ -0,0 +1,24 @@ +"""add PackageKeyword.PackageBaseUID index + +Revision ID: 9e3158957fd7 +Revises: 6441d3b65270 +Create Date: 2022-10-17 11:11:46.203322 + +""" +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "9e3158957fd7" +down_revision = "6441d3b65270" +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_index( + "KeywordsPackageBaseID", "PackageKeywords", ["PackageBaseID"], unique=False + ) + + +def downgrade(): + op.drop_index("KeywordsPackageBaseID", table_name="PackageKeywords") From d00371f444aa3465c0adc2ea9118c5eb0633e1be Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Tue, 1 Nov 2022 17:17:34 +0000 Subject: [PATCH 146/415] housekeep: bump renovate dependencies Signed-off-by: Leonidas Spyropoulos --- poetry.lock | 341 +++++++++++++++++++++++-------------------------- pyproject.toml | 10 +- 2 files changed, 167 insertions(+), 184 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9cf24f9a..f6b79a30 100644 --- a/poetry.lock +++ b/poetry.lock @@ -153,11 +153,11 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "colorama" -version = "0.4.5" +version = "0.4.6" description = "Cross-platform colored terminal text." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" [[package]] name = "coverage" @@ -234,6 +234,17 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" dnspython = ">=1.15.0" idna = ">=2.0.0" +[[package]] +name = "exceptiongroup" +version = "1.0.0" +description = "Backport of PEP 654 (exception groups)" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=6)"] + [[package]] name = "execnet" version = "1.9.0" @@ -247,7 +258,7 @@ testing = ["pre-commit"] [[package]] name = "fakeredis" -version = "1.9.4" +version = "1.10.0" description = "Fake implementation of redis API for testing purposes." 
category = "main" optional = false @@ -263,7 +274,7 @@ lua = ["lupa (>=1.13,<2.0)"] [[package]] name = "fastapi" -version = "0.85.1" +version = "0.85.2" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" category = "main" optional = false @@ -276,8 +287,8 @@ starlette = "0.20.4" [package.extras] all = ["email-validator (>=1.1.1,<2.0.0)", "itsdangerous (>=1.1.0,<3.0.0)", "jinja2 (>=2.11.2,<4.0.0)", "orjson (>=3.2.1,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "uvicorn[standard] (>=0.12.0,<0.19.0)"] dev = ["autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "pre-commit (>=2.17.0,<3.0.0)", "uvicorn[standard] (>=0.12.0,<0.19.0)"] -doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer (>=0.4.1,<0.7.0)"] -test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.8.0)", "databases[sqlite] (>=0.3.2,<0.7.0)", "email-validator (>=1.1.1,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.23.0,<0.24.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.971)", "orjson (>=3.2.1,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=7.1.3,<8.0.0)", "pytest-cov (>=2.12.0,<4.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "types-orjson (==3.6.2)", "types-ujson (==5.4.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"] +doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer[all] (>=0.6.1,<0.7.0)"] +test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black 
(==22.8.0)", "databases[sqlite] (>=0.3.2,<0.7.0)", "email-validator (>=1.1.1,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.23.0,<0.24.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.982)", "orjson (>=3.2.1,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=7.1.3,<8.0.0)", "pytest-cov (>=2.12.0,<5.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "sqlalchemy (>=1.3.18,<=1.4.41)", "types-orjson (==3.6.2)", "types-ujson (==5.5.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"] [[package]] name = "feedgen" @@ -305,14 +316,15 @@ testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pyt [[package]] name = "greenlet" -version = "1.1.3.post0" +version = "2.0.0" description = "Lightweight in-process concurrent programming" category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" [package.extras] -docs = ["Sphinx"] +docs = ["Sphinx", "docutils (<0.18)"] +test = ["faulthandler", "objgraph"] [[package]] name = "gunicorn" @@ -542,7 +554,7 @@ python-versions = ">=3.5" [[package]] name = "orjson" -version = "3.8.0" +version = "3.8.1" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" category = "main" optional = false @@ -628,20 +640,12 @@ prometheus-client = ">=0.8.0,<1.0.0" [[package]] name = "protobuf" -version = "4.21.8" +version = "4.21.9" description = "" category = "main" optional = false python-versions = ">=3.7" -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - [[package]] name = "pyalpm" version = "0.10.6" @@ -697,7 +701,7 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = 
"pytest" -version = "7.1.3" +version = "7.2.0" description = "pytest: simple powerful testing with Python" category = "main" optional = false @@ -706,11 +710,11 @@ python-versions = ">=3.7" [package.dependencies] attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -tomli = ">=1.0.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] @@ -744,18 +748,6 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] -[[package]] -name = "pytest-forked" -version = "1.4.0" -description = "run tests in isolated forked subprocesses" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -py = "*" -pytest = ">=3.10" - [[package]] name = "pytest-tap" version = "3.3" @@ -770,7 +762,7 @@ pytest = ">=3.0" [[package]] name = "pytest-xdist" -version = "2.5.0" +version = "3.0.2" description = "pytest xdist plugin for distributed testing and loop-on-failing modes" category = "main" optional = false @@ -779,7 +771,6 @@ python-versions = ">=3.6" [package.dependencies] execnet = ">=1.1" pytest = ">=6.2.0" -pytest-forked = "*" [package.extras] psutil = ["psutil (>=3.0)"] @@ -1058,7 +1049,7 @@ h11 = ">=0.9.0,<1" [[package]] name = "zipp" -version = "3.9.0" +version = "3.10.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false @@ -1071,7 +1062,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.11" -content-hash = "de9f0dc1d7e3f149a83629ad30d161da38aa1498b81aaa8bdfd2ebed50f232ab" +content-hash = 
"84f0bae9789174cbdc5aa672b9e72f0ef91763f63ed73e8cafb45f26efd9bb47" [metadata.files] aiofiles = [ @@ -1208,8 +1199,8 @@ click = [ {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] colorama = [ - {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, - {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] coverage = [ {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, @@ -1303,17 +1294,21 @@ email-validator = [ {file = "email_validator-1.3.0-py2.py3-none-any.whl", hash = "sha256:816073f2a7cffef786b29928f58ec16cdac42710a53bb18aa94317e3e145ec5c"}, {file = "email_validator-1.3.0.tar.gz", hash = "sha256:553a66f8be2ec2dea641ae1d3f29017ab89e9d603d4a25cdaac39eefa283d769"}, ] +exceptiongroup = [ + {file = "exceptiongroup-1.0.0-py3-none-any.whl", hash = "sha256:2ac84b496be68464a2da60da518af3785fff8b7ec0d090a581604bc870bdee41"}, + {file = "exceptiongroup-1.0.0.tar.gz", hash = "sha256:affbabf13fb6e98988c38d9c5650e701569fe3c1de3233cfb61c5f33774690ad"}, +] execnet = [ {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, ] fakeredis = [ - {file = "fakeredis-1.9.4-py3-none-any.whl", hash = "sha256:61afe14095aad3e7413a0a6fe63041da1b4bc3e41d5228a33b60bd03fabf22d8"}, - {file = "fakeredis-1.9.4.tar.gz", hash = 
"sha256:17415645d11994061f5394f3f1c76ba4531f3f8b63f9c55a8fd2120bebcbfae9"}, + {file = "fakeredis-1.10.0-py3-none-any.whl", hash = "sha256:0be420a79fabda234963a2730c4ce609a6d44a598e8dd253ce97785bef944285"}, + {file = "fakeredis-1.10.0.tar.gz", hash = "sha256:2b02370118535893d832bcd3c099ef282de3f13b29ae3922432e2225794ec334"}, ] fastapi = [ - {file = "fastapi-0.85.1-py3-none-any.whl", hash = "sha256:de3166b6b1163dc22da4dc4ebdc3192fcbac7700dd1870a1afa44de636a636b5"}, - {file = "fastapi-0.85.1.tar.gz", hash = "sha256:1facd097189682a4ff11cbd01334a992e51b56be663b2bd50c2c09523624f144"}, + {file = "fastapi-0.85.2-py3-none-any.whl", hash = "sha256:6292db0edd4a11f0d938d6033ccec5f706e9d476958bf33b119e8ddb4e524bde"}, + {file = "fastapi-0.85.2.tar.gz", hash = "sha256:3e10ea0992c700e0b17b6de8c2092d7b9cd763ce92c49ee8d4be10fee3b2f367"}, ] feedgen = [ {file = "feedgen-0.9.0.tar.gz", hash = "sha256:8e811bdbbed6570034950db23a4388453628a70e689a6e8303ccec430f5a804a"}, @@ -1323,72 +1318,61 @@ filelock = [ {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, ] greenlet = [ - {file = "greenlet-1.1.3.post0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:949c9061b8c6d3e6e439466a9be1e787208dec6246f4ec5fffe9677b4c19fcc3"}, - {file = "greenlet-1.1.3.post0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:d7815e1519a8361c5ea2a7a5864945906f8e386fa1bc26797b4d443ab11a4589"}, - {file = "greenlet-1.1.3.post0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9649891ab4153f217f319914455ccf0b86986b55fc0573ce803eb998ad7d6854"}, - {file = "greenlet-1.1.3.post0-cp27-cp27m-win32.whl", hash = "sha256:11fc7692d95cc7a6a8447bb160d98671ab291e0a8ea90572d582d57361360f05"}, - {file = "greenlet-1.1.3.post0-cp27-cp27m-win_amd64.whl", hash = "sha256:05ae7383f968bba4211b1fbfc90158f8e3da86804878442b4fb6c16ccbcaa519"}, - {file = "greenlet-1.1.3.post0-cp27-cp27mu-manylinux1_x86_64.whl", hash = 
"sha256:ccbe7129a282ec5797df0451ca1802f11578be018a32979131065565da89b392"}, - {file = "greenlet-1.1.3.post0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a8b58232f5b72973350c2b917ea3df0bebd07c3c82a0a0e34775fc2c1f857e9"}, - {file = "greenlet-1.1.3.post0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:f6661b58412879a2aa099abb26d3c93e91dedaba55a6394d1fb1512a77e85de9"}, - {file = "greenlet-1.1.3.post0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c6e942ca9835c0b97814d14f78da453241837419e0d26f7403058e8db3e38f8"}, - {file = "greenlet-1.1.3.post0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a812df7282a8fc717eafd487fccc5ba40ea83bb5b13eb3c90c446d88dbdfd2be"}, - {file = "greenlet-1.1.3.post0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83a7a6560df073ec9de2b7cb685b199dfd12519bc0020c62db9d1bb522f989fa"}, - {file = "greenlet-1.1.3.post0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:17a69967561269b691747e7f436d75a4def47e5efcbc3c573180fc828e176d80"}, - {file = "greenlet-1.1.3.post0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:60839ab4ea7de6139a3be35b77e22e0398c270020050458b3d25db4c7c394df5"}, - {file = "greenlet-1.1.3.post0-cp310-cp310-win_amd64.whl", hash = "sha256:8926a78192b8b73c936f3e87929931455a6a6c6c385448a07b9f7d1072c19ff3"}, - {file = "greenlet-1.1.3.post0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:c6f90234e4438062d6d09f7d667f79edcc7c5e354ba3a145ff98176f974b8132"}, - {file = "greenlet-1.1.3.post0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:814f26b864ed2230d3a7efe0336f5766ad012f94aad6ba43a7c54ca88dd77cba"}, - {file = "greenlet-1.1.3.post0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fda1139d87ce5f7bd80e80e54f9f2c6fe2f47983f1a6f128c47bf310197deb6"}, - {file = "greenlet-1.1.3.post0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c0643250dd0756f4960633f5359884f609a234d4066686754e834073d84e9b51"}, - {file = "greenlet-1.1.3.post0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cb863057bed786f6622982fb8b2c122c68e6e9eddccaa9fa98fd937e45ee6c4f"}, - {file = "greenlet-1.1.3.post0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8c0581077cf2734569f3e500fab09c0ff6a2ab99b1afcacbad09b3c2843ae743"}, - {file = "greenlet-1.1.3.post0-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:695d0d8b5ae42c800f1763c9fce9d7b94ae3b878919379150ee5ba458a460d57"}, - {file = "greenlet-1.1.3.post0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:5662492df0588a51d5690f6578f3bbbd803e7f8d99a99f3bf6128a401be9c269"}, - {file = "greenlet-1.1.3.post0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:bffba15cff4802ff493d6edcf20d7f94ab1c2aee7cfc1e1c7627c05f1102eee8"}, - {file = "greenlet-1.1.3.post0-cp35-cp35m-win32.whl", hash = "sha256:7afa706510ab079fd6d039cc6e369d4535a48e202d042c32e2097f030a16450f"}, - {file = "greenlet-1.1.3.post0-cp35-cp35m-win_amd64.whl", hash = "sha256:3a24f3213579dc8459e485e333330a921f579543a5214dbc935bc0763474ece3"}, - {file = "greenlet-1.1.3.post0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:64e10f303ea354500c927da5b59c3802196a07468332d292aef9ddaca08d03dd"}, - {file = "greenlet-1.1.3.post0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:eb6ac495dccb1520667cfea50d89e26f9ffb49fa28496dea2b95720d8b45eb54"}, - {file = "greenlet-1.1.3.post0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:88720794390002b0c8fa29e9602b395093a9a766b229a847e8d88349e418b28a"}, - {file = "greenlet-1.1.3.post0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39464518a2abe9c505a727af7c0b4efff2cf242aa168be5f0daa47649f4d7ca8"}, - {file = "greenlet-1.1.3.post0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0914f02fcaa8f84f13b2df4a81645d9e82de21ed95633765dd5cc4d3af9d7403"}, - {file = 
"greenlet-1.1.3.post0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96656c5f7c95fc02c36d4f6ef32f4e94bb0b6b36e6a002c21c39785a4eec5f5d"}, - {file = "greenlet-1.1.3.post0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4f74aa0092602da2069df0bc6553919a15169d77bcdab52a21f8c5242898f519"}, - {file = "greenlet-1.1.3.post0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:3aeac044c324c1a4027dca0cde550bd83a0c0fbff7ef2c98df9e718a5086c194"}, - {file = "greenlet-1.1.3.post0-cp36-cp36m-win32.whl", hash = "sha256:fe7c51f8a2ab616cb34bc33d810c887e89117771028e1e3d3b77ca25ddeace04"}, - {file = "greenlet-1.1.3.post0-cp36-cp36m-win_amd64.whl", hash = "sha256:70048d7b2c07c5eadf8393e6398595591df5f59a2f26abc2f81abca09610492f"}, - {file = "greenlet-1.1.3.post0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:66aa4e9a726b70bcbfcc446b7ba89c8cec40f405e51422c39f42dfa206a96a05"}, - {file = "greenlet-1.1.3.post0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:025b8de2273d2809f027d347aa2541651d2e15d593bbce0d5f502ca438c54136"}, - {file = "greenlet-1.1.3.post0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:82a38d7d2077128a017094aff334e67e26194f46bd709f9dcdacbf3835d47ef5"}, - {file = "greenlet-1.1.3.post0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7d20c3267385236b4ce54575cc8e9f43e7673fc761b069c820097092e318e3b"}, - {file = "greenlet-1.1.3.post0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8ece5d1a99a2adcb38f69af2f07d96fb615415d32820108cd340361f590d128"}, - {file = "greenlet-1.1.3.post0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2794eef1b04b5ba8948c72cc606aab62ac4b0c538b14806d9c0d88afd0576d6b"}, - {file = "greenlet-1.1.3.post0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a8d24eb5cb67996fb84633fdc96dbc04f2d8b12bfcb20ab3222d6be271616b67"}, - {file = "greenlet-1.1.3.post0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:0120a879aa2b1ac5118bce959ea2492ba18783f65ea15821680a256dfad04754"}, - {file = "greenlet-1.1.3.post0-cp37-cp37m-win32.whl", hash = "sha256:bef49c07fcb411c942da6ee7d7ea37430f830c482bf6e4b72d92fd506dd3a427"}, - {file = "greenlet-1.1.3.post0-cp37-cp37m-win_amd64.whl", hash = "sha256:62723e7eb85fa52e536e516ee2ac91433c7bb60d51099293671815ff49ed1c21"}, - {file = "greenlet-1.1.3.post0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d25cdedd72aa2271b984af54294e9527306966ec18963fd032cc851a725ddc1b"}, - {file = "greenlet-1.1.3.post0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:924df1e7e5db27d19b1359dc7d052a917529c95ba5b8b62f4af611176da7c8ad"}, - {file = "greenlet-1.1.3.post0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ec615d2912b9ad807afd3be80bf32711c0ff9c2b00aa004a45fd5d5dde7853d9"}, - {file = "greenlet-1.1.3.post0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0971d37ae0eaf42344e8610d340aa0ad3d06cd2eee381891a10fe771879791f9"}, - {file = "greenlet-1.1.3.post0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:325f272eb997916b4a3fc1fea7313a8adb760934c2140ce13a2117e1b0a8095d"}, - {file = "greenlet-1.1.3.post0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75afcbb214d429dacdf75e03a1d6d6c5bd1fa9c35e360df8ea5b6270fb2211c"}, - {file = "greenlet-1.1.3.post0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5c2d21c2b768d8c86ad935e404cc78c30d53dea009609c3ef3a9d49970c864b5"}, - {file = "greenlet-1.1.3.post0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:467b73ce5dcd89e381292fb4314aede9b12906c18fab903f995b86034d96d5c8"}, - {file = "greenlet-1.1.3.post0-cp38-cp38-win32.whl", hash = "sha256:8149a6865b14c33be7ae760bcdb73548bb01e8e47ae15e013bf7ef9290ca309a"}, - {file = "greenlet-1.1.3.post0-cp38-cp38-win_amd64.whl", hash = "sha256:104f29dd822be678ef6b16bf0035dcd43206a8a48668a6cae4d2fe9c7a7abdeb"}, - {file = "greenlet-1.1.3.post0-cp39-cp39-macosx_10_15_x86_64.whl", hash = 
"sha256:c8c9301e3274276d3d20ab6335aa7c5d9e5da2009cccb01127bddb5c951f8870"}, - {file = "greenlet-1.1.3.post0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:8415239c68b2ec9de10a5adf1130ee9cb0ebd3e19573c55ba160ff0ca809e012"}, - {file = "greenlet-1.1.3.post0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:3c22998bfef3fcc1b15694818fc9b1b87c6cc8398198b96b6d355a7bcb8c934e"}, - {file = "greenlet-1.1.3.post0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0aa1845944e62f358d63fcc911ad3b415f585612946b8edc824825929b40e59e"}, - {file = "greenlet-1.1.3.post0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:890f633dc8cb307761ec566bc0b4e350a93ddd77dc172839be122be12bae3e10"}, - {file = "greenlet-1.1.3.post0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cf37343e43404699d58808e51f347f57efd3010cc7cee134cdb9141bd1ad9ea"}, - {file = "greenlet-1.1.3.post0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5edf75e7fcfa9725064ae0d8407c849456553a181ebefedb7606bac19aa1478b"}, - {file = "greenlet-1.1.3.post0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a954002064ee919b444b19c1185e8cce307a1f20600f47d6f4b6d336972c809"}, - {file = "greenlet-1.1.3.post0-cp39-cp39-win32.whl", hash = "sha256:2ccdc818cc106cc238ff7eba0d71b9c77be868fdca31d6c3b1347a54c9b187b2"}, - {file = "greenlet-1.1.3.post0-cp39-cp39-win_amd64.whl", hash = "sha256:91a84faf718e6f8b888ca63d0b2d6d185c8e2a198d2a7322d75c303e7097c8b7"}, - {file = "greenlet-1.1.3.post0.tar.gz", hash = "sha256:f5e09dc5c6e1796969fd4b775ea1417d70e49a5df29aaa8e5d10675d9e11872c"}, + {file = "greenlet-2.0.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:4be4dedbd2fa9b7c35627f322d6d3139cb125bc18d5ef2f40237990850ea446f"}, + {file = "greenlet-2.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:75c022803de010294366f3608d4bba3e346693b1b7427b79d57e3d924ed03838"}, + {file = "greenlet-2.0.0-cp27-cp27m-win32.whl", hash = 
"sha256:4a1953465b7651073cffde74ed7d121e602ef9a9740d09ee137b01879ac15a2f"}, + {file = "greenlet-2.0.0-cp27-cp27m-win_amd64.whl", hash = "sha256:a65205e6778142528978b4acca76888e7e7f0be261e395664e49a5c21baa2141"}, + {file = "greenlet-2.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d71feebf5c8041c80dfda76427e14e3ca00bca042481bd3e9612a9d57b2cbbf7"}, + {file = "greenlet-2.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:f7edbd2957f72aea357241fe42ffc712a8e9b8c2c42f24e2ef5d97b255f66172"}, + {file = "greenlet-2.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79687c48e7f564be40c46b3afea6d141b8d66ffc2bc6147e026d491c6827954a"}, + {file = "greenlet-2.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a245898ec5e9ca0bc87a63e4e222cc633dc4d1f1a0769c34a625ad67edb9f9de"}, + {file = "greenlet-2.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adcf45221f253b3a681c99da46fa6ac33596fa94c2f30c54368f7ee1c4563a39"}, + {file = "greenlet-2.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3dc294afebf2acfd029373dbf3d01d36fd8d6888a03f5a006e2d690f66b153d9"}, + {file = "greenlet-2.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1cfeae4dda32eb5c64df05d347c4496abfa57ad16a90082798a2bba143c6c854"}, + {file = "greenlet-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:d58d4b4dc82e2d21ebb7dd7d3a6d370693b2236a1407fe3988dc1d4ea07575f9"}, + {file = "greenlet-2.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0d7efab8418c1fb3ea00c4abb89e7b0179a952d0d53ad5fcff798ca7440f8e8"}, + {file = "greenlet-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:f8a10e14238407be3978fa6d190eb3724f9d766655fefc0134fd5482f1fb0108"}, + {file = "greenlet-2.0.0-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:98b848a0b75e76b446dc71fdbac712d9078d96bb1c1607f049562dde1f8801e1"}, + {file = "greenlet-2.0.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = 
"sha256:8e8dbad9b4f4c3e37898914cfccb7c4f00dbe3146333cfe52a1a3103cc2ff97c"}, + {file = "greenlet-2.0.0-cp35-cp35m-win32.whl", hash = "sha256:069a8a557541a04518dc3beb9a78637e4e6b286814849a2ecfac529eaa78562b"}, + {file = "greenlet-2.0.0-cp35-cp35m-win_amd64.whl", hash = "sha256:cc211c2ff5d3b2ba8d557a71e3b4f0f0a2020067515143a9516ea43884271192"}, + {file = "greenlet-2.0.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:d4e7642366e638f45d70c5111590a56fbd0ffb7f474af20c6c67c01270bcf5cf"}, + {file = "greenlet-2.0.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:e7a0dca752b4e3395890ab4085c3ec3838d73714261914c01b53ed7ea23b5867"}, + {file = "greenlet-2.0.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8c67ecda450ad4eac7837057f5deb96effa836dacaf04747710ccf8eeb73092"}, + {file = "greenlet-2.0.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3cc1abaf47cfcfdc9ac0bdff173cebab22cd54e9e3490135a4a9302d0ff3b163"}, + {file = "greenlet-2.0.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efdbbbf7b6c8d5be52977afa65b9bb7b658bab570543280e76c0fabc647175ed"}, + {file = "greenlet-2.0.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:7acaa51355d5b9549d474dc71be6846ee9a8f2cb82f4936e5efa7a50bbeb94ad"}, + {file = "greenlet-2.0.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:2be628bca0395610da08921f9376dd14317f37256d41078f5c618358467681e1"}, + {file = "greenlet-2.0.0-cp36-cp36m-win32.whl", hash = "sha256:eca9c0473de053dcc92156dd62c38c3578628b536c7f0cd66e655e211c14ac32"}, + {file = "greenlet-2.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:9a4a9fea68fd98814999d91ea585e49ed68d7e199a70bef13a857439f60a4609"}, + {file = "greenlet-2.0.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:6b28420ae290bfbf5d827f976abccc2f74f0a3f5e4fb69b66acf98f1cbe95e7e"}, + {file = "greenlet-2.0.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = 
"sha256:2b8e1c939b363292ecc93999fb1ad53ffc5d0aac8e933e4362b62365241edda5"}, + {file = "greenlet-2.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c5ddadfe40e903c6217ed2b95a79f49e942bb98527547cc339fc7e43a424aad"}, + {file = "greenlet-2.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e5ead803b11b60b347e08e0f37234d9a595f44a6420026e47bcaf94190c3cd6"}, + {file = "greenlet-2.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b89b78ffb516c2921aa180c2794082666e26680eef05996b91f46127da24d964"}, + {file = "greenlet-2.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:939963d0137ec92540d95b68b7f795e8dbadce0a1fca53e3e7ef8ddc18ee47cb"}, + {file = "greenlet-2.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c1e93ef863810fba75faf418f0861dbf59bfe01a7b5d0a91d39603df58d3d3fa"}, + {file = "greenlet-2.0.0-cp37-cp37m-win32.whl", hash = "sha256:6fd342126d825b76bf5b49717a7c682e31ed1114906cdec7f5a0c2ff1bc737a7"}, + {file = "greenlet-2.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5392ddb893e7fba237b988f846c4a80576557cc08664d56dc1a69c5c02bdc80c"}, + {file = "greenlet-2.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b4fd73b62c1038e7ee938b1de328eaa918f76aa69c812beda3aff8a165494201"}, + {file = "greenlet-2.0.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:0ba0f2e5c4a8f141952411e356dba05d6fe0c38325ee0e4f2d0c6f4c2c3263d5"}, + {file = "greenlet-2.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8bacecee0c9348ab7c95df810e12585e9e8c331dfc1e22da4ed0bd635a5f483"}, + {file = "greenlet-2.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:341053e0a96d512315c27c34fad4672c4573caf9eb98310c39e7747645c88d8b"}, + {file = "greenlet-2.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49fcdd8ae391ffabb3b672397b58a9737aaff6b8cae0836e8db8ff386fcea802"}, + {file = 
"greenlet-2.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c3aa7d3bc545162a6676445709b24a2a375284dc5e2f2432d58b80827c2bd91c"}, + {file = "greenlet-2.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9d8dca31a39dd9f25641559b8cdf9066168c682dfcfbe0f797f03e4c9718a63a"}, + {file = "greenlet-2.0.0-cp38-cp38-win32.whl", hash = "sha256:aa2b371c3633e694d043d6cec7376cb0031c6f67029f37eef40bda105fd58753"}, + {file = "greenlet-2.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:0fa2a66fdf0d09929e79f786ad61529d4e752f452466f7ddaa5d03caf77a603d"}, + {file = "greenlet-2.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:e7ec3f2465ba9b7d25895307abe1c1c101a257c54b9ea1522bbcbe8ca8793735"}, + {file = "greenlet-2.0.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:99e9851e40150504474915605649edcde259a4cd9bce2fcdeb4cf33ad0b5c293"}, + {file = "greenlet-2.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20bf68672ae14ef2e2e6d3ac1f308834db1d0b920b3b0674eef48b2dce0498dd"}, + {file = "greenlet-2.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30198bccd774f9b6b1ba7564a0d02a79dd1fe926cfeb4107856fe16c9dfb441c"}, + {file = "greenlet-2.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d65d7d1ff64fb300127d2ffd27db909de4d21712a5dde59a3ad241fb65ee83d7"}, + {file = "greenlet-2.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2f5d396a5457458460b0c28f738fc8ab2738ee61b00c3f845c7047a333acd96c"}, + {file = "greenlet-2.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09f00f9938eb5ae1fe203558b56081feb0ca34a2895f8374cd01129ddf4d111c"}, + {file = "greenlet-2.0.0-cp39-cp39-win32.whl", hash = "sha256:089e123d80dbc6f61fff1ff0eae547b02c343d50968832716a7b0a33bea5f792"}, + {file = "greenlet-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:bc283f99a4815ef70cad537110e3e03abcef56ab7d005ba9a8c6ec33054ce9c0"}, + {file = "greenlet-2.0.0.tar.gz", hash = 
"sha256:6c66f0da8049ee3c126b762768179820d4c0ae0ca46ae489039e4da2fae39a52"}, ] gunicorn = [ {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, @@ -1574,48 +1558,55 @@ mysqlclient = [ {file = "mysqlclient-2.1.1.tar.gz", hash = "sha256:828757e419fb11dd6c5ed2576ec92c3efaa93a0f7c39e263586d1ee779c3d782"}, ] orjson = [ - {file = "orjson-3.8.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:9a93850a1bdc300177b111b4b35b35299f046148ba23020f91d6efd7bf6b9d20"}, - {file = "orjson-3.8.0-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:7536a2a0b41672f824912aeab545c2467a9ff5ca73a066ff04fb81043a0a177a"}, - {file = "orjson-3.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66c19399bb3b058e3236af7910b57b19a4fc221459d722ed72a7dc90370ca090"}, - {file = "orjson-3.8.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b391d5c2ddc2f302d22909676b306cb6521022c3ee306c861a6935670291b2c"}, - {file = "orjson-3.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bdb1042970ca5f544a047d6c235a7eb4acdb69df75441dd1dfcbc406377ab37"}, - {file = "orjson-3.8.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:d189e2acb510e374700cb98cf11b54f0179916ee40f8453b836157ae293efa79"}, - {file = "orjson-3.8.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6a23b40c98889e9abac084ce5a1fb251664b41da9f6bdb40a4729e2288ed2ed4"}, - {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b68a42a31f8429728183c21fb440c21de1b62e5378d0d73f280e2d894ef8942e"}, - {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ff13410ddbdda5d4197a4a4c09969cb78c722a67550f0a63c02c07aadc624833"}, - {file = "orjson-3.8.0-cp310-none-win_amd64.whl", hash = "sha256:2d81e6e56bbea44be0222fb53f7b255b4e7426290516771592738ca01dbd053b"}, - {file = 
"orjson-3.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e2defd9527651ad39ec20ae03c812adf47ef7662bdd6bc07dabb10888d70dc62"}, - {file = "orjson-3.8.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9e6ac22cec72d5b39035b566e4b86c74b84866f12b5b0b6541506a080fb67d6d"}, - {file = "orjson-3.8.0-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e2f4a5542f50e3d336a18cb224fc757245ca66b1fd0b70b5dd4471b8ff5f2b0e"}, - {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1418feeb8b698b9224b1f024555895169d481604d5d884498c1838d7412794c"}, - {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6e3da2e4bd27c3b796519ca74132c7b9e5348fb6746315e0f6c1592bc5cf1caf"}, - {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:896a21a07f1998648d9998e881ab2b6b80d5daac4c31188535e9d50460edfcf7"}, - {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:4065906ce3ad6195ac4d1bddde862fe811a42d7be237a1ff762666c3a4bb2151"}, - {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:5f856279872a4449fc629924e6a083b9821e366cf98b14c63c308269336f7c14"}, - {file = "orjson-3.8.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1b1cd25acfa77935bb2e791b75211cec0cfc21227fe29387e553c545c3ff87e1"}, - {file = "orjson-3.8.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3e2459d441ab8fd8b161aa305a73d5269b3cda13b5a2a39eba58b4dd3e394f49"}, - {file = "orjson-3.8.0-cp37-none-win_amd64.whl", hash = "sha256:d2b5dafbe68237a792143137cba413447f60dd5df428e05d73dcba10c1ea6fcf"}, - {file = "orjson-3.8.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:5b072ef8520cfe7bd4db4e3c9972d94336763c2253f7c4718a49e8733bada7b8"}, - {file = "orjson-3.8.0-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = 
"sha256:e68c699471ea3e2dd1b35bfd71c6a0a0e4885b64abbe2d98fce1ef11e0afaff3"}, - {file = "orjson-3.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7225e8b08996d1a0c804d3a641a53e796685e8c9a9fd52bd428980032cad9a"}, - {file = "orjson-3.8.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f687776a03c19f40b982fb5c414221b7f3d19097841571be2223d1569a59877"}, - {file = "orjson-3.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7990a9caf3b34016ac30be5e6cfc4e7efd76aa85614a1215b0eae4f0c7e3db59"}, - {file = "orjson-3.8.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:02d638d43951ba346a80f0abd5942a872cc87db443e073f6f6fc530fee81e19b"}, - {file = "orjson-3.8.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:f4b46dbdda2f0bd6480c39db90b21340a19c3b0fcf34bc4c6e465332930ca539"}, - {file = "orjson-3.8.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:655d7387a1634a9a477c545eea92a1ee902ab28626d701c6de4914e2ed0fecd2"}, - {file = "orjson-3.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5edb93cdd3eb32977633fa7aaa6a34b8ab54d9c49cdcc6b0d42c247a29091b22"}, - {file = "orjson-3.8.0-cp38-none-win_amd64.whl", hash = "sha256:03ed95814140ff09f550b3a42e6821f855d981c94d25b9cc83e8cca431525d70"}, - {file = "orjson-3.8.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7b0e72974a5d3b101226899f111368ec2c9824d3e9804af0e5b31567f53ad98a"}, - {file = "orjson-3.8.0-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:6ea5fe20ef97545e14dd4d0263e4c5c3bc3d2248d39b4b0aed4b84d528dfc0af"}, - {file = "orjson-3.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6433c956f4a18112342a18281e0bec67fcd8b90be3a5271556c09226e045d805"}, - {file = "orjson-3.8.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87462791dd57de2e3e53068bf4b7169c125c50960f1bdda08ed30c797cb42a56"}, - {file = 
"orjson-3.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be02f6acee33bb63862eeff80548cd6b8a62e2d60ad2d8dfd5a8824cc43d8887"}, - {file = "orjson-3.8.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:a709c2249c1f2955dbf879506fd43fa08c31fdb79add9aeb891e3338b648bf60"}, - {file = "orjson-3.8.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:2065b6d280dc58f131ffd93393737961ff68ae7eb6884b68879394074cc03c13"}, - {file = "orjson-3.8.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fd6cac83136e06e538a4d17117eaeabec848c1e86f5742d4811656ad7ee475f"}, - {file = "orjson-3.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25b5e48fbb9f0b428a5e44cf740675c9281dd67816149fc33659803399adbbe8"}, - {file = "orjson-3.8.0-cp39-none-win_amd64.whl", hash = "sha256:2058653cc12b90e482beacb5c2d52dc3d7606f9e9f5a52c1c10ef49371e76f52"}, - {file = "orjson-3.8.0.tar.gz", hash = "sha256:fb42f7cf57d5804a9daa6b624e3490ec9e2631e042415f3aebe9f35a8492ba6c"}, + {file = "orjson-3.8.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:a70aaa2e56356e58c6e1b49f7b7f069df5b15e55db002a74db3ff3f7af67c7ff"}, + {file = "orjson-3.8.1-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:d45db052d01d0ab7579470141d5c3592f4402d43cfacb67f023bc1210a67b7bc"}, + {file = "orjson-3.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2aae92398c0023ac26a6cd026375f765ef5afe127eccabf563c78af7b572d59"}, + {file = "orjson-3.8.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0bd5b4e539db8a9635776bdf9a25c3db84e37165e65d45c8ca90437adc46d6d8"}, + {file = "orjson-3.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21efb87b168066201a120b0f54a2381f6f51ff3727e07b3908993732412b314a"}, + {file = "orjson-3.8.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:e073338e422f518c1d4d80efc713cd17f3ed6d37c8c7459af04a95459f3206d1"}, + {file = 
"orjson-3.8.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:8f672f3987f6424f60ab2e86ea7ed76dd2806b8e9b506a373fc8499aed85ddb5"}, + {file = "orjson-3.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:231c30958ed99c23128a21993c5ac0a70e1e568e6a898a47f70d5d37461ca47c"}, + {file = "orjson-3.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59b4baf71c9f39125d7e535974b146cc180926462969f6d8821b4c5e975e11b3"}, + {file = "orjson-3.8.1-cp310-none-win_amd64.whl", hash = "sha256:fe25f50dc3d45364428baa0dbe3f613a5171c64eb0286eb775136b74e61ba58a"}, + {file = "orjson-3.8.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:6802edf98f6918e89df355f56be6e7db369b31eed64ff2496324febb8b0aa43b"}, + {file = "orjson-3.8.1-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:a4244f4199a160717f0027e434abb886e322093ceadb2f790ff0c73ed3e17662"}, + {file = "orjson-3.8.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6956cf7a1ac97523e96f75b11534ff851df99a6474a561ad836b6e82004acbb8"}, + {file = "orjson-3.8.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b4e3857dd2416b479f700e9bdf4fcec8c690d2716622397d2b7e848f9833e50"}, + {file = "orjson-3.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8873e490dea0f9cd975d66f84618b6fb57b1ba45ecb218313707a71173d764f"}, + {file = "orjson-3.8.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:124207d2cd04e845eaf2a6171933cde40aebcb8c2d7d3b081e01be066d3014b6"}, + {file = "orjson-3.8.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d8ed77098c2e22181fce971f49a34204c38b79ca91c01d515d07015339ae8165"}, + {file = "orjson-3.8.1-cp311-none-win_amd64.whl", hash = "sha256:8623ac25fa0850a44ac845e9333c4da9ae5707b7cec8ac87cbe9d4e41137180f"}, + {file = "orjson-3.8.1-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:d67a0bd0283a3b17ac43c5ab8e4a7e9d3aa758d6ec5d51c232343c408825a5ad"}, + {file = 
"orjson-3.8.1-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:d89ef8a4444d83e0a5171d14f2ab4895936ab1773165b020f97d29cf289a2d88"}, + {file = "orjson-3.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97839a6abbebb06099294e6057d5b3061721ada08b76ae792e7041b6cb54c97f"}, + {file = "orjson-3.8.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6071bcf51f0ae4d53b9d3e9164f7138164df4291c484a7b14562075aaa7a2b7b"}, + {file = "orjson-3.8.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c15e7d691cee75b5192fc1fa8487bf541d463246dc25c926b9b40f5b6ab56770"}, + {file = "orjson-3.8.1-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:b9abc49c014def1b832fcd53bdc670474b6fe41f373d16f40409882c0d0eccba"}, + {file = "orjson-3.8.1-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:3fd5472020042482d7da4c26a0ee65dbd931f691e1c838c6cf4232823179ecc1"}, + {file = "orjson-3.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e399ed1b0d6f8089b9b6ff2cb3e71ba63a56d8ea88e1d95467949795cc74adfd"}, + {file = "orjson-3.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5e3db6496463c3000d15b7a712da5a9601c6c43682f23f81862fe1d2a338f295"}, + {file = "orjson-3.8.1-cp37-none-win_amd64.whl", hash = "sha256:0f21eed14697083c01f7e00a87e21056fc8fb5851e8a7bca98345189abcdb4d4"}, + {file = "orjson-3.8.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:5a9e324213220578d324e0858baeab47808a13d3c3fbc6ba55a3f4f069d757cf"}, + {file = "orjson-3.8.1-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:69097c50c3ccbcc61292192b045927f1688ca57ce80525dc5d120e0b91e19bb0"}, + {file = "orjson-3.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7822cba140f7ca48ed0256229f422dbae69e3a3475176185db0c0538cfadb57"}, + {file = "orjson-3.8.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:03389e3750c521a7f3d4837de23cfd21a7f24574b4b3985c9498f440d21adb03"}, + {file = "orjson-3.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0f9d9b5c6692097de07dd0b2d5ff20fd135bacd1b2fb7ea383ee717a4150c93"}, + {file = "orjson-3.8.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:c2c9ef10b6344465fd5ac002be2d34f818211274dd79b44c75b2c14a979f84f3"}, + {file = "orjson-3.8.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7adaac93678ac61f5dc070f615b18639d16ee66f6a946d5221dbf315e8b74bec"}, + {file = "orjson-3.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b0c1750f73658906b82cabbf4be2f74300644c17cb037fbc8b48d746c3b90c76"}, + {file = "orjson-3.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:da6306e1f03e7085fe0db61d4a3377f70c6fd865118d0afe17f80ae9a8f6f124"}, + {file = "orjson-3.8.1-cp38-none-win_amd64.whl", hash = "sha256:f532c2cbe8c140faffaebcfb34d43c9946599ea8138971f181a399bec7d6b123"}, + {file = "orjson-3.8.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:6a7b76d4b44bca418f7797b1e157907b56b7d31caa9091db4e99ebee51c16933"}, + {file = "orjson-3.8.1-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:f850489d89ea12be486492e68f0fd63e402fa28e426d4f0b5fc1eec0595e6109"}, + {file = "orjson-3.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4449e70b98f3ad3e43958360e4be1189c549865c0a128e8629ec96ce92d251c3"}, + {file = "orjson-3.8.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:45357eea9114bd41ef19280066591e9069bb4f6f5bffd533e9bfc12a439d735f"}, + {file = "orjson-3.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5a9bc5bc4d730153529cb0584c63ff286d50663ccd48c9435423660b1bb12d"}, + {file = "orjson-3.8.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:a806aca6b80fa1d996aa16593e4995a71126a085ee1a59fff19ccad29a4e47fd"}, + {file = "orjson-3.8.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = 
"sha256:395d02fd6be45f960da014372e7ecefc9e5f8df57a0558b7111a5fa8423c0669"}, + {file = "orjson-3.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:caff3c1e964cfee044a03a46244ecf6373f3c56142ad16458a1446ac6d69824a"}, + {file = "orjson-3.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ded261268d5dfd307078fe3370295e5eb15bdde838bbb882acf8538e061c451"}, + {file = "orjson-3.8.1-cp39-none-win_amd64.whl", hash = "sha256:45c1914795ffedb2970bfcd3ed83daf49124c7c37943ed0a7368971c6ea5e278"}, + {file = "orjson-3.8.1.tar.gz", hash = "sha256:07c42de52dfef56cdcaf2278f58e837b26f5b5af5f1fd133a68c4af203851fc7"}, ] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, @@ -1652,24 +1643,20 @@ prometheus-fastapi-instrumentator = [ {file = "prometheus_fastapi_instrumentator-5.9.1-py3-none-any.whl", hash = "sha256:b5206ea9aa6975a0b07f3bf7376932b8a1b2983164b5abb04878e75ba336d9ed"}, ] protobuf = [ - {file = "protobuf-4.21.8-cp310-abi3-win32.whl", hash = "sha256:c252c55ee15175aa1b21b7b9896e6add5162d066d5202e75c39f96136f08cce3"}, - {file = "protobuf-4.21.8-cp310-abi3-win_amd64.whl", hash = "sha256:809ca0b225d3df42655a12f311dd0f4148a943c51f1ad63c38343e457492b689"}, - {file = "protobuf-4.21.8-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bbececaf3cfea9ea65ebb7974e6242d310d2a7772a6f015477e0d79993af4511"}, - {file = "protobuf-4.21.8-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:b02eabb9ebb1a089ed20626a90ad7a69cee6bcd62c227692466054b19c38dd1f"}, - {file = "protobuf-4.21.8-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:4761201b93e024bb70ee3a6a6425d61f3152ca851f403ba946fb0cde88872661"}, - {file = "protobuf-4.21.8-cp37-cp37m-win32.whl", hash = "sha256:f2d55ff22ec300c4d954d3b0d1eeb185681ec8ad4fbecff8a5aee6a1cdd345ba"}, - {file = "protobuf-4.21.8-cp37-cp37m-win_amd64.whl", hash = "sha256:c5f94911dd8feb3cd3786fc90f7565c9aba7ce45d0f254afd625b9628f578c3f"}, - {file = 
"protobuf-4.21.8-cp38-cp38-win32.whl", hash = "sha256:b37b76efe84d539f16cba55ee0036a11ad91300333abd213849cbbbb284b878e"}, - {file = "protobuf-4.21.8-cp38-cp38-win_amd64.whl", hash = "sha256:2c92a7bfcf4ae76a8ac72e545e99a7407e96ffe52934d690eb29a8809ee44d7b"}, - {file = "protobuf-4.21.8-cp39-cp39-win32.whl", hash = "sha256:89d641be4b5061823fa0e463c50a2607a97833e9f8cfb36c2f91ef5ccfcc3861"}, - {file = "protobuf-4.21.8-cp39-cp39-win_amd64.whl", hash = "sha256:bc471cf70a0f53892fdd62f8cd4215f0af8b3f132eeee002c34302dff9edd9b6"}, - {file = "protobuf-4.21.8-py2.py3-none-any.whl", hash = "sha256:a55545ce9eec4030cf100fcb93e861c622d927ef94070c1a3c01922902464278"}, - {file = "protobuf-4.21.8-py3-none-any.whl", hash = "sha256:0f236ce5016becd989bf39bd20761593e6d8298eccd2d878eda33012645dc369"}, - {file = "protobuf-4.21.8.tar.gz", hash = "sha256:427426593b55ff106c84e4a88cac855175330cb6eb7e889e85aaa7b5652b686d"}, -] -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, + {file = "protobuf-4.21.9-cp310-abi3-win32.whl", hash = "sha256:6e0be9f09bf9b6cf497b27425487706fa48c6d1632ddd94dab1a5fe11a422392"}, + {file = "protobuf-4.21.9-cp310-abi3-win_amd64.whl", hash = "sha256:a7d0ea43949d45b836234f4ebb5ba0b22e7432d065394b532cdca8f98415e3cf"}, + {file = "protobuf-4.21.9-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b5ab0b8918c136345ff045d4b3d5f719b505b7c8af45092d7f45e304f55e50a1"}, + {file = "protobuf-4.21.9-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:2c9c2ed7466ad565f18668aa4731c535511c5d9a40c6da39524bccf43e441719"}, + {file = "protobuf-4.21.9-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:e575c57dc8b5b2b2caa436c16d44ef6981f2235eb7179bfc847557886376d740"}, + {file = "protobuf-4.21.9-cp37-cp37m-win32.whl", hash = 
"sha256:9227c14010acd9ae7702d6467b4625b6fe853175a6b150e539b21d2b2f2b409c"}, + {file = "protobuf-4.21.9-cp37-cp37m-win_amd64.whl", hash = "sha256:a419cc95fca8694804709b8c4f2326266d29659b126a93befe210f5bbc772536"}, + {file = "protobuf-4.21.9-cp38-cp38-win32.whl", hash = "sha256:5b0834e61fb38f34ba8840d7dcb2e5a2f03de0c714e0293b3963b79db26de8ce"}, + {file = "protobuf-4.21.9-cp38-cp38-win_amd64.whl", hash = "sha256:84ea107016244dfc1eecae7684f7ce13c788b9a644cd3fca5b77871366556444"}, + {file = "protobuf-4.21.9-cp39-cp39-win32.whl", hash = "sha256:f9eae277dd240ae19bb06ff4e2346e771252b0e619421965504bd1b1bba7c5fa"}, + {file = "protobuf-4.21.9-cp39-cp39-win_amd64.whl", hash = "sha256:6e312e280fbe3c74ea9e080d9e6080b636798b5e3939242298b591064470b06b"}, + {file = "protobuf-4.21.9-py2.py3-none-any.whl", hash = "sha256:7eb8f2cc41a34e9c956c256e3ac766cf4e1a4c9c925dc757a41a01be3e852965"}, + {file = "protobuf-4.21.9-py3-none-any.whl", hash = "sha256:48e2cd6b88c6ed3d5877a3ea40df79d08374088e89bedc32557348848dff250b"}, + {file = "protobuf-4.21.9.tar.gz", hash = "sha256:61f21493d96d2a77f9ca84fefa105872550ab5ef71d21c458eb80edcf4885a99"}, ] pyalpm = [ {file = "pyalpm-0.10.6.tar.gz", hash = "sha256:99e6ec73b8c46bb12466013f228f831ee0d18e8ab664b91a01c2a3c40de07c7f"}, @@ -1760,8 +1747,8 @@ pyparsing = [ {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] pytest = [ - {file = "pytest-7.1.3-py3-none-any.whl", hash = "sha256:1377bda3466d70b55e3f5cecfa55bb7cfcf219c7964629b967c37cf0bda818b7"}, - {file = "pytest-7.1.3.tar.gz", hash = "sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39"}, + {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, + {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, ] pytest-asyncio = [ {file = "pytest-asyncio-0.20.1.tar.gz", hash = 
"sha256:626699de2a747611f3eeb64168b3575f70439b06c3d0206e6ceaeeb956e65519"}, @@ -1771,17 +1758,13 @@ pytest-cov = [ {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, ] -pytest-forked = [ - {file = "pytest-forked-1.4.0.tar.gz", hash = "sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e"}, - {file = "pytest_forked-1.4.0-py3-none-any.whl", hash = "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"}, -] pytest-tap = [ {file = "pytest-tap-3.3.tar.gz", hash = "sha256:5f0919a147cf0396b2f10d64d365a0bf8062e06543e93c675c9d37f5605e983c"}, {file = "pytest_tap-3.3-py3-none-any.whl", hash = "sha256:4fbbc0e090c2e94f6199bee4e4f68ab3c5e176b37a72a589ad84e0f72a2fce55"}, ] pytest-xdist = [ - {file = "pytest-xdist-2.5.0.tar.gz", hash = "sha256:4580deca3ff04ddb2ac53eba39d76cb5dd5edeac050cb6fbc768b0dd712b4edf"}, - {file = "pytest_xdist-2.5.0-py3-none-any.whl", hash = "sha256:6fe5c74fec98906deb8f2d2b616b5c782022744978e7bd4695d39c8f42d0ce65"}, + {file = "pytest-xdist-3.0.2.tar.gz", hash = "sha256:688da9b814370e891ba5de650c9327d1a9d861721a524eb917e620eec3e90291"}, + {file = "pytest_xdist-3.0.2-py3-none-any.whl", hash = "sha256:9feb9a18e1790696ea23e1434fa73b325ed4998b0e9fcb221f16fd1945e6df1b"}, ] python-dateutil = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, @@ -1972,6 +1955,6 @@ wsproto = [ {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, ] zipp = [ - {file = "zipp-3.9.0-py3-none-any.whl", hash = "sha256:972cfa31bc2fedd3fa838a51e9bc7e64b7fb725a8c00e7431554311f180e9980"}, - {file = "zipp-3.9.0.tar.gz", hash = "sha256:3a7af91c3db40ec72dd9d154ae18e008c69efe8ca88dde4f9a731bb82fe2f9eb"}, + {file = 
"zipp-3.10.0-py3-none-any.whl", hash = "sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1"}, + {file = "zipp-3.10.0.tar.gz", hash = "sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8"}, ] diff --git a/pyproject.toml b/pyproject.toml index 3b615c73..0bf1bdf8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -62,13 +62,13 @@ asgiref = "^3.4.1" bcrypt = "^4.0.0" bleach = "^5.0.0" email-validator = "^1.3.0" -fakeredis = "^1.6.1" +fakeredis = "^1.10.0" feedgen = "^0.9.0" httpx = "^0.23.0" itsdangerous = "^2.0.1" lxml = "^4.6.3" -orjson = "^3.6.4" -protobuf = "^4.0.0" +orjson = "^3.8.1" +protobuf = "^4.21.9" pygit2 = "^1.7.0" python-multipart = "^0.0.5" redis = "^4.0.0" @@ -89,7 +89,7 @@ uvicorn = "^0.19.0" gunicorn = "^20.1.0" Hypercorn = "^0.14.0" prometheus-fastapi-instrumentator = "^5.7.1" -pytest-xdist = "^2.4.0" +pytest-xdist = "^3.0.2" filelock = "^3.3.2" posix-ipc = "^1.0.5" pyalpm = "^0.10.6" @@ -98,7 +98,7 @@ srcinfo = "^0.0.8" [tool.poetry.dev-dependencies] coverage = "^6.0.2" -pytest = "^7.0.0" +pytest = "^7.2.0" pytest-asyncio = "^0.20.1" pytest-cov = "^4.0.0" pytest-tap = "^3.2" From c0e806072e705652f0eb6d22dff1f64ab8735dcd Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Tue, 1 Nov 2022 18:31:37 +0000 Subject: [PATCH 147/415] chore: bump to v6.1.8 Signed-off-by: Leonidas Spyropoulos --- aurweb/config.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aurweb/config.py b/aurweb/config.py index e8ca70d9..49806738 100644 --- a/aurweb/config.py +++ b/aurweb/config.py @@ -5,7 +5,7 @@ from typing import Any # Publicly visible version of aurweb. This is used to display # aurweb versioning in the footer and must be maintained. # Todo: Make this dynamic/automated. 
-AURWEB_VERSION = "v6.1.7" +AURWEB_VERSION = "v6.1.8" _parser = None diff --git a/pyproject.toml b/pyproject.toml index 0bf1bdf8..7fc0db47 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ combine_as_imports = true # [tool.poetry] name = "aurweb" -version = "v6.1.7" +version = "v6.1.8" license = "GPL-2.0-only" description = "Source code for the Arch User Repository's website" homepage = "https://aur.archlinux.org" From 4f56a0166208b781adc13da53bc001e376379b57 Mon Sep 17 00:00:00 2001 From: Lex Black Date: Fri, 4 Nov 2022 08:47:03 +0100 Subject: [PATCH 148/415] chore: fix mailing-lists urls Those changed after the migration to mailman3 Signed-off-by: Leonidas Spyropoulos --- templates/home.html | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/templates/home.html b/templates/home.html index 6a5fca69..3a7bc76d 100644 --- a/templates/home.html +++ b/templates/home.html @@ -42,7 +42,7 @@

    {{ "If you want to discuss a request, you can use the %saur-requests%s mailing list. However, please do not use that list to file requests." | tr - | format('', "") + | format('', "") | safe }}

    @@ -72,8 +72,8 @@

    {{ "General discussion regarding the Arch User Repository (AUR) and Trusted User structure takes place on %saur-general%s. For discussion relating to the development of the AUR web interface, use the %saur-dev%s mailing list." | tr - | format('', "", - '', "") + | format('', "", + '', "") | safe }}

    From c248a74f80d5c72bd6a01f5dfc7ee1c05b2bc6a5 Mon Sep 17 00:00:00 2001 From: moson-mo Date: Mon, 7 Nov 2022 14:36:34 +0100 Subject: [PATCH 149/415] chore: fix mailing-list URL on passreset page small addition to the patch provided in #404 Signed-off-by: moson-mo --- templates/passreset.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/templates/passreset.html b/templates/passreset.html index d2c3c2ee..6a31109f 100644 --- a/templates/passreset.html +++ b/templates/passreset.html @@ -47,7 +47,7 @@ {% else %} - {% set url = "https://mailman.archlinux.org/mailman/listinfo/aur-general" %} + {% set url = "https://lists.archlinux.org/mailman3/lists/aur-general.lists.archlinux.org/" %} {{ "If you have forgotten the user name and the primary e-mail " "address you used to register, please send a message to the " "%saur-general%s mailing list." From 73f0bddf0b52bc79ef29b5eaf20f2af8d305528b Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Tue, 8 Nov 2022 13:14:42 +0000 Subject: [PATCH 150/415] fix: handle default requests when using pages The default page shows the pending requests which were working OK if one used the Filters button. This fixes the case when someone submits by using the pager (Next, Last etc). 
Closes: #405 Signed-off-by: Leonidas Spyropoulos --- aurweb/routers/requests.py | 10 +++++++-- test/test_requests.py | 46 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 54 insertions(+), 2 deletions(-) diff --git a/aurweb/routers/requests.py b/aurweb/routers/requests.py index ca5fae73..d1f1b830 100644 --- a/aurweb/routers/requests.py +++ b/aurweb/routers/requests.py @@ -18,6 +18,13 @@ from aurweb.requests.util import get_pkgreq_by_id from aurweb.scripts import notify from aurweb.templates import make_context, render_template +FILTER_PARAMS = { + "filter_pending", + "filter_closed", + "filter_accepted", + "filter_rejected", +} + router = APIRouter() @@ -36,7 +43,7 @@ async def requests( context["q"] = dict(request.query_params) - if len(dict(request.query_params)) == 0: + if not dict(request.query_params).keys() & FILTER_PARAMS: filter_pending = True O, PP = util.sanitize_params(O, PP) @@ -89,7 +96,6 @@ async def requests( .offset(O) .all() ) - return render_template(request, "requests.html", context) diff --git a/test/test_requests.py b/test/test_requests.py index 344b9edc..7dfcf5e5 100644 --- a/test/test_requests.py +++ b/test/test_requests.py @@ -734,6 +734,52 @@ def test_requests( rows = root.xpath('//table[@class="results"]/tbody/tr') assert len(rows) == defaults.PP + # Request page 2 of the requests page. + with client as request: + resp = request.get("/requests", params={"O": 50}, cookies=cookies) # Page 2 + assert resp.status_code == int(HTTPStatus.OK) + + assert "‹ Previous" in resp.text + assert "« First" in resp.text + + root = parse_root(resp.text) + rows = root.xpath('//table[@class="results"]/tbody/tr') + assert len(rows) == 5 # There are five records left on the second page. 
+ + +def test_requests_with_filters( + client: TestClient, + tu_user: User, + packages: list[Package], + requests: list[PackageRequest], +): + cookies = {"AURSID": tu_user.login(Request(), "testPassword")} + with client as request: + resp = request.get( + "/requests", + params={ + # Pass in url query parameters O, SeB and SB to exercise + # their paths inside of the pager_nav used in this request. + "O": 0, # Page 1 + "SeB": "nd", + "SB": "n", + "filter_pending": True, + "filter_closed": True, + "filter_accepted": True, + "filter_rejected": True, + }, + cookies=cookies, + ) + assert resp.status_code == int(HTTPStatus.OK) + + assert "Next ›" in resp.text + assert "Last »" in resp.text + + root = parse_root(resp.text) + # We have 55 requests, our defaults.PP is 50, so expect we have 50 rows. + rows = root.xpath('//table[@class="results"]/tbody/tr') + assert len(rows) == defaults.PP + # Request page 2 of the requests page. with client as request: resp = request.get( From 50287cb066c02de5337f87508e51852d4a1e5ccb Mon Sep 17 00:00:00 2001 From: moson-mo Date: Mon, 7 Nov 2022 14:19:38 +0100 Subject: [PATCH 151/415] feat(rpc): add "by" parameters - package relations This adds new "by" search-parameters: provides, conflicts and replaces Signed-off-by: moson-mo --- aurweb/packages/search.py | 34 ++++++++++++++++++++++++++++++++++ aurweb/rpc.py | 3 +++ test/test_rpc.py | 36 ++++++++++++++++++++++++++++++++++++ 3 files changed, 73 insertions(+) diff --git a/aurweb/packages/search.py b/aurweb/packages/search.py index 224212d1..7e767bde 100644 --- a/aurweb/packages/search.py +++ b/aurweb/packages/search.py @@ -14,6 +14,7 @@ from aurweb.models.package_comaintainer import PackageComaintainer from aurweb.models.package_keyword import PackageKeyword from aurweb.models.package_notification import PackageNotification from aurweb.models.package_vote import PackageVote +from aurweb.models.relation_type import CONFLICTS_ID, PROVIDES_ID, REPLACES_ID class PackageSearch: @@ -286,6 +287,9 
@@ class RPCSearch(PackageSearch): "makedepends": self._search_by_makedepends, "optdepends": self._search_by_optdepends, "checkdepends": self._search_by_checkdepends, + "provides": self._search_by_provides, + "conflicts": self._search_by_conflicts, + "replaces": self._search_by_replaces, } ) @@ -304,6 +308,18 @@ class RPCSearch(PackageSearch): ) return self.query + def _join_relations(self, rel_type_id: int) -> orm.Query: + """Join Package with PackageRelation and filter results + based on `rel_type_id`. + + :param rel_type_id: RelationType ID + :returns: PackageRelation-joined orm.Query + """ + self.query = self.query.join(models.PackageRelation).filter( + models.PackageRelation.RelTypeID == rel_type_id + ) + return self.query + def _search_by_depends(self, keywords: str) -> "RPCSearch": self.query = self._join_depends(DEPENDS_ID).filter( models.PackageDependency.DepName == keywords @@ -328,6 +344,24 @@ class RPCSearch(PackageSearch): ) return self + def _search_by_provides(self, keywords: str) -> "RPCSearch": + self.query = self._join_relations(PROVIDES_ID).filter( + models.PackageRelation.RelName == keywords + ) + return self + + def _search_by_conflicts(self, keywords: str) -> "RPCSearch": + self.query = self._join_relations(CONFLICTS_ID).filter( + models.PackageRelation.RelName == keywords + ) + return self + + def _search_by_replaces(self, keywords: str) -> "RPCSearch": + self.query = self._join_relations(REPLACES_ID).filter( + models.PackageRelation.RelName == keywords + ) + return self + def search_by(self, by: str, keywords: str) -> "RPCSearch": """Override inherited search_by. In this override, we reduce the scope of what we handle within this function. We do not set `by` diff --git a/aurweb/rpc.py b/aurweb/rpc.py index 515c6ffb..9004a51f 100644 --- a/aurweb/rpc.py +++ b/aurweb/rpc.py @@ -83,6 +83,9 @@ class RPC: "makedepends", "optdepends", "checkdepends", + "provides", + "conflicts", + "replaces", } # A mapping of by aliases. 
diff --git a/test/test_rpc.py b/test/test_rpc.py index f417d379..c5004f07 100644 --- a/test/test_rpc.py +++ b/test/test_rpc.py @@ -852,6 +852,42 @@ def test_rpc_search_checkdepends( assert result.get("Name") == packages[0].Name +def test_rpc_search_provides( + client: TestClient, packages: list[Package], relations: list[PackageRelation] +): + params = {"v": 5, "type": "search", "by": "provides", "arg": "chungus-provides"} + with client as request: + response = request.get("/rpc", params=params) + data = response.json() + assert data.get("resultcount") == 1 + result = data.get("results")[0] + assert result.get("Name") == packages[0].Name + + +def test_rpc_search_conflicts( + client: TestClient, packages: list[Package], relations: list[PackageRelation] +): + params = {"v": 5, "type": "search", "by": "conflicts", "arg": "chungus-conflicts"} + with client as request: + response = request.get("/rpc", params=params) + data = response.json() + assert data.get("resultcount") == 1 + result = data.get("results")[0] + assert result.get("Name") == packages[0].Name + + +def test_rpc_search_replaces( + client: TestClient, packages: list[Package], relations: list[PackageRelation] +): + params = {"v": 5, "type": "search", "by": "replaces", "arg": "chungus-replaces"} + with client as request: + response = request.get("/rpc", params=params) + data = response.json() + assert data.get("resultcount") == 1 + result = data.get("results")[0] + assert result.get("Name") == packages[0].Name + + def test_rpc_incorrect_by(client: TestClient): params = {"v": 5, "type": "search", "by": "fake", "arg": "big"} with client as request: From 0583f30a53880b8908dd3746bf81b8f560bc09b2 Mon Sep 17 00:00:00 2001 From: moson-mo Date: Mon, 7 Nov 2022 21:41:42 +0100 Subject: [PATCH 152/415] feat(rpc): add "by" parameter - groups Adding "by" parameter to search by "groups" Signed-off-by: moson-mo --- aurweb/packages/search.py | 17 ++++++++++++++++- aurweb/rpc.py | 1 + test/test_rpc.py | 25 
++++++++++++++++++++++++- 3 files changed, 41 insertions(+), 2 deletions(-) diff --git a/aurweb/packages/search.py b/aurweb/packages/search.py index 7e767bde..60e9f0fc 100644 --- a/aurweb/packages/search.py +++ b/aurweb/packages/search.py @@ -3,7 +3,7 @@ from typing import Set from sqlalchemy import and_, case, or_, orm from aurweb import db, models -from aurweb.models import Package, PackageBase, User +from aurweb.models import Group, Package, PackageBase, User from aurweb.models.dependency_type import ( CHECKDEPENDS_ID, DEPENDS_ID, @@ -11,6 +11,7 @@ from aurweb.models.dependency_type import ( OPTDEPENDS_ID, ) from aurweb.models.package_comaintainer import PackageComaintainer +from aurweb.models.package_group import PackageGroup from aurweb.models.package_keyword import PackageKeyword from aurweb.models.package_notification import PackageNotification from aurweb.models.package_vote import PackageVote @@ -290,6 +291,7 @@ class RPCSearch(PackageSearch): "provides": self._search_by_provides, "conflicts": self._search_by_conflicts, "replaces": self._search_by_replaces, + "groups": self._search_by_groups, } ) @@ -320,6 +322,14 @@ class RPCSearch(PackageSearch): ) return self.query + def _join_groups(self) -> orm.Query: + """Join Package with PackageGroup and Group. + + :returns: PackageGroup/Group-joined orm.Query + """ + self.query = self.query.join(PackageGroup).join(Group) + return self.query + def _search_by_depends(self, keywords: str) -> "RPCSearch": self.query = self._join_depends(DEPENDS_ID).filter( models.PackageDependency.DepName == keywords @@ -362,6 +372,11 @@ class RPCSearch(PackageSearch): ) return self + def _search_by_groups(self, keywords: str) -> orm.Query: + self._join_groups() + self.query = self.query.filter(Group.Name == keywords) + return self + def search_by(self, by: str, keywords: str) -> "RPCSearch": """Override inherited search_by. In this override, we reduce the scope of what we handle within this function. 
We do not set `by` diff --git a/aurweb/rpc.py b/aurweb/rpc.py index 9004a51f..5cdf675d 100644 --- a/aurweb/rpc.py +++ b/aurweb/rpc.py @@ -86,6 +86,7 @@ class RPC: "provides", "conflicts", "replaces", + "groups", } # A mapping of by aliases. diff --git a/test/test_rpc.py b/test/test_rpc.py index c5004f07..bbd74588 100644 --- a/test/test_rpc.py +++ b/test/test_rpc.py @@ -13,10 +13,12 @@ from aurweb import asgi, config, db, rpc, scripts, time from aurweb.aur_redis import redis_connection from aurweb.models.account_type import USER_ID from aurweb.models.dependency_type import DEPENDS_ID +from aurweb.models.group import Group from aurweb.models.license import License from aurweb.models.package import Package from aurweb.models.package_base import PackageBase from aurweb.models.package_dependency import PackageDependency +from aurweb.models.package_group import PackageGroup from aurweb.models.package_keyword import PackageKeyword from aurweb.models.package_license import PackageLicense from aurweb.models.package_relation import PackageRelation @@ -139,11 +141,14 @@ def packages(user: User, user2: User, user3: User) -> list[Package]: output.append(pkg) # Setup a few more related records on the first package: - # a license, some keywords and some votes. + # a license, group, some keywords and some votes. 
with db.begin(): lic = db.create(License, Name="GPL") db.create(PackageLicense, Package=output[0], License=lic) + grp = db.create(Group, Name="testgroup") + db.create(PackageGroup, Package=output[0], Group=grp) + for keyword in ["big-chungus", "smol-chungus", "sizeable-chungus"]: db.create( PackageKeyword, PackageBase=output[0].PackageBase, Keyword=keyword @@ -326,6 +331,7 @@ def test_rpc_singular_info( "Replaces": ["chungus-replaces<=200"], "License": [pkg.package_licenses.first().License.Name], "Keywords": ["big-chungus", "sizeable-chungus", "smol-chungus"], + "Groups": ["testgroup"], } ], "resultcount": 1, @@ -888,6 +894,23 @@ def test_rpc_search_replaces( assert result.get("Name") == packages[0].Name +def test_rpc_search_groups( + client: TestClient, packages: list[Package], depends: list[PackageDependency] +): + params = { + "v": 5, + "type": "search", + "by": "groups", + "arg": "testgroup", + } + with client as request: + response = request.get("/rpc", params=params) + data = response.json() + assert data.get("resultcount") == 1 + result = data.get("results")[0] + assert result.get("Name") == packages[0].Name + + def test_rpc_incorrect_by(client: TestClient): params = {"v": 5, "type": "search", "by": "fake", "arg": "big"} with client as request: From 5484e68b42392c95a90a3425841419a5782c412a Mon Sep 17 00:00:00 2001 From: moson-mo Date: Mon, 7 Nov 2022 22:46:24 +0100 Subject: [PATCH 153/415] feat(rpc): add "by" parameter - submitter Add "by" parameter: submitter Signed-off-by: moson-mo --- aurweb/packages/search.py | 2 +- aurweb/rpc.py | 3 ++- test/test_rpc.py | 31 +++++++++++++++++++++++++++++-- 3 files changed, 32 insertions(+), 4 deletions(-) diff --git a/aurweb/packages/search.py b/aurweb/packages/search.py index 60e9f0fc..51d97d8e 100644 --- a/aurweb/packages/search.py +++ b/aurweb/packages/search.py @@ -269,7 +269,7 @@ class RPCSearch(PackageSearch): sanitization done for the PackageSearch `by` argument. 
""" - keys_removed = ("b", "N", "B", "k", "c", "M", "s") + keys_removed = ("b", "N", "B", "k", "c", "M") def __init__(self) -> "RPCSearch": super().__init__() diff --git a/aurweb/rpc.py b/aurweb/rpc.py index 5cdf675d..fa36486e 100644 --- a/aurweb/rpc.py +++ b/aurweb/rpc.py @@ -87,10 +87,11 @@ class RPC: "conflicts", "replaces", "groups", + "submitter", } # A mapping of by aliases. - BY_ALIASES = {"name-desc": "nd", "name": "n", "maintainer": "m"} + BY_ALIASES = {"name-desc": "nd", "name": "n", "maintainer": "m", "submitter": "s"} def __init__(self, version: int = 0, type: str = None) -> "RPC": self.version = version diff --git a/test/test_rpc.py b/test/test_rpc.py index bbd74588..5d59d16b 100644 --- a/test/test_rpc.py +++ b/test/test_rpc.py @@ -81,7 +81,11 @@ def packages(user: User, user2: User, user3: User) -> list[Package]: # Create package records used in our tests. with db.begin(): pkgbase = db.create( - PackageBase, Name="big-chungus", Maintainer=user, Packager=user + PackageBase, + Name="big-chungus", + Maintainer=user, + Packager=user, + Submitter=user2, ) pkg = db.create( Package, @@ -93,7 +97,11 @@ def packages(user: User, user2: User, user3: User) -> list[Package]: output.append(pkg) pkgbase = db.create( - PackageBase, Name="chungy-chungus", Maintainer=user, Packager=user + PackageBase, + Name="chungy-chungus", + Maintainer=user, + Packager=user, + Submitter=user2, ) pkg = db.create( Package, @@ -911,6 +919,25 @@ def test_rpc_search_groups( assert result.get("Name") == packages[0].Name +def test_rpc_search_submitter(client: TestClient, user2: User, packages: list[Package]): + params = {"v": 5, "type": "search", "by": "submitter", "arg": user2.Username} + with client as request: + response = request.get("/rpc", params=params) + data = response.json() + + # user2 submitted 2 packages + assert data.get("resultcount") == 2 + names = list(sorted(r.get("Name") for r in data.get("results"))) + expected_results = ["big-chungus", "chungy-chungus"] + assert names 
== expected_results + + # Search for a non-existent submitter, giving us zero packages. + params["arg"] = "blah-blah" + response = request.get("/rpc", params=params) + data = response.json() + assert data.get("resultcount") == 0 + + def test_rpc_incorrect_by(client: TestClient): params = {"v": 5, "type": "search", "by": "fake", "arg": "big"} with client as request: From efd20ed2c740910996e9f1aa7e24a2337be4db11 Mon Sep 17 00:00:00 2001 From: moson-mo Date: Tue, 8 Nov 2022 15:26:27 +0100 Subject: [PATCH 154/415] feat(rpc): add "by" parameter - keywords Add "by" parameter: keywords Signed-off-by: moson-mo --- aurweb/packages/search.py | 9 +++++++-- aurweb/rpc.py | 9 ++++++++- test/test_rpc.py | 19 +++++++++++++++++++ 3 files changed, 34 insertions(+), 3 deletions(-) diff --git a/aurweb/packages/search.py b/aurweb/packages/search.py index 51d97d8e..37a5b6ff 100644 --- a/aurweb/packages/search.py +++ b/aurweb/packages/search.py @@ -269,7 +269,7 @@ class RPCSearch(PackageSearch): sanitization done for the PackageSearch `by` argument. """ - keys_removed = ("b", "N", "B", "k", "c", "M") + keys_removed = ("b", "N", "B", "c", "M") def __init__(self) -> "RPCSearch": super().__init__() @@ -372,11 +372,16 @@ class RPCSearch(PackageSearch): ) return self - def _search_by_groups(self, keywords: str) -> orm.Query: + def _search_by_groups(self, keywords: str) -> "RPCSearch": self._join_groups() self.query = self.query.filter(Group.Name == keywords) return self + def _search_by_keywords(self, keywords: str) -> "RPCSearch": + self._join_keywords() + self.query = self.query.filter(PackageKeyword.Keyword == keywords) + return self + def search_by(self, by: str, keywords: str) -> "RPCSearch": """Override inherited search_by. In this override, we reduce the scope of what we handle within this function. 
We do not set `by` diff --git a/aurweb/rpc.py b/aurweb/rpc.py index fa36486e..2a07f6c7 100644 --- a/aurweb/rpc.py +++ b/aurweb/rpc.py @@ -88,10 +88,17 @@ class RPC: "replaces", "groups", "submitter", + "keywords", } # A mapping of by aliases. - BY_ALIASES = {"name-desc": "nd", "name": "n", "maintainer": "m", "submitter": "s"} + BY_ALIASES = { + "name-desc": "nd", + "name": "n", + "maintainer": "m", + "submitter": "s", + "keywords": "k", + } def __init__(self, version: int = 0, type: str = None) -> "RPC": self.version = version diff --git a/test/test_rpc.py b/test/test_rpc.py index 5d59d16b..9c3ca883 100644 --- a/test/test_rpc.py +++ b/test/test_rpc.py @@ -938,6 +938,25 @@ def test_rpc_search_submitter(client: TestClient, user2: User, packages: list[Pa assert data.get("resultcount") == 0 +def test_rpc_search_keywords(client: TestClient, packages: list[Package]): + params = {"v": 5, "type": "search", "by": "keywords", "arg": "big-chungus"} + with client as request: + response = request.get("/rpc", params=params) + data = response.json() + + # should get 2 packages + assert data.get("resultcount") == 1 + names = list(sorted(r.get("Name") for r in data.get("results"))) + expected_results = ["big-chungus"] + assert names == expected_results + + # non-existent search + params["arg"] = "blah-blah" + response = request.get("/rpc", params=params) + data = response.json() + assert data.get("resultcount") == 0 + + def test_rpc_incorrect_by(client: TestClient): params = {"v": 5, "type": "search", "by": "fake", "arg": "big"} with client as request: From bcd808ddc11c570d9259a93a69d165403e48230e Mon Sep 17 00:00:00 2001 From: moson-mo Date: Tue, 8 Nov 2022 16:44:59 +0100 Subject: [PATCH 155/415] feat(rpc): add "by" parameter - comaintainers Add "by" parameter: comaintainers Signed-off-by: moson-mo --- aurweb/packages/search.py | 2 +- aurweb/rpc.py | 2 ++ test/test_rpc.py | 31 ++++++++++++++++++++++++++++++- 3 files changed, 33 insertions(+), 2 deletions(-) diff --git 
a/aurweb/packages/search.py b/aurweb/packages/search.py index 37a5b6ff..c0740cda 100644 --- a/aurweb/packages/search.py +++ b/aurweb/packages/search.py @@ -269,7 +269,7 @@ class RPCSearch(PackageSearch): sanitization done for the PackageSearch `by` argument. """ - keys_removed = ("b", "N", "B", "c", "M") + keys_removed = ("b", "N", "B", "M") def __init__(self) -> "RPCSearch": super().__init__() diff --git a/aurweb/rpc.py b/aurweb/rpc.py index 2a07f6c7..34caf756 100644 --- a/aurweb/rpc.py +++ b/aurweb/rpc.py @@ -89,6 +89,7 @@ class RPC: "groups", "submitter", "keywords", + "comaintainers", } # A mapping of by aliases. @@ -98,6 +99,7 @@ class RPC: "maintainer": "m", "submitter": "s", "keywords": "k", + "comaintainers": "c", } def __init__(self, version: int = 0, type: str = None) -> "RPC": diff --git a/test/test_rpc.py b/test/test_rpc.py index 9c3ca883..4768a2da 100644 --- a/test/test_rpc.py +++ b/test/test_rpc.py @@ -17,6 +17,7 @@ from aurweb.models.group import Group from aurweb.models.license import License from aurweb.models.package import Package from aurweb.models.package_base import PackageBase +from aurweb.models.package_comaintainer import PackageComaintainer from aurweb.models.package_dependency import PackageDependency from aurweb.models.package_group import PackageGroup from aurweb.models.package_keyword import PackageKeyword @@ -149,7 +150,7 @@ def packages(user: User, user2: User, user3: User) -> list[Package]: output.append(pkg) # Setup a few more related records on the first package: - # a license, group, some keywords and some votes. + # a license, group, some keywords, comaintainer and some votes. 
with db.begin(): lic = db.create(License, Name="GPL") db.create(PackageLicense, Package=output[0], License=lic) @@ -157,6 +158,13 @@ def packages(user: User, user2: User, user3: User) -> list[Package]: grp = db.create(Group, Name="testgroup") db.create(PackageGroup, Package=output[0], Group=grp) + db.create( + PackageComaintainer, + PackageBase=output[0].PackageBase, + User=user2, + Priority=1, + ) + for keyword in ["big-chungus", "smol-chungus", "sizeable-chungus"]: db.create( PackageKeyword, PackageBase=output[0].PackageBase, Keyword=keyword @@ -957,6 +965,27 @@ def test_rpc_search_keywords(client: TestClient, packages: list[Package]): assert data.get("resultcount") == 0 +def test_rpc_search_comaintainers( + client: TestClient, user2: User, packages: list[Package] +): + params = {"v": 5, "type": "search", "by": "comaintainers", "arg": user2.Username} + with client as request: + response = request.get("/rpc", params=params) + data = response.json() + + # should get 1 package + assert data.get("resultcount") == 1 + names = list(sorted(r.get("Name") for r in data.get("results"))) + expected_results = ["big-chungus"] + assert names == expected_results + + # non-existent search + params["arg"] = "blah-blah" + response = request.get("/rpc", params=params) + data = response.json() + assert data.get("resultcount") == 0 + + def test_rpc_incorrect_by(client: TestClient): params = {"v": 5, "type": "search", "by": "fake", "arg": "big"} with client as request: From 500d6b403b827e51e602818bb17e4ecbcd2b5842 Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Fri, 4 Nov 2022 14:09:09 +0000 Subject: [PATCH 156/415] feat: add co-maintainers to RPC Signed-off-by: Leonidas Spyropoulos --- aurweb/rpc.py | 16 ++++++++++++++++ test/test_rpc.py | 29 +++++++++++++++++++++++++++++ 2 files changed, 45 insertions(+) diff --git a/aurweb/rpc.py b/aurweb/rpc.py index 34caf756..af31d2de 100644 --- a/aurweb/rpc.py +++ b/aurweb/rpc.py @@ -284,6 +284,22 @@ class RPC: ) .distinct() 
.order_by("Name"), + # Co-Maintainer + db.query(models.PackageComaintainer) + .join(models.User, models.User.ID == models.PackageComaintainer.UsersID) + .join( + models.Package, + models.Package.PackageBaseID + == models.PackageComaintainer.PackageBaseID, + ) + .with_entities( + models.Package.ID, + literal("CoMaintainers").label("Type"), + models.User.Username.label("Name"), + literal(str()).label("Cond"), + ) + .distinct() # A package could have the same co-maintainer multiple times + .order_by("Name"), ] # Union all subqueries together. diff --git a/test/test_rpc.py b/test/test_rpc.py index 4768a2da..424352db 100644 --- a/test/test_rpc.py +++ b/test/test_rpc.py @@ -272,6 +272,33 @@ def relations(user: User, packages: list[Package]) -> list[PackageRelation]: yield output +@pytest.fixture +def comaintainer( + user2: User, user3: User, packages: list[Package] +) -> list[PackageComaintainer]: + output = [] + + with db.begin(): + comaintainer = db.create( + PackageComaintainer, + User=user2, + PackageBase=packages[0].PackageBase, + Priority=1, + ) + output.append(comaintainer) + + comaintainer = db.create( + PackageComaintainer, + User=user3, + PackageBase=packages[0].PackageBase, + Priority=1, + ) + output.append(comaintainer) + + # Finally, yield the packages. + yield output + + @pytest.fixture(autouse=True) def setup(db_test): # Create some extra package relationships. @@ -321,6 +348,7 @@ def test_rpc_singular_info( packages: list[Package], depends: list[PackageDependency], relations: list[PackageRelation], + comaintainer: list[PackageComaintainer], ): # Define expected response. 
pkg = packages[0] @@ -343,6 +371,7 @@ def test_rpc_singular_info( "MakeDepends": ["chungus-makedepends"], "CheckDepends": ["chungus-checkdepends"], "Conflicts": ["chungus-conflicts"], + "CoMaintainers": ["user2", "user3"], "Provides": ["chungus-provides<=200"], "Replaces": ["chungus-replaces<=200"], "License": [pkg.package_licenses.first().License.Name], From bce5b81acd2b2dfcdfaf46ae962241e7dbe61ef9 Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Thu, 10 Nov 2022 21:28:16 +0000 Subject: [PATCH 157/415] feat: allow filtering requests from maintainers These are usually easy to handle from TUs so allow to filter for them Signed-off-by: Leonidas Spyropoulos --- aurweb/routers/requests.py | 19 +++++++++++--- pytest.ini | 11 +++----- templates/requests.html | 5 ++++ test/test_requests.py | 53 ++++++++++++++++++++++++++++++++------ 4 files changed, 69 insertions(+), 19 deletions(-) diff --git a/aurweb/routers/requests.py b/aurweb/routers/requests.py index d1f1b830..6880abd9 100644 --- a/aurweb/routers/requests.py +++ b/aurweb/routers/requests.py @@ -2,12 +2,12 @@ from http import HTTPStatus from fastapi import APIRouter, Form, Query, Request from fastapi.responses import RedirectResponse -from sqlalchemy import case +from sqlalchemy import case, orm from aurweb import db, defaults, time, util from aurweb.auth import creds, requires_auth from aurweb.exceptions import handle_form_exceptions -from aurweb.models import PackageRequest +from aurweb.models import PackageBase, PackageRequest, User from aurweb.models.package_request import ( ACCEPTED_ID, CLOSED_ID, @@ -23,6 +23,7 @@ FILTER_PARAMS = { "filter_closed", "filter_accepted", "filter_rejected", + "filter_maintainers_requests", } router = APIRouter() @@ -38,6 +39,7 @@ async def requests( filter_closed: bool = False, filter_accepted: bool = False, filter_rejected: bool = False, + filter_maintainer_requests: bool = False, ): context = make_context(request, "Requests") @@ -53,9 +55,17 @@ async def requests( 
context["filter_closed"] = filter_closed context["filter_accepted"] = filter_accepted context["filter_rejected"] = filter_rejected + context["filter_maintainer_requests"] = filter_maintainer_requests + Maintainer = orm.aliased(User) # A PackageRequest query - query = db.query(PackageRequest) + query = ( + db.query(PackageRequest) + .join(PackageBase) + .join(User, PackageRequest.UsersID == User.ID, isouter=True) + .join(Maintainer, PackageBase.MaintainerUID == Maintainer.ID, isouter=True) + ) + # query = db.query(PackageRequest).join(User) # Requests statistics context["total_requests"] = query.count() @@ -79,6 +89,9 @@ async def requests( if filter_rejected: in_filters.append(REJECTED_ID) filtered = query.filter(PackageRequest.Status.in_(in_filters)) + # Additionally filter for requests made from package maintainer + if filter_maintainer_requests: + filtered = filtered.filter(PackageRequest.UsersID == PackageBase.MaintainerUID) # If the request user is not elevated (TU or Dev), then # filter PackageRequests which are owned by the request user. if not request.user.is_elevated(): diff --git a/pytest.ini b/pytest.ini index 9f70a2bd..62d1922a 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,13 +1,8 @@ [pytest] -# Ignore the following DeprecationWarning(s): -# - asyncio.base_events -# - DeprecationWarning speaking about internal asyncio -# using the loop= argument being deprecated starting -# with python 3.8, before python 3.10. -# - Note: This is a bug in upstream filed at -# https://bugs.python.org/issue45097 filterwarnings = - ignore::DeprecationWarning:asyncio.base_events + # This is coming from https://github.com/pytest-dev/pytest-xdist/issues/825 and it's caused from pytest-cov + # Remove once fixed: https://github.com/pytest-dev/pytest-cov/issues/557 + ignore:The --rsyncdir command line argument and rsyncdirs config variable are deprecated.:DeprecationWarning # Build in coverage and pytest-xdist multiproc testing. 
addopts = --cov=aurweb --cov-append --dist load --dist loadfile -n auto diff --git a/templates/requests.html b/templates/requests.html index 9037855c..669b46b0 100644 --- a/templates/requests.html +++ b/templates/requests.html @@ -56,6 +56,11 @@

    +
    + + +
    diff --git a/test/test_requests.py b/test/test_requests.py index 7dfcf5e5..6475fae6 100644 --- a/test/test_requests.py +++ b/test/test_requests.py @@ -96,7 +96,21 @@ def maintainer() -> User: @pytest.fixture -def packages(maintainer: User) -> list[Package]: +def maintainer2() -> User: + """Yield a specific User used to maintain packages.""" + with db.begin(): + maintainer = db.create( + User, + Username="test_maintainer2", + Email="test_maintainer2@example.org", + Passwd="testPassword", + AccountTypeID=USER_ID, + ) + yield maintainer + + +@pytest.fixture +def packages(maintainer: User, maintainer2: User) -> list[Package]: """Yield 55 packages named pkg_0 .. pkg_54.""" packages_ = [] now = time.utcnow() @@ -105,7 +119,7 @@ def packages(maintainer: User) -> list[Package]: pkgbase = db.create( PackageBase, Name=f"pkg_{i}", - Maintainer=maintainer, + Maintainer=maintainer2 if i > 52 else maintainer, Packager=maintainer, Submitter=maintainer, ModifiedTS=now, @@ -117,14 +131,18 @@ def packages(maintainer: User) -> list[Package]: @pytest.fixture -def requests(user: User, packages: list[Package]) -> list[PackageRequest]: +def requests( + user: User, maintainer2: User, packages: list[Package] +) -> list[PackageRequest]: pkgreqs = [] with db.begin(): for i in range(55): pkgreq = db.create( PackageRequest, ReqTypeID=DELETION_ID, - User=user, + User=maintainer2 + if packages[i].PackageBase.Maintainer.Username == "test_maintainer2" + else user, PackageBase=packages[i].PackageBase, PackageBaseName=packages[i].Name, Comments=f"Deletion request for pkg_{i}", @@ -717,10 +735,6 @@ def test_requests( "O": 0, # Page 1 "SeB": "nd", "SB": "n", - "filter_pending": True, - "filter_closed": True, - "filter_accepted": True, - "filter_rejected": True, }, cookies=cookies, ) @@ -767,6 +781,7 @@ def test_requests_with_filters( "filter_closed": True, "filter_accepted": True, "filter_rejected": True, + "filter_maintainer_requests": False, }, cookies=cookies, ) @@ -790,6 +805,7 @@ def 
test_requests_with_filters( "filter_closed": True, "filter_accepted": True, "filter_rejected": True, + "filter_maintainer_requests": False, }, cookies=cookies, ) # Page 2 @@ -803,6 +819,27 @@ def test_requests_with_filters( assert len(rows) == 5 # There are five records left on the second page. +def test_requests_for_maintainer_requests( + client: TestClient, + tu_user: User, + packages: list[Package], + requests: list[PackageRequest], +): + cookies = {"AURSID": tu_user.login(Request(), "testPassword")} + with client as request: + resp = request.get( + "/requests", + params={"filter_maintainer_requests": True}, + cookies=cookies, + ) + assert resp.status_code == int(HTTPStatus.OK) + + root = parse_root(resp.text) + rows = root.xpath('//table[@class="results"]/tbody/tr') + # We only expect 2 requests since we are looking for requests from the maintainers + assert len(rows) == 2 + + def test_requests_by_deleted_users( client: TestClient, user: User, tu_user: User, pkgreq: PackageRequest ): From ff92e95f7a36bd51afa7f5108c9f3ff758d43cba Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Mon, 21 Nov 2022 13:39:43 +0000 Subject: [PATCH 158/415] fix: delete associated ssh public keys with account deletion Signed-off-by: Leonidas Spyropoulos --- aurweb/models/ssh_pub_key.py | 2 +- test/test_accounts_routes.py | 31 +++++++++++++++++++++++++++++++ 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/aurweb/models/ssh_pub_key.py b/aurweb/models/ssh_pub_key.py index a2af34f4..c0b59445 100644 --- a/aurweb/models/ssh_pub_key.py +++ b/aurweb/models/ssh_pub_key.py @@ -13,7 +13,7 @@ class SSHPubKey(Base): User = relationship( "User", - backref=backref("ssh_pub_keys", lazy="dynamic"), + backref=backref("ssh_pub_keys", lazy="dynamic", cascade="all, delete"), foreign_keys=[__table__.c.UserID], ) diff --git a/test/test_accounts_routes.py b/test/test_accounts_routes.py index 33baa0ea..f44fd44e 100644 --- a/test/test_accounts_routes.py +++ b/test/test_accounts_routes.py 
@@ -2032,6 +2032,37 @@ def test_account_delete_self(client: TestClient, user: User): assert record is None +def test_account_delete_self_with_ssh_public_key(client: TestClient, user: User): + username = user.Username + + with db.begin(): + db.create( + SSHPubKey, User=user, Fingerprint="testFingerprint", PubKey="testPubKey" + ) + + # Confirm that we can view our own account deletion page + cookies = {"AURSID": user.login(Request(), "testPassword")} + endpoint = f"/account/{username}/delete" + with client as request: + resp = request.get(endpoint, cookies=cookies) + assert resp.status_code == HTTPStatus.OK + + # Supply everything correctly and delete ourselves + with client as request: + resp = request.post( + endpoint, + data={"passwd": "testPassword", "confirm": True}, + cookies=cookies, + ) + assert resp.status_code == HTTPStatus.SEE_OTHER + + # Check that our User record no longer exists in the database + user_record = db.query(User).filter(User.Username == username).first() + assert user_record is None + sshpubkey_record = db.query(SSHPubKey).filter(SSHPubKey.User == user).first() + assert sshpubkey_record is None + + def test_account_delete_as_tu(client: TestClient, tu_user: User): with db.begin(): user = create_user("user2") From d5e102e3f4622b4c55edd75bb086ae9f764a71c9 Mon Sep 17 00:00:00 2001 From: moson-mo Date: Tue, 22 Nov 2022 18:39:15 +0100 Subject: [PATCH 159/415] feat: add "Submitter" field to /rpc info request Signed-off-by: moson-mo --- aurweb/rpc.py | 54 +++++++++++++++++++++++++++++++++--------------- test/test_rpc.py | 5 +++++ 2 files changed, 42 insertions(+), 17 deletions(-) diff --git a/aurweb/rpc.py b/aurweb/rpc.py index af31d2de..2aa27500 100644 --- a/aurweb/rpc.py +++ b/aurweb/rpc.py @@ -154,6 +154,7 @@ class RPC: "PackageBase": package.PackageBaseName, # Maintainer should be set following this update if one exists. 
"Maintainer": package.Maintainer, + "Submitter": package.Submitter, "Version": package.Version, "Description": package.Description, "URL": package.URL, @@ -192,22 +193,35 @@ class RPC: def entities(self, query: orm.Query) -> orm.Query: """Select specific RPC columns on `query`.""" - return query.with_entities( - models.Package.ID, - models.Package.Name, - models.Package.Version, - models.Package.Description, - models.Package.URL, - models.Package.PackageBaseID, - models.PackageBase.Name.label("PackageBaseName"), - models.PackageBase.NumVotes, - models.PackageBase.Popularity, - models.PackageBase.PopularityUpdated, - models.PackageBase.OutOfDateTS, - models.PackageBase.SubmittedTS, - models.PackageBase.ModifiedTS, - models.User.Username.label("Maintainer"), - ).group_by(models.Package.ID) + Submitter = orm.aliased(models.User) + + query = ( + query.join( + Submitter, + Submitter.ID == models.PackageBase.SubmitterUID, + isouter=True, + ) + .with_entities( + models.Package.ID, + models.Package.Name, + models.Package.Version, + models.Package.Description, + models.Package.URL, + models.Package.PackageBaseID, + models.PackageBase.Name.label("PackageBaseName"), + models.PackageBase.NumVotes, + models.PackageBase.Popularity, + models.PackageBase.PopularityUpdated, + models.PackageBase.OutOfDateTS, + models.PackageBase.SubmittedTS, + models.PackageBase.ModifiedTS, + models.User.Username.label("Maintainer"), + Submitter.Username.label("Submitter"), + ) + .group_by(models.Package.ID) + ) + + return query def subquery(self, ids: set[int]): Package = models.Package @@ -367,7 +381,13 @@ class RPC: if len(results) > max_results: raise RPCError("Too many package results.") - return self._assemble_json_data(results, self.get_json_data) + data = self._assemble_json_data(results, self.get_json_data) + + # remove Submitter for search results + for pkg in data: + pkg.pop("Submitter") + + return data def _handle_msearch_type( self, args: list[str] = [], **kwargs diff --git 
a/test/test_rpc.py b/test/test_rpc.py index 424352db..04efd38f 100644 --- a/test/test_rpc.py +++ b/test/test_rpc.py @@ -345,6 +345,7 @@ def test_rpc_documentation_missing(): def test_rpc_singular_info( client: TestClient, user: User, + user2: User, packages: list[Package], depends: list[PackageDependency], relations: list[PackageRelation], @@ -365,6 +366,7 @@ def test_rpc_singular_info( "Popularity": float(pkg.PackageBase.Popularity), "OutOfDate": None, "Maintainer": user.Username, + "Submitter": user2.Username, "URLPath": f"/cgit/aur.git/snapshot/{pkg.Name}.tar.gz", "Depends": ["chungus-depends"], "OptDepends": ["chungus-optdepends=50"], @@ -498,6 +500,7 @@ def test_rpc_mixedargs(client: TestClient, packages: list[Package]): def test_rpc_no_dependencies_omits_key( client: TestClient, user: User, + user2: User, packages: list[Package], depends: list[PackageDependency], relations: list[PackageRelation], @@ -520,6 +523,7 @@ def test_rpc_no_dependencies_omits_key( "Popularity": int(pkg.PackageBase.Popularity), "OutOfDate": None, "Maintainer": user.Username, + "Submitter": user2.Username, "URLPath": "/cgit/aur.git/snapshot/chungy-chungus.tar.gz", "Depends": ["chungy-depends"], "Conflicts": ["chungy-conflicts"], @@ -799,6 +803,7 @@ def test_rpc_search(client: TestClient, packages: list[Package]): result = data.get("results")[0] assert result.get("Name") == packages[0].Name + assert result.get("Submitter") is None # Test the If-None-Match headers. 
etag = response.headers.get("ETag").strip('"') From 6b0978b9a518bebb9197b9e71ff0d53f24f77bc9 Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Tue, 22 Nov 2022 21:51:15 +0000 Subject: [PATCH 160/415] fix(deps): update dependencies from renovate Signed-off-by: Leonidas Spyropoulos --- .pre-commit-config.yaml | 4 +- poetry.lock | 598 +++++++++++++++++----------------------- pyproject.toml | 14 +- 3 files changed, 261 insertions(+), 355 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 09659269..ab4240c9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,7 +11,7 @@ repos: - id: debug-statements - repo: https://github.com/myint/autoflake - rev: v1.4 + rev: v1.7.7 hooks: - id: autoflake args: @@ -25,7 +25,7 @@ repos: - id: isort - repo: https://github.com/psf/black - rev: 22.6.0 + rev: 22.10.0 hooks: - id: black diff --git a/poetry.lock b/poetry.lock index f6b79a30..22cbd3fd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -175,7 +175,7 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "38.0.1" +version = "38.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "main" optional = false @@ -192,20 +192,6 @@ sdist = ["setuptools-rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] -[[package]] -name = "deprecated" -version = "1.2.13" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
-category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"] - [[package]] name = "dnspython" version = "2.2.1" @@ -236,7 +222,7 @@ idna = ">=2.0.0" [[package]] name = "exceptiongroup" -version = "1.0.0" +version = "1.0.4" description = "Backport of PEP 654 (exception groups)" category = "main" optional = false @@ -258,14 +244,14 @@ testing = ["pre-commit"] [[package]] name = "fakeredis" -version = "1.10.0" +version = "2.0.0" description = "Fake implementation of redis API for testing purposes." category = "main" optional = false python-versions = ">=3.7,<4.0" [package.dependencies] -redis = "<4.4" +redis = "<4.5" sortedcontainers = ">=2.4.0,<3.0.0" [package.extras] @@ -316,7 +302,7 @@ testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pyt [[package]] name = "greenlet" -version = "2.0.0" +version = "2.0.1" description = "Lightweight in-process concurrent programming" category = "main" optional = false @@ -324,7 +310,7 @@ python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" [package.extras] docs = ["Sphinx", "docutils (<0.18)"] -test = ["faulthandler", "objgraph"] +test = ["faulthandler", "objgraph", "psutil"] [[package]] name = "gunicorn" @@ -345,11 +331,11 @@ tornado = ["tornado (>=0.2)"] [[package]] name = "h11" -version = "0.12.0" +version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "h2" @@ -373,16 +359,16 @@ python-versions = ">=3.6.1" [[package]] name = "httpcore" -version = "0.15.0" +version = "0.16.1" description = "A minimal low-level 
HTTP client." category = "main" optional = false python-versions = ">=3.7" [package.dependencies] -anyio = ">=3.0.0,<4.0.0" +anyio = ">=3.0,<5.0" certifi = "*" -h11 = ">=0.11,<0.13" +h11 = ">=0.13,<0.15" sniffio = ">=1.0.0,<2.0.0" [package.extras] @@ -391,7 +377,7 @@ socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "httpx" -version = "0.23.0" +version = "0.23.1" description = "The next generation HTTP client." category = "main" optional = false @@ -399,7 +385,7 @@ python-versions = ">=3.7" [package.dependencies] certifi = "*" -httpcore = ">=0.15.0,<0.16.0" +httpcore = ">=0.15.0,<0.17.0" rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} sniffio = "*" @@ -508,7 +494,7 @@ source = ["Cython (>=0.29.7)"] [[package]] name = "mako" -version = "1.2.3" +version = "1.2.4" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." category = "main" optional = false @@ -554,7 +540,7 @@ python-versions = ">=3.5" [[package]] name = "orjson" -version = "3.8.1" +version = "3.8.2" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" category = "main" optional = false @@ -679,11 +665,11 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pygit2" -version = "1.10.1" +version = "1.11.1" description = "Python bindings for libgit2." category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" [package.dependencies] cffi = ">=1.9.1" @@ -721,7 +707,7 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2. 
[[package]] name = "pytest-asyncio" -version = "0.20.1" +version = "0.20.2" description = "Pytest support for asyncio" category = "dev" optional = false @@ -801,7 +787,7 @@ six = ">=1.4.0" [[package]] name = "redis" -version = "4.3.4" +version = "4.3.5" description = "Python client for Redis database and key-value store" category = "main" optional = false @@ -809,7 +795,6 @@ python-versions = ">=3.6" [package.dependencies] async-timeout = ">=4.0.2" -deprecated = ">=1.2.3" packaging = ">=20.4" [package.extras] @@ -850,7 +835,7 @@ idna2008 = ["idna"] [[package]] name = "setuptools" -version = "65.5.0" +version = "65.6.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false @@ -858,7 +843,7 @@ python-versions = ">=3.7" [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] 
testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -887,7 +872,7 @@ python-versions = "*" [[package]] name = "sqlalchemy" -version = "1.4.42" +version = "1.4.44" description = "Database Abstraction Library" category = "main" optional = false @@ -993,7 +978,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "uvicorn" -version = "0.19.0" +version = "0.20.0" description = "The lightning-fast ASGI server." category = "main" optional = false @@ -1004,7 +989,7 @@ click = ">=7.0" h11 = ">=0.8" [package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.0)"] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] [[package]] name = "webencodings" @@ -1028,14 +1013,6 @@ MarkupSafe = ">=2.1.1" [package.extras] watchdog = ["watchdog"] -[[package]] -name = "wrapt" -version = "1.14.1" -description = "Module for decorators, wrappers and monkey patching." 
-category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" - [[package]] name = "wsproto" version = "1.2.0" @@ -1062,7 +1039,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.11" -content-hash = "84f0bae9789174cbdc5aa672b9e72f0ef91763f63ed73e8cafb45f26efd9bb47" +content-hash = "b178f1fcbba93d9cbc8dd23193b25afd5e1ba971196757abf098a1dfa2666cba" [metadata.files] aiofiles = [ @@ -1255,36 +1232,32 @@ coverage = [ {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, ] cryptography = [ - {file = "cryptography-38.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:10d1f29d6292fc95acb597bacefd5b9e812099d75a6469004fd38ba5471a977f"}, - {file = "cryptography-38.0.1-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:3fc26e22840b77326a764ceb5f02ca2d342305fba08f002a8c1f139540cdfaad"}, - {file = "cryptography-38.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:3b72c360427889b40f36dc214630e688c2fe03e16c162ef0aa41da7ab1455153"}, - {file = "cryptography-38.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:194044c6b89a2f9f169df475cc167f6157eb9151cc69af8a2a163481d45cc407"}, - {file = "cryptography-38.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca9f6784ea96b55ff41708b92c3f6aeaebde4c560308e5fbbd3173fbc466e94e"}, - {file = "cryptography-38.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:16fa61e7481f4b77ef53991075de29fc5bacb582a1244046d2e8b4bb72ef66d0"}, - {file = "cryptography-38.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d4ef6cc305394ed669d4d9eebf10d3a101059bdcf2669c366ec1d14e4fb227bd"}, - {file = "cryptography-38.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3261725c0ef84e7592597606f6583385fed2a5ec3909f43bc475ade9729a41d6"}, - 
{file = "cryptography-38.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0297ffc478bdd237f5ca3a7dc96fc0d315670bfa099c04dc3a4a2172008a405a"}, - {file = "cryptography-38.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:89ed49784ba88c221756ff4d4755dbc03b3c8d2c5103f6d6b4f83a0fb1e85294"}, - {file = "cryptography-38.0.1-cp36-abi3-win32.whl", hash = "sha256:ac7e48f7e7261207d750fa7e55eac2d45f720027d5703cd9007e9b37bbb59ac0"}, - {file = "cryptography-38.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:ad7353f6ddf285aeadfaf79e5a6829110106ff8189391704c1d8801aa0bae45a"}, - {file = "cryptography-38.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:896dd3a66959d3a5ddcfc140a53391f69ff1e8f25d93f0e2e7830c6de90ceb9d"}, - {file = "cryptography-38.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:d3971e2749a723e9084dd507584e2a2761f78ad2c638aa31e80bc7a15c9db4f9"}, - {file = "cryptography-38.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:79473cf8a5cbc471979bd9378c9f425384980fcf2ab6534b18ed7d0d9843987d"}, - {file = "cryptography-38.0.1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9e69ae01f99abe6ad646947bba8941e896cb3aa805be2597a0400e0764b5818"}, - {file = "cryptography-38.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5067ee7f2bce36b11d0e334abcd1ccf8c541fc0bbdaf57cdd511fdee53e879b6"}, - {file = "cryptography-38.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3e3a2599e640927089f932295a9a247fc40a5bdf69b0484532f530471a382750"}, - {file = "cryptography-38.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2e5856248a416767322c8668ef1845ad46ee62629266f84a8f007a317141013"}, - {file = "cryptography-38.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:64760ba5331e3f1794d0bcaabc0d0c39e8c60bf67d09c93dc0e54189dfd7cfe5"}, - {file = "cryptography-38.0.1-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = 
"sha256:b6c9b706316d7b5a137c35e14f4103e2115b088c412140fdbd5f87c73284df61"}, - {file = "cryptography-38.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0163a849b6f315bf52815e238bc2b2346604413fa7c1601eea84bcddb5fb9ac"}, - {file = "cryptography-38.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:d1a5bd52d684e49a36582193e0b89ff267704cd4025abefb9e26803adeb3e5fb"}, - {file = "cryptography-38.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:765fa194a0f3372d83005ab83ab35d7c5526c4e22951e46059b8ac678b44fa5a"}, - {file = "cryptography-38.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:52e7bee800ec869b4031093875279f1ff2ed12c1e2f74923e8f49c916afd1d3b"}, - {file = "cryptography-38.0.1.tar.gz", hash = "sha256:1db3d807a14931fa317f96435695d9ec386be7b84b618cc61cfa5d08b0ae33d7"}, -] -deprecated = [ - {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"}, - {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"}, + {file = "cryptography-38.0.3-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320"}, + {file = "cryptography-38.0.3-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722"}, + {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f"}, + {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828"}, + {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959"}, + {file = 
"cryptography-38.0.3-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2"}, + {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c"}, + {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0"}, + {file = "cryptography-38.0.3-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748"}, + {file = "cryptography-38.0.3-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146"}, + {file = "cryptography-38.0.3-cp36-abi3-win32.whl", hash = "sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0"}, + {file = "cryptography-38.0.3-cp36-abi3-win_amd64.whl", hash = "sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220"}, + {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd"}, + {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55"}, + {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b"}, + {file = "cryptography-38.0.3-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36"}, + {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d"}, + {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = 
"sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7"}, + {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249"}, + {file = "cryptography-38.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50"}, + {file = "cryptography-38.0.3-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0"}, + {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8"}, + {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436"}, + {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548"}, + {file = "cryptography-38.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a"}, + {file = "cryptography-38.0.3.tar.gz", hash = "sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd"}, ] dnspython = [ {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"}, @@ -1295,16 +1268,16 @@ email-validator = [ {file = "email_validator-1.3.0.tar.gz", hash = "sha256:553a66f8be2ec2dea641ae1d3f29017ab89e9d603d4a25cdaac39eefa283d769"}, ] exceptiongroup = [ - {file = "exceptiongroup-1.0.0-py3-none-any.whl", hash = "sha256:2ac84b496be68464a2da60da518af3785fff8b7ec0d090a581604bc870bdee41"}, - {file = "exceptiongroup-1.0.0.tar.gz", hash = "sha256:affbabf13fb6e98988c38d9c5650e701569fe3c1de3233cfb61c5f33774690ad"}, + {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = 
"sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, + {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, ] execnet = [ {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, ] fakeredis = [ - {file = "fakeredis-1.10.0-py3-none-any.whl", hash = "sha256:0be420a79fabda234963a2730c4ce609a6d44a598e8dd253ce97785bef944285"}, - {file = "fakeredis-1.10.0.tar.gz", hash = "sha256:2b02370118535893d832bcd3c099ef282de3f13b29ae3922432e2225794ec334"}, + {file = "fakeredis-2.0.0-py3-none-any.whl", hash = "sha256:fb3186cbbe4c549f922b0f08eb84b09c0e51ecf8efbed3572d20544254f93a97"}, + {file = "fakeredis-2.0.0.tar.gz", hash = "sha256:6d1dc2417921b7ce56a80877afa390d6335a3154146f201a86e3a14417bdc79e"}, ] fastapi = [ {file = "fastapi-0.85.2-py3-none-any.whl", hash = "sha256:6292db0edd4a11f0d938d6033ccec5f706e9d476958bf33b119e8ddb4e524bde"}, @@ -1318,69 +1291,74 @@ filelock = [ {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, ] greenlet = [ - {file = "greenlet-2.0.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:4be4dedbd2fa9b7c35627f322d6d3139cb125bc18d5ef2f40237990850ea446f"}, - {file = "greenlet-2.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:75c022803de010294366f3608d4bba3e346693b1b7427b79d57e3d924ed03838"}, - {file = "greenlet-2.0.0-cp27-cp27m-win32.whl", hash = "sha256:4a1953465b7651073cffde74ed7d121e602ef9a9740d09ee137b01879ac15a2f"}, - {file = "greenlet-2.0.0-cp27-cp27m-win_amd64.whl", hash = "sha256:a65205e6778142528978b4acca76888e7e7f0be261e395664e49a5c21baa2141"}, - {file = "greenlet-2.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d71feebf5c8041c80dfda76427e14e3ca00bca042481bd3e9612a9d57b2cbbf7"}, - {file = 
"greenlet-2.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:f7edbd2957f72aea357241fe42ffc712a8e9b8c2c42f24e2ef5d97b255f66172"}, - {file = "greenlet-2.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79687c48e7f564be40c46b3afea6d141b8d66ffc2bc6147e026d491c6827954a"}, - {file = "greenlet-2.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a245898ec5e9ca0bc87a63e4e222cc633dc4d1f1a0769c34a625ad67edb9f9de"}, - {file = "greenlet-2.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adcf45221f253b3a681c99da46fa6ac33596fa94c2f30c54368f7ee1c4563a39"}, - {file = "greenlet-2.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3dc294afebf2acfd029373dbf3d01d36fd8d6888a03f5a006e2d690f66b153d9"}, - {file = "greenlet-2.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1cfeae4dda32eb5c64df05d347c4496abfa57ad16a90082798a2bba143c6c854"}, - {file = "greenlet-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:d58d4b4dc82e2d21ebb7dd7d3a6d370693b2236a1407fe3988dc1d4ea07575f9"}, - {file = "greenlet-2.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0d7efab8418c1fb3ea00c4abb89e7b0179a952d0d53ad5fcff798ca7440f8e8"}, - {file = "greenlet-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:f8a10e14238407be3978fa6d190eb3724f9d766655fefc0134fd5482f1fb0108"}, - {file = "greenlet-2.0.0-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:98b848a0b75e76b446dc71fdbac712d9078d96bb1c1607f049562dde1f8801e1"}, - {file = "greenlet-2.0.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:8e8dbad9b4f4c3e37898914cfccb7c4f00dbe3146333cfe52a1a3103cc2ff97c"}, - {file = "greenlet-2.0.0-cp35-cp35m-win32.whl", hash = "sha256:069a8a557541a04518dc3beb9a78637e4e6b286814849a2ecfac529eaa78562b"}, - {file = "greenlet-2.0.0-cp35-cp35m-win_amd64.whl", hash = "sha256:cc211c2ff5d3b2ba8d557a71e3b4f0f0a2020067515143a9516ea43884271192"}, - {file = "greenlet-2.0.0-cp36-cp36m-macosx_10_14_x86_64.whl", 
hash = "sha256:d4e7642366e638f45d70c5111590a56fbd0ffb7f474af20c6c67c01270bcf5cf"}, - {file = "greenlet-2.0.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:e7a0dca752b4e3395890ab4085c3ec3838d73714261914c01b53ed7ea23b5867"}, - {file = "greenlet-2.0.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8c67ecda450ad4eac7837057f5deb96effa836dacaf04747710ccf8eeb73092"}, - {file = "greenlet-2.0.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3cc1abaf47cfcfdc9ac0bdff173cebab22cd54e9e3490135a4a9302d0ff3b163"}, - {file = "greenlet-2.0.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efdbbbf7b6c8d5be52977afa65b9bb7b658bab570543280e76c0fabc647175ed"}, - {file = "greenlet-2.0.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:7acaa51355d5b9549d474dc71be6846ee9a8f2cb82f4936e5efa7a50bbeb94ad"}, - {file = "greenlet-2.0.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:2be628bca0395610da08921f9376dd14317f37256d41078f5c618358467681e1"}, - {file = "greenlet-2.0.0-cp36-cp36m-win32.whl", hash = "sha256:eca9c0473de053dcc92156dd62c38c3578628b536c7f0cd66e655e211c14ac32"}, - {file = "greenlet-2.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:9a4a9fea68fd98814999d91ea585e49ed68d7e199a70bef13a857439f60a4609"}, - {file = "greenlet-2.0.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:6b28420ae290bfbf5d827f976abccc2f74f0a3f5e4fb69b66acf98f1cbe95e7e"}, - {file = "greenlet-2.0.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:2b8e1c939b363292ecc93999fb1ad53ffc5d0aac8e933e4362b62365241edda5"}, - {file = "greenlet-2.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c5ddadfe40e903c6217ed2b95a79f49e942bb98527547cc339fc7e43a424aad"}, - {file = "greenlet-2.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e5ead803b11b60b347e08e0f37234d9a595f44a6420026e47bcaf94190c3cd6"}, - {file = 
"greenlet-2.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b89b78ffb516c2921aa180c2794082666e26680eef05996b91f46127da24d964"}, - {file = "greenlet-2.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:939963d0137ec92540d95b68b7f795e8dbadce0a1fca53e3e7ef8ddc18ee47cb"}, - {file = "greenlet-2.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c1e93ef863810fba75faf418f0861dbf59bfe01a7b5d0a91d39603df58d3d3fa"}, - {file = "greenlet-2.0.0-cp37-cp37m-win32.whl", hash = "sha256:6fd342126d825b76bf5b49717a7c682e31ed1114906cdec7f5a0c2ff1bc737a7"}, - {file = "greenlet-2.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5392ddb893e7fba237b988f846c4a80576557cc08664d56dc1a69c5c02bdc80c"}, - {file = "greenlet-2.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b4fd73b62c1038e7ee938b1de328eaa918f76aa69c812beda3aff8a165494201"}, - {file = "greenlet-2.0.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:0ba0f2e5c4a8f141952411e356dba05d6fe0c38325ee0e4f2d0c6f4c2c3263d5"}, - {file = "greenlet-2.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8bacecee0c9348ab7c95df810e12585e9e8c331dfc1e22da4ed0bd635a5f483"}, - {file = "greenlet-2.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:341053e0a96d512315c27c34fad4672c4573caf9eb98310c39e7747645c88d8b"}, - {file = "greenlet-2.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49fcdd8ae391ffabb3b672397b58a9737aaff6b8cae0836e8db8ff386fcea802"}, - {file = "greenlet-2.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c3aa7d3bc545162a6676445709b24a2a375284dc5e2f2432d58b80827c2bd91c"}, - {file = "greenlet-2.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9d8dca31a39dd9f25641559b8cdf9066168c682dfcfbe0f797f03e4c9718a63a"}, - {file = "greenlet-2.0.0-cp38-cp38-win32.whl", hash = "sha256:aa2b371c3633e694d043d6cec7376cb0031c6f67029f37eef40bda105fd58753"}, - {file = "greenlet-2.0.0-cp38-cp38-win_amd64.whl", 
hash = "sha256:0fa2a66fdf0d09929e79f786ad61529d4e752f452466f7ddaa5d03caf77a603d"}, - {file = "greenlet-2.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:e7ec3f2465ba9b7d25895307abe1c1c101a257c54b9ea1522bbcbe8ca8793735"}, - {file = "greenlet-2.0.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:99e9851e40150504474915605649edcde259a4cd9bce2fcdeb4cf33ad0b5c293"}, - {file = "greenlet-2.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20bf68672ae14ef2e2e6d3ac1f308834db1d0b920b3b0674eef48b2dce0498dd"}, - {file = "greenlet-2.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30198bccd774f9b6b1ba7564a0d02a79dd1fe926cfeb4107856fe16c9dfb441c"}, - {file = "greenlet-2.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d65d7d1ff64fb300127d2ffd27db909de4d21712a5dde59a3ad241fb65ee83d7"}, - {file = "greenlet-2.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2f5d396a5457458460b0c28f738fc8ab2738ee61b00c3f845c7047a333acd96c"}, - {file = "greenlet-2.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09f00f9938eb5ae1fe203558b56081feb0ca34a2895f8374cd01129ddf4d111c"}, - {file = "greenlet-2.0.0-cp39-cp39-win32.whl", hash = "sha256:089e123d80dbc6f61fff1ff0eae547b02c343d50968832716a7b0a33bea5f792"}, - {file = "greenlet-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:bc283f99a4815ef70cad537110e3e03abcef56ab7d005ba9a8c6ec33054ce9c0"}, - {file = "greenlet-2.0.0.tar.gz", hash = "sha256:6c66f0da8049ee3c126b762768179820d4c0ae0ca46ae489039e4da2fae39a52"}, + {file = "greenlet-2.0.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:9ed358312e63bf683b9ef22c8e442ef6c5c02973f0c2a939ec1d7b50c974015c"}, + {file = "greenlet-2.0.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4f09b0010e55bec3239278f642a8a506b91034f03a4fb28289a7d448a67f1515"}, + {file = "greenlet-2.0.1-cp27-cp27m-win32.whl", hash = "sha256:1407fe45246632d0ffb7a3f4a520ba4e6051fc2cbd61ba1f806900c27f47706a"}, + {file = 
"greenlet-2.0.1-cp27-cp27m-win_amd64.whl", hash = "sha256:3001d00eba6bbf084ae60ec7f4bb8ed375748f53aeaefaf2a37d9f0370558524"}, + {file = "greenlet-2.0.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d566b82e92ff2e09dd6342df7e0eb4ff6275a3f08db284888dcd98134dbd4243"}, + {file = "greenlet-2.0.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0722c9be0797f544a3ed212569ca3fe3d9d1a1b13942d10dd6f0e8601e484d26"}, + {file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d37990425b4687ade27810e3b1a1c37825d242ebc275066cfee8cb6b8829ccd"}, + {file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be35822f35f99dcc48152c9839d0171a06186f2d71ef76dc57fa556cc9bf6b45"}, + {file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c140e7eb5ce47249668056edf3b7e9900c6a2e22fb0eaf0513f18a1b2c14e1da"}, + {file = "greenlet-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d21681f09e297a5adaa73060737e3aa1279a13ecdcfcc6ef66c292cb25125b2d"}, + {file = "greenlet-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fb412b7db83fe56847df9c47b6fe3f13911b06339c2aa02dcc09dce8bbf582cd"}, + {file = "greenlet-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c6a08799e9e88052221adca55741bf106ec7ea0710bca635c208b751f0d5b617"}, + {file = "greenlet-2.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e112e03d37987d7b90c1e98ba5e1b59e1645226d78d73282f45b326f7bddcb9"}, + {file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56961cfca7da2fdd178f95ca407fa330c64f33289e1804b592a77d5593d9bd94"}, + {file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13ba6e8e326e2116c954074c994da14954982ba2795aebb881c07ac5d093a58a"}, + {file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1bf633a50cc93ed17e494015897361010fc08700d92676c87931d3ea464123ce"}, + {file = "greenlet-2.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9f2c221eecb7ead00b8e3ddb913c67f75cba078fd1d326053225a3f59d850d72"}, + {file = "greenlet-2.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:13ebf93c343dd8bd010cd98e617cb4c1c1f352a0cf2524c82d3814154116aa82"}, + {file = "greenlet-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:6f61d71bbc9b4a3de768371b210d906726535d6ca43506737682caa754b956cd"}, + {file = "greenlet-2.0.1-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:2d0bac0385d2b43a7bd1d651621a4e0f1380abc63d6fb1012213a401cbd5bf8f"}, + {file = "greenlet-2.0.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:f6327b6907b4cb72f650a5b7b1be23a2aab395017aa6f1adb13069d66360eb3f"}, + {file = "greenlet-2.0.1-cp35-cp35m-win32.whl", hash = "sha256:81b0ea3715bf6a848d6f7149d25bf018fd24554a4be01fcbbe3fdc78e890b955"}, + {file = "greenlet-2.0.1-cp35-cp35m-win_amd64.whl", hash = "sha256:38255a3f1e8942573b067510f9611fc9e38196077b0c8eb7a8c795e105f9ce77"}, + {file = "greenlet-2.0.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:04957dc96669be041e0c260964cfef4c77287f07c40452e61abe19d647505581"}, + {file = "greenlet-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:4aeaebcd91d9fee9aa768c1b39cb12214b30bf36d2b7370505a9f2165fedd8d9"}, + {file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974a39bdb8c90a85982cdb78a103a32e0b1be986d411303064b28a80611f6e51"}, + {file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dca09dedf1bd8684767bc736cc20c97c29bc0c04c413e3276e0962cd7aeb148"}, + {file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4c0757db9bd08470ff8277791795e70d0bf035a011a528ee9a5ce9454b6cba2"}, + {file = "greenlet-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = 
"sha256:5067920de254f1a2dee8d3d9d7e4e03718e8fd2d2d9db962c8c9fa781ae82a39"}, + {file = "greenlet-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5a8e05057fab2a365c81abc696cb753da7549d20266e8511eb6c9d9f72fe3e92"}, + {file = "greenlet-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:3d75b8d013086b08e801fbbb896f7d5c9e6ccd44f13a9241d2bf7c0df9eda928"}, + {file = "greenlet-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:097e3dae69321e9100202fc62977f687454cd0ea147d0fd5a766e57450c569fd"}, + {file = "greenlet-2.0.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:cb242fc2cda5a307a7698c93173d3627a2a90d00507bccf5bc228851e8304963"}, + {file = "greenlet-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:72b00a8e7c25dcea5946692a2485b1a0c0661ed93ecfedfa9b6687bd89a24ef5"}, + {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"}, + {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"}, + {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"}, + {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"}, + {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"}, + {file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"}, + {file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"}, + {file = "greenlet-2.0.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = 
"sha256:cd4ccc364cf75d1422e66e247e52a93da6a9b73cefa8cad696f3cbbb75af179d"}, + {file = "greenlet-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c8b1c43e75c42a6cafcc71defa9e01ead39ae80bd733a2608b297412beede68"}, + {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"}, + {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"}, + {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"}, + {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"}, + {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"}, + {file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"}, + {file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"}, + {file = "greenlet-2.0.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:b1992ba9d4780d9af9726bbcef6a1db12d9ab1ccc35e5773685a24b7fb2758eb"}, + {file = "greenlet-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:b5e83e4de81dcc9425598d9469a624826a0b1211380ac444c7c791d4a2137c19"}, + {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"}, + {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"}, + {file = 
"greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"}, + {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"}, + {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"}, + {file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"}, + {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"}, + {file = "greenlet-2.0.1.tar.gz", hash = "sha256:42e602564460da0e8ee67cb6d7236363ee5e131aa15943b6670e44e5c2ed0f67"}, ] gunicorn = [ {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, ] h11 = [ - {file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"}, - {file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"}, + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, ] h2 = [ {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, @@ -1391,12 +1369,12 @@ hpack = [ {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, ] httpcore = [ - {file = "httpcore-0.15.0-py3-none-any.whl", hash = "sha256:1105b8b73c025f23ff7c36468e4432226cbb959176eab66864b8e31c4ee27fa6"}, 
- {file = "httpcore-0.15.0.tar.gz", hash = "sha256:18b68ab86a3ccf3e7dc0f43598eaddcf472b602aba29f9aa6ab85fe2ada3980b"}, + {file = "httpcore-0.16.1-py3-none-any.whl", hash = "sha256:8d393db683cc8e35cc6ecb02577c5e1abfedde52b38316d038932a84b4875ecb"}, + {file = "httpcore-0.16.1.tar.gz", hash = "sha256:3d3143ff5e1656a5740ea2f0c167e8e9d48c5a9bbd7f00ad1f8cff5711b08543"}, ] httpx = [ - {file = "httpx-0.23.0-py3-none-any.whl", hash = "sha256:42974f577483e1e932c3cdc3cd2303e883cbfba17fe228b0f63589764d7b9c4b"}, - {file = "httpx-0.23.0.tar.gz", hash = "sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef"}, + {file = "httpx-0.23.1-py3-none-any.whl", hash = "sha256:0b9b1f0ee18b9978d637b0776bfd7f54e2ca278e063e3586d8f01cda89e042a8"}, + {file = "httpx-0.23.1.tar.gz", hash = "sha256:202ae15319be24efe9a8bd4ed4360e68fde7b38bcc2ce87088d416f026667d19"}, ] hypercorn = [ {file = "Hypercorn-0.14.3-py3-none-any.whl", hash = "sha256:7c491d5184f28ee960dcdc14ab45d14633ca79d72ddd13cf4fcb4cb854d679ab"}, @@ -1499,8 +1477,8 @@ lxml = [ {file = "lxml-4.9.1.tar.gz", hash = "sha256:fe749b052bb7233fe5d072fcb549221a8cb1a16725c47c37e42b0b9cb3ff2c3f"}, ] mako = [ - {file = "Mako-1.2.3-py3-none-any.whl", hash = "sha256:c413a086e38cd885088d5e165305ee8eed04e8b3f8f62df343480da0a385735f"}, - {file = "Mako-1.2.3.tar.gz", hash = "sha256:7fde96466fcfeedb0eed94f187f20b23d85e4cb41444be0e542e2c8c65c396cd"}, + {file = "Mako-1.2.4-py3-none-any.whl", hash = "sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818"}, + {file = "Mako-1.2.4.tar.gz", hash = "sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34"}, ] markdown = [ {file = "Markdown-3.4.1-py3-none-any.whl", hash = "sha256:08fb8465cffd03d10b9dd34a5c3fea908e20391a2a90b88d66362cb05beed186"}, @@ -1558,55 +1536,55 @@ mysqlclient = [ {file = "mysqlclient-2.1.1.tar.gz", hash = "sha256:828757e419fb11dd6c5ed2576ec92c3efaa93a0f7c39e263586d1ee779c3d782"}, ] orjson = [ - {file = 
"orjson-3.8.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:a70aaa2e56356e58c6e1b49f7b7f069df5b15e55db002a74db3ff3f7af67c7ff"}, - {file = "orjson-3.8.1-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:d45db052d01d0ab7579470141d5c3592f4402d43cfacb67f023bc1210a67b7bc"}, - {file = "orjson-3.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2aae92398c0023ac26a6cd026375f765ef5afe127eccabf563c78af7b572d59"}, - {file = "orjson-3.8.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0bd5b4e539db8a9635776bdf9a25c3db84e37165e65d45c8ca90437adc46d6d8"}, - {file = "orjson-3.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21efb87b168066201a120b0f54a2381f6f51ff3727e07b3908993732412b314a"}, - {file = "orjson-3.8.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:e073338e422f518c1d4d80efc713cd17f3ed6d37c8c7459af04a95459f3206d1"}, - {file = "orjson-3.8.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:8f672f3987f6424f60ab2e86ea7ed76dd2806b8e9b506a373fc8499aed85ddb5"}, - {file = "orjson-3.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:231c30958ed99c23128a21993c5ac0a70e1e568e6a898a47f70d5d37461ca47c"}, - {file = "orjson-3.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59b4baf71c9f39125d7e535974b146cc180926462969f6d8821b4c5e975e11b3"}, - {file = "orjson-3.8.1-cp310-none-win_amd64.whl", hash = "sha256:fe25f50dc3d45364428baa0dbe3f613a5171c64eb0286eb775136b74e61ba58a"}, - {file = "orjson-3.8.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:6802edf98f6918e89df355f56be6e7db369b31eed64ff2496324febb8b0aa43b"}, - {file = "orjson-3.8.1-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:a4244f4199a160717f0027e434abb886e322093ceadb2f790ff0c73ed3e17662"}, - {file = "orjson-3.8.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6956cf7a1ac97523e96f75b11534ff851df99a6474a561ad836b6e82004acbb8"}, - {file = "orjson-3.8.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b4e3857dd2416b479f700e9bdf4fcec8c690d2716622397d2b7e848f9833e50"}, - {file = "orjson-3.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8873e490dea0f9cd975d66f84618b6fb57b1ba45ecb218313707a71173d764f"}, - {file = "orjson-3.8.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:124207d2cd04e845eaf2a6171933cde40aebcb8c2d7d3b081e01be066d3014b6"}, - {file = "orjson-3.8.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d8ed77098c2e22181fce971f49a34204c38b79ca91c01d515d07015339ae8165"}, - {file = "orjson-3.8.1-cp311-none-win_amd64.whl", hash = "sha256:8623ac25fa0850a44ac845e9333c4da9ae5707b7cec8ac87cbe9d4e41137180f"}, - {file = "orjson-3.8.1-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:d67a0bd0283a3b17ac43c5ab8e4a7e9d3aa758d6ec5d51c232343c408825a5ad"}, - {file = "orjson-3.8.1-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:d89ef8a4444d83e0a5171d14f2ab4895936ab1773165b020f97d29cf289a2d88"}, - {file = "orjson-3.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97839a6abbebb06099294e6057d5b3061721ada08b76ae792e7041b6cb54c97f"}, - {file = "orjson-3.8.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6071bcf51f0ae4d53b9d3e9164f7138164df4291c484a7b14562075aaa7a2b7b"}, - {file = "orjson-3.8.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c15e7d691cee75b5192fc1fa8487bf541d463246dc25c926b9b40f5b6ab56770"}, - {file = "orjson-3.8.1-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:b9abc49c014def1b832fcd53bdc670474b6fe41f373d16f40409882c0d0eccba"}, - {file = "orjson-3.8.1-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:3fd5472020042482d7da4c26a0ee65dbd931f691e1c838c6cf4232823179ecc1"}, - {file = 
"orjson-3.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e399ed1b0d6f8089b9b6ff2cb3e71ba63a56d8ea88e1d95467949795cc74adfd"}, - {file = "orjson-3.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5e3db6496463c3000d15b7a712da5a9601c6c43682f23f81862fe1d2a338f295"}, - {file = "orjson-3.8.1-cp37-none-win_amd64.whl", hash = "sha256:0f21eed14697083c01f7e00a87e21056fc8fb5851e8a7bca98345189abcdb4d4"}, - {file = "orjson-3.8.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:5a9e324213220578d324e0858baeab47808a13d3c3fbc6ba55a3f4f069d757cf"}, - {file = "orjson-3.8.1-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:69097c50c3ccbcc61292192b045927f1688ca57ce80525dc5d120e0b91e19bb0"}, - {file = "orjson-3.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7822cba140f7ca48ed0256229f422dbae69e3a3475176185db0c0538cfadb57"}, - {file = "orjson-3.8.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03389e3750c521a7f3d4837de23cfd21a7f24574b4b3985c9498f440d21adb03"}, - {file = "orjson-3.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0f9d9b5c6692097de07dd0b2d5ff20fd135bacd1b2fb7ea383ee717a4150c93"}, - {file = "orjson-3.8.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:c2c9ef10b6344465fd5ac002be2d34f818211274dd79b44c75b2c14a979f84f3"}, - {file = "orjson-3.8.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7adaac93678ac61f5dc070f615b18639d16ee66f6a946d5221dbf315e8b74bec"}, - {file = "orjson-3.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b0c1750f73658906b82cabbf4be2f74300644c17cb037fbc8b48d746c3b90c76"}, - {file = "orjson-3.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:da6306e1f03e7085fe0db61d4a3377f70c6fd865118d0afe17f80ae9a8f6f124"}, - {file = "orjson-3.8.1-cp38-none-win_amd64.whl", hash = "sha256:f532c2cbe8c140faffaebcfb34d43c9946599ea8138971f181a399bec7d6b123"}, - {file = 
"orjson-3.8.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:6a7b76d4b44bca418f7797b1e157907b56b7d31caa9091db4e99ebee51c16933"}, - {file = "orjson-3.8.1-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:f850489d89ea12be486492e68f0fd63e402fa28e426d4f0b5fc1eec0595e6109"}, - {file = "orjson-3.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4449e70b98f3ad3e43958360e4be1189c549865c0a128e8629ec96ce92d251c3"}, - {file = "orjson-3.8.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:45357eea9114bd41ef19280066591e9069bb4f6f5bffd533e9bfc12a439d735f"}, - {file = "orjson-3.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5a9bc5bc4d730153529cb0584c63ff286d50663ccd48c9435423660b1bb12d"}, - {file = "orjson-3.8.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:a806aca6b80fa1d996aa16593e4995a71126a085ee1a59fff19ccad29a4e47fd"}, - {file = "orjson-3.8.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:395d02fd6be45f960da014372e7ecefc9e5f8df57a0558b7111a5fa8423c0669"}, - {file = "orjson-3.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:caff3c1e964cfee044a03a46244ecf6373f3c56142ad16458a1446ac6d69824a"}, - {file = "orjson-3.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ded261268d5dfd307078fe3370295e5eb15bdde838bbb882acf8538e061c451"}, - {file = "orjson-3.8.1-cp39-none-win_amd64.whl", hash = "sha256:45c1914795ffedb2970bfcd3ed83daf49124c7c37943ed0a7368971c6ea5e278"}, - {file = "orjson-3.8.1.tar.gz", hash = "sha256:07c42de52dfef56cdcaf2278f58e837b26f5b5af5f1fd133a68c4af203851fc7"}, + {file = "orjson-3.8.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:43e69b360c2851b45c7dbab3b95f7fa8469df73fab325a683f7389c4db63aa71"}, + {file = "orjson-3.8.2-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:64c5da5c9679ef3d85e9bbcbb62f4ccdc1f1975780caa20f2ec1e37b4da6bd36"}, + {file = 
"orjson-3.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c632a2157fa9ec098d655287e9e44809615af99837c49f53d96bfbca453c5bd"}, + {file = "orjson-3.8.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f63da6309c282a2b58d4a846f0717f6440356b4872838b9871dc843ed1fe2b38"}, + {file = "orjson-3.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c9be25c313ba2d5478829d949165445c3bd36c62e07092b4ba8dbe5426574d1"}, + {file = "orjson-3.8.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:4bcce53e9e088f82633f784f79551fcd7637943ab56c51654aaf9d4c1d5cfa54"}, + {file = "orjson-3.8.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:33edb5379c6e6337f9383c85fe4080ce3aa1057cc2ce29345b7239461f50cbd6"}, + {file = "orjson-3.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:da35d347115758bbc8bfaf39bb213c42000f2a54e3f504c84374041d20835cd6"}, + {file = "orjson-3.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d755d94a90a941b91b4d39a6b02e289d8ba358af2d1a911edf266be7942609dc"}, + {file = "orjson-3.8.2-cp310-none-win_amd64.whl", hash = "sha256:7ea96923e26390b2142602ebb030e2a4db9351134696e0b219e5106bddf9b48e"}, + {file = "orjson-3.8.2-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:a0d89de876e6f1cef917a2338378a60a98584e1c2e1c67781e20b6ed1c512478"}, + {file = "orjson-3.8.2-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:8d47e7592fe938aec898eb22ea4946298c018133df084bc78442ff18e2c6347c"}, + {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3d9f1043f618d0c64228aab9711e5bd822253c50b6c56223951e32b51f81d62"}, + {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed10600e8b08f1e87b656ad38ab316191ce94f2c9adec57035680c0dc9e93c81"}, + {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:99c49e49a04bf61fee7aaea6d92ac2b1fcf6507aea894bbdf3fbb25fe792168c"}, + {file = "orjson-3.8.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:1463674f8efe6984902473d7b5ce3edf444c1fcd09dc8aa4779638a28fb9ca01"}, + {file = "orjson-3.8.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c1ef75f1d021d817e5c60a42da0b4b7e3123b1b37415260b8415666ddacc7cd7"}, + {file = "orjson-3.8.2-cp311-none-win_amd64.whl", hash = "sha256:b6007e1ac8564b13b2521720929e8bb3ccd3293d9fdf38f28728dcc06db6248f"}, + {file = "orjson-3.8.2-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:a02c13ae523221576b001071354380e277346722cc6b7fdaacb0fd6db5154b3e"}, + {file = "orjson-3.8.2-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:fa2e565cf8ffdb37ce1887bd1592709ada7f701e61aa4b1e710be94b0aecbab4"}, + {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1d8864288f7c5fccc07b43394f83b721ddc999f25dccfb5d0651671a76023f5"}, + {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1874c05d0bb994601fa2d51605cb910d09343c6ebd36e84a573293523fab772a"}, + {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:349387ed6989e5db22e08c9af8d7ca14240803edc50de451d48d41a0e7be30f6"}, + {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:4e42b19619d6e97e201053b865ca4e62a48da71165f4081508ada8e1b91c6a30"}, + {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:bc112c17e607c59d1501e72afb44226fa53d947d364aed053f0c82d153e29616"}, + {file = "orjson-3.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6fda669211f2ed1fc2c8130187ec90c96b4f77b6a250004e666d2ef8ed524e5f"}, + {file = "orjson-3.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:aebd4e80fea0f20578fd0452908b9206a6a0d5ae9f5c99b6e665bbcd989e56cd"}, + {file = "orjson-3.8.2-cp37-none-win_amd64.whl", hash = 
"sha256:9f3cd0394eb6d265beb2a1572b5663bc910883ddbb5cdfbcb660f5a0444e7fd8"}, + {file = "orjson-3.8.2-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:74e7d54d11b3da42558d69a23bf92c2c48fabf69b38432d5eee2c5b09cd4c433"}, + {file = "orjson-3.8.2-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:8cbadc9be748a823f9c743c7631b1ee95d3925a9c0b21de4e862a1d57daa10ec"}, + {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a07d5a8c69a2947d9554a00302734fe3d8516415c8b280963c92bc1033477890"}, + {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b364ea01d1b71b9f97bf97af9eb79ebee892df302e127a9e2e4f8eaa74d6b98"}, + {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b98a8c825a59db94fbe8e0cce48618624c5a6fb1436467322d90667c08a0bf80"}, + {file = "orjson-3.8.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:ab63103f60b516c0fce9b62cb4773f689a82ab56e19ef2387b5a3182f80c0d78"}, + {file = "orjson-3.8.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:73ab3f4288389381ae33ab99f914423b69570c88d626d686764634d5e0eeb909"}, + {file = "orjson-3.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2ab3fd8728e12c36e20c6d9d70c9e15033374682ce5acb6ed6a08a80dacd254d"}, + {file = "orjson-3.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cde11822cf71a7f0daaa84223249b2696a2b6cda7fa587e9fd762dff1a8848e4"}, + {file = "orjson-3.8.2-cp38-none-win_amd64.whl", hash = "sha256:b14765ea5aabfeab1a194abfaa0be62c9fee6480a75ac8c6974b4eeede3340b4"}, + {file = "orjson-3.8.2-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:6068a27d59d989d4f2864c2fc3440eb7126a0cfdfaf8a4ad136b0ffd932026ae"}, + {file = "orjson-3.8.2-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:6bf36fa759a1b941fc552ad76b2d7fb10c1d2a20c056be291ea45eb6ae1da09b"}, + {file = 
"orjson-3.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f436132e62e647880ca6988974c8e3165a091cb75cbed6c6fd93e931630c22fa"}, + {file = "orjson-3.8.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3ecd8936259a5920b52a99faf62d4efeb9f5e25a0aacf0cce1e9fa7c37af154f"}, + {file = "orjson-3.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c13114b345cda33644f64e92fe5d8737828766cf02fbbc7d28271a95ea546832"}, + {file = "orjson-3.8.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6e43cdc3ddf96bdb751b748b1984b701125abacca8fc2226b808d203916e8cba"}, + {file = "orjson-3.8.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ee39071da2026b11e4352d6fc3608a7b27ee14bc699fd240f4e604770bc7a255"}, + {file = "orjson-3.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1c3833976ebbeb3b5b6298cb22e23bf18453f6b80802103b7d08f7dd8a61611d"}, + {file = "orjson-3.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b9a34519d3d70935e1cd3797fbed8fbb6f61025182bea0140ca84d95b6f8fbe5"}, + {file = "orjson-3.8.2-cp39-none-win_amd64.whl", hash = "sha256:2734086d9a3dd9591c4be7d05aff9beccc086796d3f243685e56b7973ebac5bc"}, + {file = "orjson-3.8.2.tar.gz", hash = "sha256:a2fb95a45031ccf278e44341027b3035ab99caa32aa173279b1f0a06324f434b"}, ] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, @@ -1704,43 +1682,37 @@ pydantic = [ {file = "pydantic-1.10.2.tar.gz", hash = "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410"}, ] pygit2 = [ - {file = "pygit2-1.10.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e3f60e47c6a7a87f18a112753eb98848f4c5333986bec1940558ce09cdaf53bf"}, - {file = "pygit2-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f0f69ea42231bebf08006c85cd5aa233c9c047c5a88b7fcfb4b639476b70e31b"}, - {file = 
"pygit2-1.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0097b6631ef05c837c4800fad559d0865a90c55475a18f38c6f2f5a12750e914"}, - {file = "pygit2-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddb3b5bdcdfae205d9cc0c80bc53fad222a5ba67e66fd336ef223f86b0ac5835"}, - {file = "pygit2-1.10.1-cp310-cp310-win32.whl", hash = "sha256:3efd2a2ab2bb443e1b758525546d74a5a12fe27006194d3c02b3e6ecc1e101e6"}, - {file = "pygit2-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:11225811194ae6b9dbb34c2e8900e0eba6eacc180d82766e3dbddcbd2c6e6454"}, - {file = "pygit2-1.10.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:73e251d0b73f1010ad28c20bcdcf33e312fb363f10b7268ad2bcfa09770f9ac2"}, - {file = "pygit2-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cb73f7967207a9ac485722ef0e517e5ca482f3c1308a0ac934707cb267b0ac7a"}, - {file = "pygit2-1.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b115bef251af4daf18f2f967287b56da2eae2941d5389dc1666bd0160892d769"}, - {file = "pygit2-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd55a6cf7ad6276fb5772e5c60c51fca2d9a5e68ea3e7237847421c10080a68"}, - {file = "pygit2-1.10.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:33138c256ad0ff084f5d8a82ab7d280f9ed6706ebb000ac82e3d133e2d82b366"}, - {file = "pygit2-1.10.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f4f507e5cd775f6d5d95ec65761af4cdb33b2f859af15bf10a06d11efd0d3b2"}, - {file = "pygit2-1.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:752f844d5379081fae5ef78e3bf6f0f35ae9b11aafc37e5e03e1c3607b196806"}, - {file = "pygit2-1.10.1-cp37-cp37m-win32.whl", hash = "sha256:b31ffdbc87629613ae03a533e01eee79112a12f66faf375fa08934074044a664"}, - {file = "pygit2-1.10.1-cp37-cp37m-win_amd64.whl", hash = "sha256:e09386b71ad474f2c2c02b6b251fa904b1145dabfe9095955ab30a789aaf84c0"}, - {file = 
"pygit2-1.10.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:564e832e750f889aea3bb3e82674e1c860c9b89a141404530271e1341723a258"}, - {file = "pygit2-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:43bb910272866eb822e930dbd0feecc340e0c24934143aab651fa180cc5ebfb0"}, - {file = "pygit2-1.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e4905cbb87db598b1cb38800ff995c0ba1f58745e2f52af4d54dbc93b9bda8"}, - {file = "pygit2-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb1f4689ce38cd62a7000d38602ba4d704df5cec708e5d98dadaffcf510f3317"}, - {file = "pygit2-1.10.1-cp38-cp38-win32.whl", hash = "sha256:b67ef30f3c022be1d6da9ef0188f60fc2d20639bff44693ef5653818e887001b"}, - {file = "pygit2-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:dcd849c44bd743d829dbd9dc9d7e13c14cf31a47c22e2e3f9e98fa845a8b8b28"}, - {file = "pygit2-1.10.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e8bb9002924975271d64e8869b44ea97f068e85b5edd03e802e4917b770aaf2d"}, - {file = "pygit2-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:889ca83528c0649afd970da700cc6ed47dc340481f146a39ba5bfbeca1ddd6f8"}, - {file = "pygit2-1.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5465db21c6fd481ec29aa7afcca9a85b1fdb19b2f2d09a31b4bdba2f1bd0e75"}, - {file = "pygit2-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ceecd5d30583f9db56aadcd7238bb3c76a2934d8a932de47aed77fe3c188e7"}, - {file = "pygit2-1.10.1-cp39-cp39-win32.whl", hash = "sha256:9d6e1270b91e7bf70185bb4c3686e04cca87a385c8a2d5c74eec8770091531be"}, - {file = "pygit2-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:d4251830276018c2346ddccad4b4ce06ed1d983b002a633c4d894b13669052d0"}, - {file = "pygit2-1.10.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7eb2cee54a1cb468b5502493ee4f3ec2f1f82db9c46fab7dacaa37afc4fcde8e"}, - {file = 
"pygit2-1.10.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:411dc8af5f25c30a0c3d79ee1e22fb892d6fd6ccb54d4c1fb7746e6274e36426"}, - {file = "pygit2-1.10.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe41da630f4e7cb290dc7e97edf30a59d634426af52a89d4ab5c0fb1ea9ccfe4"}, - {file = "pygit2-1.10.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9da53c6f5c08308450059d7dfb3067d59c45f14bee99743e536c5f9d9823f154"}, - {file = "pygit2-1.10.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb49f9469a893f75f105cdf2c79254859aaf2fdce1078c38514ca12fe185a759"}, - {file = "pygit2-1.10.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff838665d6410b5a605f53c1ccd2d2f87ca30de59e89773e7cb5e10211426f90"}, - {file = "pygit2-1.10.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9d23bb613f5692da78c09a79ae40d6ced57b772ae9153aed23a9aa1889a16c85"}, - {file = "pygit2-1.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3a3cc867fa6907bfc78d7d1322f3dabd4107b16238205df7e2dec9ee265f0c0"}, - {file = "pygit2-1.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb3eb2f1d437db6e115d5f56d122f2f3737fa2e6063aa42e4d856ca76d785ce6"}, - {file = "pygit2-1.10.1.tar.gz", hash = "sha256:354651bf062c02d1f08041d6fbf1a9b4bf7a93afce65979bdc08bdc65653aa2e"}, + {file = "pygit2-1.11.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:263e05ac655a4ce0a1083aaaedfd0a900b8dee2c3bb3ecf4f4e504a404467d1f"}, + {file = "pygit2-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ee6b4a0e181c576cdb64b1568bfbff3d1c2cd7e99808f578c8b08875c0f43739"}, + {file = "pygit2-1.11.1-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:d1b5fcaac1f29337f2d1465fa095e2e375b76a06385bda9391cb418c7937fb54"}, + {file = "pygit2-1.11.1-cp310-cp310-manylinux_2_24_x86_64.whl", hash = 
"sha256:96ff745d3199909d06cab5e419a6b953be99992414a08ec4dddb682f395de8f1"}, + {file = "pygit2-1.11.1-cp310-cp310-win32.whl", hash = "sha256:b3c8726f0c9a2b0e04aac37b18027c58c2697b9c021d3458b28bc250b9b6aecf"}, + {file = "pygit2-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:f42409d25bbfc090fd1af1f5f47584d7e0c4212b037a7f86639a02c30420c6ee"}, + {file = "pygit2-1.11.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:29f89d96bbb404ca1566418463521039903094fad2f81a76d7083810d2ea3aad"}, + {file = "pygit2-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d5c158b9430c5e76ca728b1a214bf21d355af6ac6e2da86ed17775b870b6c6eb"}, + {file = "pygit2-1.11.1-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:6c3434b143e7570ec45cd1a0e344fe7a12e64b99e7155fa38b74f724c8fc243c"}, + {file = "pygit2-1.11.1-cp311-cp311-manylinux_2_24_x86_64.whl", hash = "sha256:550aa503c86ef0061ce64d61c3672b15b500c2b1e4224c405acecfac2211b5d9"}, + {file = "pygit2-1.11.1-cp311-cp311-win32.whl", hash = "sha256:f270f86a0185ca2064e1aa6b8db3bb677b1bf76ee35f48ca5ce28a921fad5632"}, + {file = "pygit2-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:56b9deeab214653805214f05337f5e9552b47bf268c285551f20ea51a6056c3e"}, + {file = "pygit2-1.11.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3c5838e6516abc4384498f4b4c7f88578221596dc2ba8db2320ff2cfebe9787e"}, + {file = "pygit2-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a886aab5aae8d8db572e20b9f56c13cd506775265222ea7f35b2c781e4fa3a5e"}, + {file = "pygit2-1.11.1-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:3be4534180edd53e3e1da93c5b091975566bfdffdc73f21930d79fef096a25d2"}, + {file = "pygit2-1.11.1-cp38-cp38-manylinux_2_24_x86_64.whl", hash = "sha256:4d6209c703764ae0ba57b17038482f3e54f432f80f88ccd490d7f8b70b167db6"}, + {file = "pygit2-1.11.1-cp38-cp38-win32.whl", hash = "sha256:ddb032fa71d4b4a64bf101e37eaa21f5369f20a862b5e34bbc33854a3a35f641"}, + {file = "pygit2-1.11.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:f8de0091e5eeaea2004f63f7dcb4540780f2124f68c0bcb670ae0fa9ada8bf66"}, + {file = "pygit2-1.11.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9b44674e53efa9eca36e44f2f3d1a29e53e78649ba13105ae0b037d557f2c076"}, + {file = "pygit2-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0170f31c2efb15f6779689df328c05a8005ecb2b92784a37ff967d713cdafe82"}, + {file = "pygit2-1.11.1-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:960a55ff78f48887a7aa8ece952aad0f52f0a2ba1ad7bddd7064fbbefd85dfbb"}, + {file = "pygit2-1.11.1-cp39-cp39-manylinux_2_24_x86_64.whl", hash = "sha256:df722c90fb54a42fa019dcf8d8f82961c3099c3024f1fda46c53e0886ff8f0f3"}, + {file = "pygit2-1.11.1-cp39-cp39-win32.whl", hash = "sha256:3b091e7fd00dd2a2cd3a6b5e235b6cbfbc1c07f15ee83a5cb3f188e1d6d1bca1"}, + {file = "pygit2-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:da040dc28800831bcbefef0850466739f103bfc769d952bd10c449646d52ce8f"}, + {file = "pygit2-1.11.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:585daa3956f1dc10d08e3459c20b57be42c7f9c0fbde21e797b3a00b5948f061"}, + {file = "pygit2-1.11.1-pp38-pypy38_pp73-manylinux_2_24_aarch64.whl", hash = "sha256:273878adeced2aec7885745b73fffb91a8e67868c105bf881b61008d42497ad6"}, + {file = "pygit2-1.11.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:48cfd72283a08a9226aca115870799ee92898d692699f541a3b3f519805108ec"}, + {file = "pygit2-1.11.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a9ca4cb2481d2df14d23c765facef325f717d9a3966a986b86e88d92eef11929"}, + {file = "pygit2-1.11.1-pp39-pypy39_pp73-manylinux_2_24_aarch64.whl", hash = "sha256:d5f64a424d9123b047458b0107c5dd33559184b56a1f58b10056ea5cbac74360"}, + {file = "pygit2-1.11.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:f13e190cc080bde093138e12bcb609500276227e3e8e8bd8765a2fd49ae2efb8"}, + {file = "pygit2-1.11.1.tar.gz", hash = "sha256:793f583fd33620f0ac38376db0f57768ef2922b89b459e75b1ac440377eb64ec"}, ] pyparsing = [ {file = 
"pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, @@ -1751,8 +1723,8 @@ pytest = [ {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, ] pytest-asyncio = [ - {file = "pytest-asyncio-0.20.1.tar.gz", hash = "sha256:626699de2a747611f3eeb64168b3575f70439b06c3d0206e6ceaeeb956e65519"}, - {file = "pytest_asyncio-0.20.1-py3-none-any.whl", hash = "sha256:2c85a835df33fda40fe3973b451e0c194ca11bc2c007eabff90bb3d156fc172b"}, + {file = "pytest-asyncio-0.20.2.tar.gz", hash = "sha256:32a87a9836298a881c0ec637ebcc952cfe23a56436bdc0d09d1511941dd8a812"}, + {file = "pytest_asyncio-0.20.2-py3-none-any.whl", hash = "sha256:07e0abf9e6e6b95894a39f688a4a875d63c2128f76c02d03d16ccbc35bcc0f8a"}, ] pytest-cov = [ {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, @@ -1774,8 +1746,8 @@ python-multipart = [ {file = "python-multipart-0.0.5.tar.gz", hash = "sha256:f7bb5f611fc600d15fa47b3974c8aa16e93724513b49b5f95c81e6624c83fa43"}, ] redis = [ - {file = "redis-4.3.4-py3-none-any.whl", hash = "sha256:a52d5694c9eb4292770084fa8c863f79367ca19884b329ab574d5cb2036b3e54"}, - {file = "redis-4.3.4.tar.gz", hash = "sha256:ddf27071df4adf3821c4f2ca59d67525c3a82e5f268bed97b813cb4fabf87880"}, + {file = "redis-4.3.5-py3-none-any.whl", hash = "sha256:46652271dc7525cd5a9667e5b0ca983c848c75b2b8f7425403395bb8379dcf25"}, + {file = "redis-4.3.5.tar.gz", hash = "sha256:30c07511627a4c5c4d970e060000772f323174f75e745a26938319817ead7a12"}, ] requests = [ {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, @@ -1786,8 +1758,8 @@ rfc3986 = [ {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, ] setuptools = [ - {file = "setuptools-65.5.0-py3-none-any.whl", hash = 
"sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356"}, - {file = "setuptools-65.5.0.tar.gz", hash = "sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17"}, + {file = "setuptools-65.6.0-py3-none-any.whl", hash = "sha256:6211d2f5eddad8757bd0484923ca7c0a6302ebc4ab32ea5e94357176e0ca0840"}, + {file = "setuptools-65.6.0.tar.gz", hash = "sha256:d1eebf881c6114e51df1664bc2c9133d022f78d12d5f4f665b9191f084e2862d"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, @@ -1802,47 +1774,47 @@ sortedcontainers = [ {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, ] sqlalchemy = [ - {file = "SQLAlchemy-1.4.42-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:28e881266a172a4d3c5929182fde6bb6fba22ac93f137d5380cc78a11a9dd124"}, - {file = "SQLAlchemy-1.4.42-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ca9389a00f639383c93ed00333ed763812f80b5ae9e772ea32f627043f8c9c88"}, - {file = "SQLAlchemy-1.4.42-cp27-cp27m-win32.whl", hash = "sha256:1d0c23ecf7b3bc81e29459c34a3f4c68ca538de01254e24718a7926810dc39a6"}, - {file = "SQLAlchemy-1.4.42-cp27-cp27m-win_amd64.whl", hash = "sha256:6c9d004eb78c71dd4d3ce625b80c96a827d2e67af9c0d32b1c1e75992a7916cc"}, - {file = "SQLAlchemy-1.4.42-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9e3a65ce9ed250b2f096f7b559fe3ee92e6605fab3099b661f0397a9ac7c8d95"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:2e56dfed0cc3e57b2f5c35719d64f4682ef26836b81067ee6cfad062290fd9e2"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b42c59ffd2d625b28cdb2ae4cde8488543d428cba17ff672a543062f7caee525"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:22459fc1718785d8a86171bbe7f01b5c9d7297301ac150f508d06e62a2b4e8d2"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df76e9c60879fdc785a34a82bf1e8691716ffac32e7790d31a98d7dec6e81545"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-win32.whl", hash = "sha256:e7e740453f0149437c101ea4fdc7eea2689938c5760d7dcc436c863a12f1f565"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-win_amd64.whl", hash = "sha256:effc89e606165ca55f04f3f24b86d3e1c605e534bf1a96e4e077ce1b027d0b71"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:97ff50cd85bb907c2a14afb50157d0d5486a4b4639976b4a3346f34b6d1b5272"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12c6949bae10f1012ab5c0ea52ab8db99adcb8c7b717938252137cdf694c775"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11b2ec26c5d2eefbc3e6dca4ec3d3d95028be62320b96d687b6e740424f83b7d"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-win32.whl", hash = "sha256:6045b3089195bc008aee5c273ec3ba9a93f6a55bc1b288841bd4cfac729b6516"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-win_amd64.whl", hash = "sha256:0501f74dd2745ec38f44c3a3900fb38b9db1ce21586b691482a19134062bf049"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:6e39e97102f8e26c6c8550cb368c724028c575ec8bc71afbbf8faaffe2b2092a"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15d878929c30e41fb3d757a5853b680a561974a0168cd33a750be4ab93181628"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fa5b7eb2051e857bf83bade0641628efe5a88de189390725d3e6033a1fff4257"}, - {file = 
"SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e1c5f8182b4f89628d782a183d44db51b5af84abd6ce17ebb9804355c88a7b5"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-win32.whl", hash = "sha256:a7dd5b7b34a8ba8d181402d824b87c5cee8963cb2e23aa03dbfe8b1f1e417cde"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-win_amd64.whl", hash = "sha256:5ede1495174e69e273fad68ad45b6d25c135c1ce67723e40f6cf536cb515e20b"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:9256563506e040daddccaa948d055e006e971771768df3bb01feeb4386c242b0"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4948b6c5f4e56693bbeff52f574279e4ff972ea3353f45967a14c30fb7ae2beb"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1811a0b19a08af7750c0b69e38dec3d46e47c4ec1d74b6184d69f12e1c99a5e0"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b01d9cd2f9096f688c71a3d0f33f3cd0af8549014e66a7a7dee6fc214a7277d"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-win32.whl", hash = "sha256:bd448b262544b47a2766c34c0364de830f7fb0772d9959c1c42ad61d91ab6565"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-win_amd64.whl", hash = "sha256:04f2598c70ea4a29b12d429a80fad3a5202d56dce19dd4916cc46a965a5ca2e9"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:3ab7c158f98de6cb4f1faab2d12973b330c2878d0c6b689a8ca424c02d66e1b3"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ee377eb5c878f7cefd633ab23c09e99d97c449dd999df639600f49b74725b80"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:934472bb7d8666727746a75670a1f8d91a9cae8c464bba79da30a0f6faccd9e1"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb94a3d1ba77ff2ef11912192c066f01e68416f554c194d769391638c8ad09a"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-win32.whl", hash = "sha256:f0f574465b78f29f533976c06b913e54ab4980b9931b69aa9d306afff13a9471"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-win_amd64.whl", hash = "sha256:a85723c00a636eed863adb11f1e8aaa36ad1c10089537823b4540948a8429798"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5ce6929417d5dce5ad1d3f147db81735a4a0573b8fb36e3f95500a06eaddd93e"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723e3b9374c1ce1b53564c863d1a6b2f1dc4e97b1c178d9b643b191d8b1be738"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:876eb185911c8b95342b50a8c4435e1c625944b698a5b4a978ad2ffe74502908"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fd49af453e590884d9cdad3586415922a8e9bb669d874ee1dc55d2bc425aacd"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-win32.whl", hash = "sha256:e4ef8cb3c5b326f839bfeb6af5f406ba02ad69a78c7aac0fbeeba994ad9bb48a"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-win_amd64.whl", hash = "sha256:5f966b64c852592469a7eb759615bbd351571340b8b344f1d3fa2478b5a4c934"}, - {file = "SQLAlchemy-1.4.42.tar.gz", hash = "sha256:177e41914c476ed1e1b77fd05966ea88c094053e17a85303c4ce007f88eff363"}, + {file = "SQLAlchemy-1.4.44-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:da60b98b0f6f0df9fbf8b72d67d13b73aa8091923a48af79a951d4088530a239"}, + {file = "SQLAlchemy-1.4.44-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:95f4f8d62589755b507218f2e3189475a4c1f5cc9db2aec772071a7dc6cd5726"}, + {file = "SQLAlchemy-1.4.44-cp27-cp27m-win32.whl", hash = "sha256:afd1ac99179d1864a68c06b31263a08ea25a49df94e272712eb2824ef151e294"}, + {file = "SQLAlchemy-1.4.44-cp27-cp27m-win_amd64.whl", hash = "sha256:f8e5443295b218b08bef8eb85d31b214d184b3690d99a33b7bd8e5591e2b0aa1"}, + {file = "SQLAlchemy-1.4.44-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:53f90a2374f60e703c94118d21533765412da8225ba98659de7dd7998641ab17"}, + {file = "SQLAlchemy-1.4.44-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:65a0ad931944fcb0be12a8e0ac322dbd3ecf17c53f088bc10b6da8f0caac287b"}, + {file = "SQLAlchemy-1.4.44-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b185041a4dc5c685283ea98c2f67bbfa47bb28e4a4f5b27ebf40684e7a9f8"}, + {file = "SQLAlchemy-1.4.44-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:80ead36fb1d676cc019586ffdc21c7e906ce4bf243fe4021e4973dae332b6038"}, + {file = "SQLAlchemy-1.4.44-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68e0cd5d32a32c4395168d42f2fefbb03b817ead3a8f3704b8bd5697c0b26c24"}, + {file = "SQLAlchemy-1.4.44-cp310-cp310-win32.whl", hash = "sha256:ae1ed1ebc407d2f66c6f0ec44ef7d56e3f455859df5494680e2cf89dad8e3ae0"}, + {file = "SQLAlchemy-1.4.44-cp310-cp310-win_amd64.whl", hash = "sha256:6f0ea4d7348feb5e5d0bf317aace92e28398fa9a6e38b7be9ec1f31aad4a8039"}, + {file = "SQLAlchemy-1.4.44-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f5e8ed9cde48b76318ab989deeddc48f833d2a6a7b7c393c49b704f67dedf01d"}, + {file = "SQLAlchemy-1.4.44-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c857676d810ca196be73c98eb839125d6fa849bfa3589be06201a6517f9961c"}, + {file = 
"SQLAlchemy-1.4.44-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c56e6899fa6e767e4be5d106941804a4201c5cb9620a409c0b80448ec70b656"}, + {file = "SQLAlchemy-1.4.44-cp311-cp311-win32.whl", hash = "sha256:c46322354c58d4dc039a2c982d28284330f8919f31206894281f4b595b9d8dbe"}, + {file = "SQLAlchemy-1.4.44-cp311-cp311-win_amd64.whl", hash = "sha256:7313e4acebb9ae88dbde14a8a177467a7625b7449306c03a3f9f309b30e163d0"}, + {file = "SQLAlchemy-1.4.44-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:17aee7bfcef7bf0dea92f10e5dfdd67418dcf6fe0759f520e168b605855c003e"}, + {file = "SQLAlchemy-1.4.44-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9470633395e5f24d6741b4c8a6e905bce405a28cf417bba4ccbaadf3dab0111d"}, + {file = "SQLAlchemy-1.4.44-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:393f51a09778e8984d735b59a810731394308b4038acdb1635397c2865dae2b6"}, + {file = "SQLAlchemy-1.4.44-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7e3b9e01fdbe1ce3a165cc7e1ff52b24813ee79c6df6dee0d1e13888a97817e"}, + {file = "SQLAlchemy-1.4.44-cp36-cp36m-win32.whl", hash = "sha256:6a06c2506c41926d2769f7968759995f2505e31c5b5a0821e43ca5a3ddb0e8ae"}, + {file = "SQLAlchemy-1.4.44-cp36-cp36m-win_amd64.whl", hash = "sha256:3ca21b35b714ce36f4b8d1ee8d15f149db8eb43a472cf71600bf18dae32286e7"}, + {file = "SQLAlchemy-1.4.44-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:3cbdbed8cdcae0f83640a9c44fa02b45a6c61e149c58d45a63c9581aba62850f"}, + {file = "SQLAlchemy-1.4.44-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a22208c1982f1fe2ae82e5e4c3d4a6f2445a7a0d65fb7983a3d7cbbe3983f5a4"}, + {file = "SQLAlchemy-1.4.44-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:d3b9ac11f36ab9a726097fba7c7f6384f0129aedb017f1d4d1d4fce9052a1320"}, + {file = "SQLAlchemy-1.4.44-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d654870a66027af3a26df1372cf7f002e161c6768ebe4c9c6fdc0da331cb5173"}, + {file = "SQLAlchemy-1.4.44-cp37-cp37m-win32.whl", hash = "sha256:0be9b479c5806cece01f1581726573a8d6515f8404e082c375b922c45cfc2a7b"}, + {file = "SQLAlchemy-1.4.44-cp37-cp37m-win_amd64.whl", hash = "sha256:3eba07f740488c3a125f17c092a81eeae24a6c7ec32ac9dbc52bf7afaf0c4f16"}, + {file = "SQLAlchemy-1.4.44-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:ad5f966623905ee33694680dda1b735544c99c7638f216045d21546d3d8c6f5b"}, + {file = "SQLAlchemy-1.4.44-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f68eab46649504eb95be36ca529aea16cd199f080726c28cbdbcbf23d20b2a2"}, + {file = "SQLAlchemy-1.4.44-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:21f3df74a0ab39e1255e94613556e33c1dc3b454059fe0b365ec3bbb9ed82e4a"}, + {file = "SQLAlchemy-1.4.44-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8080bc51a775627865e0f1dbfc0040ff4ace685f187f6036837e1727ba2ed10"}, + {file = "SQLAlchemy-1.4.44-cp38-cp38-win32.whl", hash = "sha256:b6a337a2643a41476fb6262059b8740f4b9a2ec29bf00ffb18c18c080f6e0aed"}, + {file = "SQLAlchemy-1.4.44-cp38-cp38-win_amd64.whl", hash = "sha256:b737fbeb2f78926d1f59964feb287bbbd050e7904766f87c8ce5cfb86e6d840c"}, + {file = "SQLAlchemy-1.4.44-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:c9aa372b295a36771cffc226b6517df3011a7d146ac22d19fa6a75f1cdf9d7e6"}, + {file = "SQLAlchemy-1.4.44-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:237067ba0ef45a518b64606e1807f7229969ad568288b110ed5f0ca714a3ed3a"}, + {file = 
"SQLAlchemy-1.4.44-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6d7e1b28342b45f19e3dea7873a9479e4a57e15095a575afca902e517fb89652"}, + {file = "SQLAlchemy-1.4.44-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c0093678001f5d79f2dcbf3104c54d6c89e41ab50d619494c503a4d3f1aef2"}, + {file = "SQLAlchemy-1.4.44-cp39-cp39-win32.whl", hash = "sha256:7cf7c7adbf4417e3f46fc5a2dbf8395a5a69698217337086888f79700a12e93a"}, + {file = "SQLAlchemy-1.4.44-cp39-cp39-win_amd64.whl", hash = "sha256:d3b6d4588994da73567bb00af9d7224a16c8027865a8aab53ae9be83f9b7cbd1"}, + {file = "SQLAlchemy-1.4.44.tar.gz", hash = "sha256:2dda5f96719ae89b3ec0f1b79698d86eb9aecb1d54e990abb3fdd92c04b46a90"}, ] srcinfo = [ {file = "srcinfo-0.0.8-py3-none-any.whl", hash = "sha256:0922ee4302b927d7ddea74c47e539b226a0a7738dc89f95b66404a28d07f3f6b"}, @@ -1873,8 +1845,8 @@ urllib3 = [ {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, ] uvicorn = [ - {file = "uvicorn-0.19.0-py3-none-any.whl", hash = "sha256:cc277f7e73435748e69e075a721841f7c4a95dba06d12a72fe9874acced16f6f"}, - {file = "uvicorn-0.19.0.tar.gz", hash = "sha256:cf538f3018536edb1f4a826311137ab4944ed741d52aeb98846f52215de57f25"}, + {file = "uvicorn-0.20.0-py3-none-any.whl", hash = "sha256:c3ed1598a5668208723f2bb49336f4509424ad198d6ab2615b7783db58d919fd"}, + {file = "uvicorn-0.20.0.tar.gz", hash = "sha256:a4e12017b940247f836bc90b72e725d7dfd0c8ed1c51eb365f5ba30d9f5127d8"}, ] webencodings = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, @@ -1884,72 +1856,6 @@ werkzeug = [ {file = "Werkzeug-2.2.2-py3-none-any.whl", hash = "sha256:f979ab81f58d7318e064e99c4506445d60135ac5cd2e177a2de0089bfd4c9bd5"}, {file = "Werkzeug-2.2.2.tar.gz", hash = 
"sha256:7ea2d48322cc7c0f8b3a215ed73eabd7b5d75d0b50e31ab006286ccff9e00b8f"}, ] -wrapt = [ - {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, - {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, - {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, - {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, - {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, - {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, - {file = 
"wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, - {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, - {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, - {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, - {file = 
"wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, - {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, - {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, - {file = 
"wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, - {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, - {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, - {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, - {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, -] wsproto = [ {file = "wsproto-1.2.0-py3-none-any.whl", hash = 
"sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, diff --git a/pyproject.toml b/pyproject.toml index 7fc0db47..e977ad4e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -62,16 +62,16 @@ asgiref = "^3.4.1" bcrypt = "^4.0.0" bleach = "^5.0.0" email-validator = "^1.3.0" -fakeredis = "^1.10.0" +fakeredis = "^2.0.0" feedgen = "^0.9.0" -httpx = "^0.23.0" +httpx = "^0.23.1" itsdangerous = "^2.0.1" lxml = "^4.6.3" -orjson = "^3.8.1" +orjson = "^3.8.2" protobuf = "^4.21.9" -pygit2 = "^1.7.0" +pygit2 = "^1.11.1" python-multipart = "^0.0.5" -redis = "^4.0.0" +redis = "^4.3.5" requests = "^2.28.1" paginate = "^0.5.6" @@ -85,7 +85,7 @@ Werkzeug = "^2.0.2" SQLAlchemy = "^1.4.26" # ASGI -uvicorn = "^0.19.0" +uvicorn = "^0.20.0" gunicorn = "^20.1.0" Hypercorn = "^0.14.0" prometheus-fastapi-instrumentator = "^5.7.1" @@ -99,7 +99,7 @@ srcinfo = "^0.0.8" [tool.poetry.dev-dependencies] coverage = "^6.0.2" pytest = "^7.2.0" -pytest-asyncio = "^0.20.1" +pytest-asyncio = "^0.20.2" pytest-cov = "^4.0.0" pytest-tap = "^3.2" From 512ba0238914c28a4e445d04ae08b714bd14558c Mon Sep 17 00:00:00 2001 From: renovate Date: Wed, 23 Nov 2022 00:25:31 +0000 Subject: [PATCH 161/415] fix(deps): update dependency fastapi to ^0.87.0 --- poetry.lock | 80 +++++++++++++++++++------------------------------- pyproject.toml | 2 +- 2 files changed, 31 insertions(+), 51 deletions(-) diff --git a/poetry.lock b/poetry.lock index 22cbd3fd..12770f64 100644 --- a/poetry.lock +++ b/poetry.lock @@ -69,7 +69,7 @@ python-versions = ">=3.5" dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", 
"hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] [[package]] name = "authlib" @@ -138,7 +138,7 @@ optional = false python-versions = ">=3.6.0" [package.extras] -unicode-backport = ["unicodedata2"] +unicode_backport = ["unicodedata2"] [[package]] name = "click" @@ -260,7 +260,7 @@ lua = ["lupa (>=1.13,<2.0)"] [[package]] name = "fastapi" -version = "0.85.2" +version = "0.87.0" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" category = "main" optional = false @@ -268,13 +268,13 @@ python-versions = ">=3.7" [package.dependencies] pydantic = ">=1.6.2,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1.7.3,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0" -starlette = "0.20.4" +starlette = "0.21.0" [package.extras] -all = ["email-validator (>=1.1.1,<2.0.0)", "itsdangerous (>=1.1.0,<3.0.0)", "jinja2 (>=2.11.2,<4.0.0)", "orjson (>=3.2.1,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "uvicorn[standard] (>=0.12.0,<0.19.0)"] -dev = ["autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "pre-commit (>=2.17.0,<3.0.0)", "uvicorn[standard] (>=0.12.0,<0.19.0)"] +all = ["email-validator (>=1.1.1)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +dev = ["pre-commit (>=2.17.0,<3.0.0)", "ruff (==0.0.114)", "uvicorn[standard] (>=0.12.0,<0.19.0)"] doc = 
["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer[all] (>=0.6.1,<0.7.0)"] -test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.8.0)", "databases[sqlite] (>=0.3.2,<0.7.0)", "email-validator (>=1.1.1,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.23.0,<0.24.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.982)", "orjson (>=3.2.1,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=7.1.3,<8.0.0)", "pytest-cov (>=2.12.0,<5.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "sqlalchemy (>=1.3.18,<=1.4.41)", "types-orjson (==3.6.2)", "types-ujson (==5.5.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"] +test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.8.0)", "coverage[toml] (>=6.5.0,<7.0)", "databases[sqlite] (>=0.3.2,<0.7.0)", "email-validator (>=1.1.1,<2.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.23.0,<0.24.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.982)", "orjson (>=3.2.1,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=7.1.3,<8.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "ruff (==0.0.114)", "sqlalchemy (>=1.3.18,<=1.4.41)", "types-orjson (==3.6.2)", "types-ujson (==5.5.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"] [[package]] name = "feedgen" @@ -309,7 +309,7 @@ optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" [package.extras] -docs = ["Sphinx", "docutils (<0.18)"] +docs = ["docutils (<0.18)", "sphinx"] test = ["faulthandler", "objgraph", "psutil"] [[package]] @@ -320,9 +320,6 @@ category = "main" optional = false python-versions = ">=3.5" -[package.dependencies] -setuptools = ">=3.0" 
- [package.extras] eventlet = ["eventlet (>=0.24.1)"] gevent = ["gevent (>=1.4.0)"] @@ -411,7 +408,7 @@ toml = "*" wsproto = ">=0.14.0" [package.extras] -docs = ["pydata_sphinx_theme"] +docs = ["pydata-sphinx-theme"] h3 = ["aioquic (>=0.9.0,<1.0)"] trio = ["trio (>=0.11.0)"] uvloop = ["uvloop"] @@ -489,7 +486,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" [package.extras] cssselect = ["cssselect (>=0.7)"] html5 = ["html5lib"] -htmlsoup = ["BeautifulSoup4"] +htmlsoup = ["beautifulsoup4"] source = ["Cython (>=0.29.7)"] [[package]] @@ -504,7 +501,7 @@ python-versions = ">=3.7" MarkupSafe = ">=0.9.2" [package.extras] -babel = ["Babel"] +babel = ["babel"] lingua = ["lingua"] testing = ["pytest"] @@ -817,7 +814,7 @@ urllib3 = ">=1.21.1,<1.27" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rfc3986" @@ -833,19 +830,6 @@ idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} [package.extras] idna2008 = ["idna"] -[[package]] -name = "setuptools" -version = "65.6.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", 
"pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - [[package]] name = "six" version = "1.16.0" @@ -886,21 +870,21 @@ aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] +mariadb_connector = ["mariadb (>=1.0.1,!=1.1.2)"] mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] +mssql_pymssql = ["pymssql"] +mssql_pyodbc = ["pyodbc"] mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] -mysql-connector = ["mysql-connector-python"] +mysql_connector = ["mysql-connector-python"] oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql_asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql_pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql_psycopg2binary = ["psycopg2-binary"] +postgresql_psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql", "pymysql (<1)"] -sqlcipher = ["sqlcipher3_binary"] +sqlcipher = ["sqlcipher3-binary"] [[package]] name = "srcinfo" @@ -915,7 +899,7 @@ parse = "*" [[package]] name = "starlette" -version = "0.20.4" +version = "0.21.0" description = "The little ASGI library that shines." 
category = "main" optional = false @@ -926,10 +910,10 @@ anyio = ">=3.4.0,<5" typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} [package.extras] -full = ["itsdangerous", "jinja2", "python-multipart", "pyyaml", "requests"] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"] [[package]] -name = "tap-py" +name = "tap.py" version = "3.1" description = "Test Anything Protocol (TAP) tools" category = "dev" @@ -1039,7 +1023,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.11" -content-hash = "b178f1fcbba93d9cbc8dd23193b25afd5e1ba971196757abf098a1dfa2666cba" +content-hash = "02d70de5b58cf84a7b9015fc1d1a598bdb139b32f7239846183eb924e336ce86" [metadata.files] aiofiles = [ @@ -1280,8 +1264,8 @@ fakeredis = [ {file = "fakeredis-2.0.0.tar.gz", hash = "sha256:6d1dc2417921b7ce56a80877afa390d6335a3154146f201a86e3a14417bdc79e"}, ] fastapi = [ - {file = "fastapi-0.85.2-py3-none-any.whl", hash = "sha256:6292db0edd4a11f0d938d6033ccec5f706e9d476958bf33b119e8ddb4e524bde"}, - {file = "fastapi-0.85.2.tar.gz", hash = "sha256:3e10ea0992c700e0b17b6de8c2092d7b9cd763ce92c49ee8d4be10fee3b2f367"}, + {file = "fastapi-0.87.0-py3-none-any.whl", hash = "sha256:254453a2e22f64e2a1b4e1d8baf67d239e55b6c8165c079d25746a5220c81bb4"}, + {file = "fastapi-0.87.0.tar.gz", hash = "sha256:07032e53df9a57165047b4f38731c38bdcc3be5493220471015e2b4b51b486a4"}, ] feedgen = [ {file = "feedgen-0.9.0.tar.gz", hash = "sha256:8e811bdbbed6570034950db23a4388453628a70e689a6e8303ccec430f5a804a"}, @@ -1757,10 +1741,6 @@ rfc3986 = [ {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, ] -setuptools = [ - {file = "setuptools-65.6.0-py3-none-any.whl", hash = 
"sha256:6211d2f5eddad8757bd0484923ca7c0a6302ebc4ab32ea5e94357176e0ca0840"}, - {file = "setuptools-65.6.0.tar.gz", hash = "sha256:d1eebf881c6114e51df1664bc2c9133d022f78d12d5f4f665b9191f084e2862d"}, -] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, @@ -1821,10 +1801,10 @@ srcinfo = [ {file = "srcinfo-0.0.8.tar.gz", hash = "sha256:5ac610cf8b15d4b0a0374bd1f7ad301675c2938f0414addf3ef7d7e3fcaf5c65"}, ] starlette = [ - {file = "starlette-0.20.4-py3-none-any.whl", hash = "sha256:c0414d5a56297d37f3db96a84034d61ce29889b9eaccf65eb98a0b39441fcaa3"}, - {file = "starlette-0.20.4.tar.gz", hash = "sha256:42fcf3122f998fefce3e2c5ad7e5edbf0f02cf685d646a83a08d404726af5084"}, + {file = "starlette-0.21.0-py3-none-any.whl", hash = "sha256:0efc058261bbcddeca93cad577efd36d0c8a317e44376bcfc0e097a2b3dc24a7"}, + {file = "starlette-0.21.0.tar.gz", hash = "sha256:b1b52305ee8f7cfc48cde383496f7c11ab897cd7112b33d998b1317dc8ef9027"}, ] -tap-py = [ +"tap.py" = [ {file = "tap.py-3.1-py3-none-any.whl", hash = "sha256:928c852f3361707b796c93730cc5402c6378660b161114461066acf53d65bf5d"}, {file = "tap.py-3.1.tar.gz", hash = "sha256:3c0cd45212ad5a25b35445964e2517efa000a118a1bfc3437dae828892eaf1e1"}, ] diff --git a/pyproject.toml b/pyproject.toml index e977ad4e..762a52c1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -93,7 +93,7 @@ pytest-xdist = "^3.0.2" filelock = "^3.3.2" posix-ipc = "^1.0.5" pyalpm = "^0.10.6" -fastapi = "^0.85.1" +fastapi = "^0.87.0" srcinfo = "^0.0.8" [tool.poetry.dev-dependencies] From 1216399d53b3f3163eccc2ea0aacaeaf23562373 Mon Sep 17 00:00:00 2001 From: moson-mo Date: Thu, 24 Nov 2022 22:23:37 +0100 Subject: [PATCH 162/415] fix(test): FastAPI 0.87.0 - error fixes FastAPI 0.87.0 switched to the httpx library for their TestClient * allow_redirects is deprecated and replaced by 
follow_redirects Signed-off-by: moson-mo --- test/test_accounts_routes.py | 78 ++++++--------- test/test_auth_routes.py | 28 +++--- test/test_homepage.py | 9 +- test/test_packages_routes.py | 45 +++++---- test/test_pkgbase_routes.py | 163 +++++++++++++++++-------------- test/test_requests.py | 31 +++--- test/test_routes.py | 8 +- test/test_trusted_user_routes.py | 82 +++++++--------- 8 files changed, 218 insertions(+), 226 deletions(-) diff --git a/test/test_accounts_routes.py b/test/test_accounts_routes.py index f44fd44e..44226627 100644 --- a/test/test_accounts_routes.py +++ b/test/test_accounts_routes.py @@ -70,6 +70,9 @@ def client() -> TestClient: # Necessary for forged login CSRF protection on the login route. Set here # instead of only on the necessary requests for convenience. client.headers.update(TEST_REFERER) + + # disable redirects for our tests + client.follow_redirects = False yield client @@ -104,9 +107,7 @@ def test_get_passreset_authed_redirects(client: TestClient, user: User): assert sid is not None with client as request: - response = request.get( - "/passreset", cookies={"AURSID": sid}, allow_redirects=False - ) + response = request.get("/passreset", cookies={"AURSID": sid}) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/" @@ -140,7 +141,7 @@ def test_get_passreset_translation(client: TestClient): def test_get_passreset_with_resetkey(client: TestClient): with client as request: - response = request.get("/passreset", data={"resetkey": "abcd"}) + response = request.get("/passreset", params={"resetkey": "abcd"}) assert response.status_code == int(HTTPStatus.OK) @@ -153,7 +154,6 @@ def test_post_passreset_authed_redirects(client: TestClient, user: User): "/passreset", cookies={"AURSID": sid}, data={"user": "blah"}, - allow_redirects=False, ) assert response.status_code == int(HTTPStatus.SEE_OTHER) @@ -323,7 +323,7 @@ def post_register(request, **kwargs): for k, v in args.items(): data[k] = v - 
return request.post("/register", data=data, allow_redirects=False) + return request.post("/register", data=data) def test_post_register(client: TestClient): @@ -737,7 +737,7 @@ def test_get_account_edit_unauthorized(client: TestClient, user: User): endpoint = f"/account/{user2.Username}/edit" with client as request: # Try to edit `test2` while authenticated as `test`. - response = request.get(endpoint, cookies={"AURSID": sid}, allow_redirects=False) + response = request.get(endpoint, cookies={"AURSID": sid}) assert response.status_code == int(HTTPStatus.SEE_OTHER) expected = f"/account/{user2.Username}" @@ -755,7 +755,6 @@ def test_post_account_edit(client: TestClient, user: User): "/account/test/edit", cookies={"AURSID": sid}, data=post_data, - allow_redirects=False, ) assert response.status_code == int(HTTPStatus.OK) @@ -841,9 +840,7 @@ def test_post_account_edit_dev(client: TestClient, tu_user: User): endpoint = f"/account/{tu_user.Username}/edit" with client as request: - response = request.post( - endpoint, cookies={"AURSID": sid}, data=post_data, allow_redirects=False - ) + response = request.post(endpoint, cookies={"AURSID": sid}, data=post_data) assert response.status_code == int(HTTPStatus.OK) expected = "The account, test, " @@ -867,7 +864,6 @@ def test_post_account_edit_language(client: TestClient, user: User): "/account/test/edit", cookies={"AURSID": sid}, data=post_data, - allow_redirects=False, ) assert response.status_code == int(HTTPStatus.OK) @@ -897,7 +893,6 @@ def test_post_account_edit_timezone(client: TestClient, user: User): "/account/test/edit", cookies={"AURSID": sid}, data=post_data, - allow_redirects=False, ) assert response.status_code == int(HTTPStatus.OK) @@ -914,7 +909,6 @@ def test_post_account_edit_error_missing_password(client: TestClient, user: User "/account/test/edit", cookies={"AURSID": sid}, data=post_data, - allow_redirects=False, ) assert response.status_code == int(HTTPStatus.BAD_REQUEST) @@ -934,7 +928,6 @@ def 
test_post_account_edit_error_invalid_password(client: TestClient, user: User "/account/test/edit", cookies={"AURSID": sid}, data=post_data, - allow_redirects=False, ) assert response.status_code == int(HTTPStatus.BAD_REQUEST) @@ -1039,9 +1032,7 @@ def test_post_account_edit_error_unauthorized(client: TestClient, user: User): endpoint = f"/account/{user2.Username}/edit" with client as request: # Attempt to edit 'test2' while logged in as 'test'. - response = request.post( - endpoint, cookies={"AURSID": sid}, data=post_data, allow_redirects=False - ) + response = request.post(endpoint, cookies={"AURSID": sid}, data=post_data) assert response.status_code == int(HTTPStatus.SEE_OTHER) expected = f"/account/{user2.Username}" @@ -1064,7 +1055,6 @@ def test_post_account_edit_ssh_pub_key(client: TestClient, user: User): "/account/test/edit", cookies={"AURSID": sid}, data=post_data, - allow_redirects=False, ) assert response.status_code == int(HTTPStatus.OK) @@ -1077,7 +1067,6 @@ def test_post_account_edit_ssh_pub_key(client: TestClient, user: User): "/account/test/edit", cookies={"AURSID": sid}, data=post_data, - allow_redirects=False, ) assert response.status_code == int(HTTPStatus.OK) @@ -1099,7 +1088,6 @@ def test_post_account_edit_missing_ssh_pubkey(client: TestClient, user: User): "/account/test/edit", cookies={"AURSID": sid}, data=post_data, - allow_redirects=False, ) assert response.status_code == int(HTTPStatus.OK) @@ -1116,7 +1104,6 @@ def test_post_account_edit_missing_ssh_pubkey(client: TestClient, user: User): "/account/test/edit", cookies={"AURSID": sid}, data=post_data, - allow_redirects=False, ) assert response.status_code == int(HTTPStatus.OK) @@ -1133,9 +1120,7 @@ def test_post_account_edit_invalid_ssh_pubkey(client: TestClient, user: User): } cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - response = request.post( - "/account/test/edit", data=data, cookies=cookies, allow_redirects=False - ) + response = 
request.post("/account/test/edit", data=data, cookies=cookies) assert response.status_code == int(HTTPStatus.BAD_REQUEST) @@ -1157,7 +1142,6 @@ def test_post_account_edit_password(client: TestClient, user: User): "/account/test/edit", cookies={"AURSID": sid}, data=post_data, - allow_redirects=False, ) assert response.status_code == int(HTTPStatus.OK) @@ -1197,7 +1181,7 @@ def test_post_account_edit_other_user_as_user(client: TestClient, user: User): endpoint = f"/account/{user2.Username}/edit" with client as request: - resp = request.get(endpoint, cookies=cookies, allow_redirects=False) + resp = request.get(endpoint, cookies=cookies) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/account/{user2.Username}" @@ -1208,7 +1192,7 @@ def test_post_account_edit_self_type_as_tu(client: TestClient, tu_user: User): # We cannot see the Account Type field on our own edit page. with client as request: - resp = request.get(endpoint, cookies=cookies, allow_redirects=False) + resp = request.get(endpoint, cookies=cookies) assert resp.status_code == int(HTTPStatus.OK) assert "id_type" in resp.text @@ -1239,7 +1223,7 @@ def test_post_account_edit_other_user_type_as_tu( # As a TU, we can see the Account Type field for other users. with client as request: - resp = request.get(endpoint, cookies=cookies, allow_redirects=False) + resp = request.get(endpoint, cookies=cookies) assert resp.status_code == int(HTTPStatus.OK) assert "id_type" in resp.text @@ -1277,19 +1261,20 @@ def test_post_account_edit_other_user_suspend_as_tu(client: TestClient, tu_user: # apart from `tu_user`s during our testing. user_client = TestClient(app=app) user_client.headers.update(TEST_REFERER) + user_client.follow_redirects = False # Test that `user` can view their account edit page while logged in. 
user_cookies = {"AURSID": sid} with client as request: endpoint = f"/account/{user.Username}/edit" - resp = request.get(endpoint, cookies=user_cookies, allow_redirects=False) + resp = request.get(endpoint, cookies=user_cookies) assert resp.status_code == HTTPStatus.OK cookies = {"AURSID": tu_user.login(Request(), "testPassword")} assert cookies is not None # This is useless, we create the dict here ^ # As a TU, we can see the Account for other users. with client as request: - resp = request.get(endpoint, cookies=cookies, allow_redirects=False) + resp = request.get(endpoint, cookies=cookies) assert resp.status_code == int(HTTPStatus.OK) # As a TU, we can modify other user's account types. data = { @@ -1299,12 +1284,13 @@ def test_post_account_edit_other_user_suspend_as_tu(client: TestClient, tu_user: "passwd": "testPassword", } with client as request: - resp = request.post(endpoint, data=data, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.OK) # Test that `user` no longer has a session. with user_client as request: - resp = request.get(endpoint, cookies=user_cookies, allow_redirects=False) + resp = request.get(endpoint, cookies=user_cookies) assert resp.status_code == HTTPStatus.SEE_OTHER # Since user is now suspended, they should not be able to login. 
@@ -1341,9 +1327,7 @@ def test_get_account(client: TestClient, user: User): sid = user.login(request, "testPassword") with client as request: - response = request.get( - "/account/test", cookies={"AURSID": sid}, allow_redirects=False - ) + response = request.get("/account/test", cookies={"AURSID": sid}) assert response.status_code == int(HTTPStatus.OK) @@ -1353,16 +1337,14 @@ def test_get_account_not_found(client: TestClient, user: User): sid = user.login(request, "testPassword") with client as request: - response = request.get( - "/account/not_found", cookies={"AURSID": sid}, allow_redirects=False - ) + response = request.get("/account/not_found", cookies={"AURSID": sid}) assert response.status_code == int(HTTPStatus.NOT_FOUND) def test_get_account_unauthenticated(client: TestClient, user: User): with client as request: - response = request.get("/account/test", allow_redirects=False) + response = request.get("/account/test") assert response.status_code == int(HTTPStatus.UNAUTHORIZED) content = response.content.decode() @@ -1832,7 +1814,7 @@ def test_get_terms_of_service(client: TestClient, user: User): ) with client as request: - response = request.get("/tos", allow_redirects=False) + response = request.get("/tos") assert response.status_code == int(HTTPStatus.SEE_OTHER) request = Request() @@ -1842,12 +1824,12 @@ def test_get_terms_of_service(client: TestClient, user: User): # First of all, let's test that we get redirected to /tos # when attempting to browse authenticated without accepting terms. 
with client as request: - response = request.get("/", cookies=cookies, allow_redirects=False) + response = request.get("/", cookies=cookies) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/tos" with client as request: - response = request.get("/tos", cookies=cookies, allow_redirects=False) + response = request.get("/tos", cookies=cookies) assert response.status_code == int(HTTPStatus.OK) with db.begin(): @@ -1856,7 +1838,7 @@ def test_get_terms_of_service(client: TestClient, user: User): ) with client as request: - response = request.get("/tos", cookies=cookies, allow_redirects=False) + response = request.get("/tos", cookies=cookies) # We accepted the term, there's nothing left to accept. assert response.status_code == int(HTTPStatus.SEE_OTHER) @@ -1865,7 +1847,7 @@ def test_get_terms_of_service(client: TestClient, user: User): term.Revision = 2 with client as request: - response = request.get("/tos", cookies=cookies, allow_redirects=False) + response = request.get("/tos", cookies=cookies) # This time, we have a modified term Revision that hasn't # yet been agreed to via AcceptedTerm update. assert response.status_code == int(HTTPStatus.OK) @@ -1874,7 +1856,7 @@ def test_get_terms_of_service(client: TestClient, user: User): accepted_term.Revision = term.Revision with client as request: - response = request.get("/tos", cookies=cookies, allow_redirects=False) + response = request.get("/tos", cookies=cookies) # We updated the term revision, there's nothing left to accept. assert response.status_code == int(HTTPStatus.SEE_OTHER) @@ -1931,7 +1913,7 @@ def test_post_terms_of_service(client: TestClient, user: User): # Now, see that GET redirects us to / with no terms left to accept. 
with client as request: - response = request.get("/tos", cookies=cookies, allow_redirects=False) + response = request.get("/tos", cookies=cookies) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/" @@ -1946,7 +1928,7 @@ def test_account_comments_not_found(client: TestClient, user: User): def test_accounts_unauthorized(client: TestClient, user: User): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.get("/accounts", cookies=cookies, allow_redirects=False) + resp = request.get("/accounts", cookies=cookies) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == "/" diff --git a/test/test_auth_routes.py b/test/test_auth_routes.py index 87ad86f6..150625cd 100644 --- a/test/test_auth_routes.py +++ b/test/test_auth_routes.py @@ -33,6 +33,9 @@ def client() -> TestClient: # Necessary for forged login CSRF protection on the login route. Set here # instead of only on the necessary requests for convenience. client.headers.update(TEST_REFERER) + + # disable redirects for our tests + client.follow_redirects = False yield client @@ -58,21 +61,20 @@ def test_login_logout(client: TestClient, user: User): response = request.get("/login") assert response.status_code == int(HTTPStatus.OK) - response = request.post("/login", data=post_data, allow_redirects=False) + response = request.post("/login", data=post_data) assert response.status_code == int(HTTPStatus.SEE_OTHER) # Simulate following the redirect location from above's response. 
response = request.get(response.headers.get("location")) assert response.status_code == int(HTTPStatus.OK) - response = request.post("/logout", data=post_data, allow_redirects=False) + response = request.post("/logout", data=post_data) assert response.status_code == int(HTTPStatus.SEE_OTHER) response = request.post( "/logout", data=post_data, cookies={"AURSID": response.cookies.get("AURSID")}, - allow_redirects=False, ) assert response.status_code == int(HTTPStatus.SEE_OTHER) @@ -94,7 +96,7 @@ def test_login_email(client: TestClient, user: user): post_data = {"user": user.Email, "passwd": "testPassword", "next": "/"} with client as request: - resp = request.post("/login", data=post_data, allow_redirects=False) + resp = request.post("/login", data=post_data) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert "AURSID" in resp.cookies @@ -119,14 +121,14 @@ def test_insecure_login(getboolean: mock.Mock, client: TestClient, user: User): # Perform a login request with the data matching our user. with client as request: - response = request.post("/login", data=post_data, allow_redirects=False) + response = request.post("/login", data=post_data) # Make sure we got the expected status out of it. assert response.status_code == int(HTTPStatus.SEE_OTHER) # Let's check what we got in terms of cookies for AURSID. # Make sure that a secure cookie got passed to us. - cookie = next(c for c in response.cookies if c.name == "AURSID") + cookie = next(c for c in response.cookies.jar if c.name == "AURSID") assert cookie.secure is False assert cookie.has_nonstandard_attr("HttpOnly") is False assert cookie.has_nonstandard_attr("SameSite") is True @@ -160,14 +162,14 @@ def test_secure_login(getboolean: mock.Mock, client: TestClient, user: User): # Perform a login request with the data matching our user. 
with client as request: - response = request.post("/login", data=post_data, allow_redirects=False) + response = request.post("/login", data=post_data) # Make sure we got the expected status out of it. assert response.status_code == int(HTTPStatus.SEE_OTHER) # Let's check what we got in terms of cookies for AURSID. # Make sure that a secure cookie got passed to us. - cookie = next(c for c in response.cookies if c.name == "AURSID") + cookie = next(c for c in response.cookies.jar if c.name == "AURSID") assert cookie.secure is True assert cookie.has_nonstandard_attr("HttpOnly") is True assert cookie.has_nonstandard_attr("SameSite") is True @@ -186,7 +188,7 @@ def test_authenticated_login(client: TestClient, user: User): with client as request: # Try to login. - response = request.post("/login", data=post_data, allow_redirects=False) + response = request.post("/login", data=post_data) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/" @@ -194,9 +196,7 @@ def test_authenticated_login(client: TestClient, user: User): # when requesting GET /login as an authenticated user. # Now, let's verify that we receive 403 Forbidden when we # try to get /login as an authenticated user. - response = request.get( - "/login", cookies=response.cookies, allow_redirects=False - ) + response = request.get("/login", cookies=response.cookies) assert response.status_code == int(HTTPStatus.OK) assert "Logged-in as: test" in response.text @@ -205,7 +205,7 @@ def test_unauthenticated_logout_unauthorized(client: TestClient): with client as request: # Alright, let's verify that attempting to /logout when not # authenticated returns 401 Unauthorized. 
- response = request.post("/logout", allow_redirects=False) + response = request.post("/logout") assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location").startswith("/login") @@ -232,7 +232,7 @@ def test_login_remember_me(client: TestClient, user: User): } with client as request: - response = request.post("/login", data=post_data, allow_redirects=False) + response = request.post("/login", data=post_data) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert "AURSID" in response.cookies diff --git a/test/test_homepage.py b/test/test_homepage.py index 521f71c4..1aad30f7 100644 --- a/test/test_homepage.py +++ b/test/test_homepage.py @@ -253,7 +253,8 @@ def test_homepage_dashboard_requests(redis, packages, user): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - response = request.get("/", cookies=cookies) + request.cookies = cookies + response = request.get("/") assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -270,7 +271,8 @@ def test_homepage_dashboard_flagged_packages(redis, packages, user): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - response = request.get("/", cookies=cookies) + request.cookies = cookies + response = request.get("/") assert response.status_code == int(HTTPStatus.OK) # Check to see that the package showed up in the Flagged Packages table. @@ -293,7 +295,8 @@ def test_homepage_dashboard_flagged(user: User, user2: User, package: Package): # flagged co-maintained packages. 
comaint_cookies = {"AURSID": user2.login(Request(), "testPassword")} with client as request: - resp = request.get("/", cookies=comaint_cookies) + request.cookies = comaint_cookies + resp = request.get("/") assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) diff --git a/test/test_packages_routes.py b/test/test_packages_routes.py index 3b717783..29872cb8 100644 --- a/test/test_packages_routes.py +++ b/test/test_packages_routes.py @@ -65,7 +65,11 @@ def setup(db_test): @pytest.fixture def client() -> TestClient: """Yield a FastAPI TestClient.""" - yield TestClient(app=asgi.app) + client = TestClient(app=asgi.app) + + # disable redirects for our tests + client.follow_redirects = False + yield client def create_user(username: str) -> User: @@ -1142,7 +1146,6 @@ def test_packages_post_unknown_action(client: TestClient, user: User, package: P "/packages", data={"action": "unknown"}, cookies=cookies, - allow_redirects=False, ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) @@ -1159,7 +1162,6 @@ def test_packages_post_error(client: TestClient, user: User, package: Package): "/packages", data={"action": "stub"}, cookies=cookies, - allow_redirects=False, ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) @@ -1180,7 +1182,6 @@ def test_packages_post(client: TestClient, user: User, package: Package): "/packages", data={"action": "stub"}, cookies=cookies, - allow_redirects=False, ) assert resp.status_code == int(HTTPStatus.OK) @@ -1203,7 +1204,8 @@ def test_packages_post_unflag( # Don't supply any packages. post_data = {"action": "unflag", "IDs": []} with client as request: - resp = request.post("/packages", data=post_data, cookies=cookies) + request.cookies = cookies + resp = request.post("/packages", data=post_data) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "You did not select any packages to unflag." @@ -1212,7 +1214,8 @@ def test_packages_post_unflag( # Unflag the package as `user`. 
post_data = {"action": "unflag", "IDs": [package.ID]} with client as request: - resp = request.post("/packages", data=post_data, cookies=cookies) + request.cookies = cookies + resp = request.post("/packages", data=post_data) assert resp.status_code == int(HTTPStatus.OK) assert package.PackageBase.Flagger is None successes = get_successes(resp.text) @@ -1229,7 +1232,8 @@ def test_packages_post_unflag( maint_cookies = {"AURSID": maintainer.login(Request(), "testPassword")} post_data = {"action": "unflag", "IDs": [package.ID]} with client as request: - resp = request.post("/packages", data=post_data, cookies=maint_cookies) + request.cookies = maint_cookies + resp = request.post("/packages", data=post_data) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "You did not select any packages to unflag." @@ -1387,7 +1391,8 @@ def test_packages_post_disown_as_maintainer( # Try to run the disown action with no IDs; get an error. cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with client as request: - resp = request.post("/packages", data={"action": "disown"}, cookies=cookies) + request.cookies = cookies + resp = request.post("/packages", data={"action": "disown"}) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "You did not select any packages to disown." @@ -1396,9 +1401,8 @@ def test_packages_post_disown_as_maintainer( # Try to disown `package` without giving the confirm argument. 
with client as request: - resp = request.post( - "/packages", data={"action": "disown", "IDs": [package.ID]}, cookies=cookies - ) + request.cookies = cookies + resp = request.post("/packages", data={"action": "disown", "IDs": [package.ID]}) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) assert package.PackageBase.Maintainer is not None errors = get_errors(resp.text) @@ -1411,10 +1415,10 @@ def test_packages_post_disown_as_maintainer( # Now, try to disown `package` without credentials (as `user`). user_cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: + request.cookies = user_cookies resp = request.post( "/packages", data={"action": "disown", "IDs": [package.ID], "confirm": True}, - cookies=user_cookies, ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) assert package.PackageBase.Maintainer is not None @@ -1424,10 +1428,10 @@ def test_packages_post_disown_as_maintainer( # Now, let's really disown `package` as `maintainer`. with client as request: + request.cookies = cookies resp = request.post( "/packages", data={"action": "disown", "IDs": [package.ID], "confirm": True}, - cookies=cookies, ) assert package.PackageBase.Maintainer is None @@ -1463,9 +1467,8 @@ def test_packages_post_delete( # First, let's try to use the delete action with no packages IDs. user_cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post( - "/packages", data={"action": "delete"}, cookies=user_cookies - ) + request.cookies = user_cookies + resp = request.post("/packages", data={"action": "delete"}) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "You did not select any packages to delete." @@ -1473,10 +1476,10 @@ def test_packages_post_delete( # Now, let's try to delete real packages without supplying "confirm". 
with client as request: + request.cookies = user_cookies resp = request.post( "/packages", data={"action": "delete", "IDs": [package.ID]}, - cookies=user_cookies, ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) @@ -1488,10 +1491,10 @@ def test_packages_post_delete( # And again, with everything, but `user` doesn't have permissions. with client as request: + request.cookies = user_cookies resp = request.post( "/packages", data={"action": "delete", "IDs": [package.ID], "confirm": True}, - cookies=user_cookies, ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) @@ -1503,10 +1506,10 @@ def test_packages_post_delete( # an invalid package ID. tu_cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: + request.cookies = tu_cookies resp = request.post( "/packages", data={"action": "delete", "IDs": [0], "confirm": True}, - cookies=tu_cookies, ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) @@ -1516,10 +1519,10 @@ def test_packages_post_delete( # Whoo. Now, let's finally make a valid request as `tu_user` # to delete `package`. 
with client as request: + request.cookies = tu_cookies resp = request.post( "/packages", data={"action": "delete", "IDs": [package.ID], "confirm": True}, - cookies=tu_cookies, ) assert resp.status_code == int(HTTPStatus.OK) successes = get_successes(resp.text) @@ -1541,7 +1544,7 @@ def test_account_comments_unauthorized(client: TestClient, user: User): leverage existing fixtures.""" endpoint = f"/account/{user.Username}/comments" with client as request: - resp = request.get(endpoint, allow_redirects=False) + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location").startswith("/login") diff --git a/test/test_pkgbase_routes.py b/test/test_pkgbase_routes.py index 18c11626..dd92d72d 100644 --- a/test/test_pkgbase_routes.py +++ b/test/test_pkgbase_routes.py @@ -59,7 +59,11 @@ def setup(db_test): @pytest.fixture def client() -> TestClient: """Yield a FastAPI TestClient.""" - yield TestClient(app=asgi.app) + client = TestClient(app=asgi.app) + + # disable redirects for our tests + client.follow_redirects = False + yield client def create_user(username: str) -> User: @@ -245,7 +249,7 @@ def test_pkgbase_not_found(client: TestClient): def test_pkgbase_redirect(client: TestClient, package: Package): with client as request: - resp = request.get(f"/pkgbase/{package.Name}", allow_redirects=False) + resp = request.get(f"/pkgbase/{package.Name}") assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/packages/{package.Name}" @@ -256,7 +260,7 @@ def test_pkgbase(client: TestClient, package: Package): expected = [package.Name, second.Name] with client as request: - resp = request.get(f"/pkgbase/{package.Name}", allow_redirects=False) + resp = request.get(f"/pkgbase/{package.Name}") assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -287,7 +291,7 @@ def test_pkgbase_maintainer( ) with client as request: - resp = request.get(f"/pkgbase/{package.Name}") + 
resp = request.get(f"/pkgbase/{package.Name}", follow_redirects=True) assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -308,7 +312,7 @@ def test_pkgbase_voters(client: TestClient, tu_user: User, package: Package): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - resp = request.get(endpoint, cookies=cookies, allow_redirects=False) + resp = request.get(endpoint, cookies=cookies) assert resp.status_code == int(HTTPStatus.OK) # We should've gotten one link to the voter, tu_user. @@ -327,7 +331,7 @@ def test_pkgbase_voters_unauthorized(client: TestClient, user: User, package: Pa db.create(PackageVote, User=user, PackageBase=pkgbase, VoteTS=now) with client as request: - resp = request.get(endpoint, allow_redirects=False) + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" @@ -420,7 +424,7 @@ def test_pkgbase_comments( assert resp.headers.get("location")[:prefix_len] == expected_prefix with client as request: - resp = request.get(resp.headers.get("location")) + resp = request.get(resp.headers.get("location"), follow_redirects=True) assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -461,7 +465,7 @@ def test_pkgbase_comments( assert resp.status_code == int(HTTPStatus.SEE_OTHER) with client as request: - resp = request.get(resp.headers.get("location")) + resp = request.get(resp.headers.get("location"), follow_redirects=True) assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -527,7 +531,8 @@ def test_pkgbase_comment_delete( pkgbasename = package.PackageBase.Name endpoint = f"/pkgbase/{pkgbasename}/comments/{comment.ID}/delete" with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) expected = f"/pkgbase/{pkgbasename}" @@ -537,12 
+542,14 @@ def test_pkgbase_comment_delete( maint_cookies = {"AURSID": maintainer.login(Request(), "testPassword")} endpoint = f"/pkgbase/{pkgbasename}/comments/{comment.ID}/undelete" with client as request: - resp = request.post(endpoint, cookies=maint_cookies) + request.cookies = maint_cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.UNAUTHORIZED) # And move on to undeleting it. with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) @@ -670,7 +677,7 @@ def test_pkgbase_comaintainers_not_found(client: TestClient, maintainer: User): cookies = {"AURSID": maintainer.login(Request(), "testPassword")} endpoint = "/pkgbase/fake/comaintainers" with client as request: - resp = request.get(endpoint, cookies=cookies, allow_redirects=False) + resp = request.get(endpoint, cookies=cookies) assert resp.status_code == int(HTTPStatus.NOT_FOUND) @@ -678,7 +685,7 @@ def test_pkgbase_comaintainers_post_not_found(client: TestClient, maintainer: Us cookies = {"AURSID": maintainer.login(Request(), "testPassword")} endpoint = "/pkgbase/fake/comaintainers" with client as request: - resp = request.post(endpoint, cookies=cookies, allow_redirects=False) + resp = request.post(endpoint, cookies=cookies) assert resp.status_code == int(HTTPStatus.NOT_FOUND) @@ -689,7 +696,7 @@ def test_pkgbase_comaintainers_unauthorized( endpoint = f"/pkgbase/{pkgbase.Name}/comaintainers" cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.get(endpoint, cookies=cookies, allow_redirects=False) + resp = request.get(endpoint, cookies=cookies) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" @@ -701,7 +708,7 @@ def test_pkgbase_comaintainers_post_unauthorized( endpoint = f"/pkgbase/{pkgbase.Name}/comaintainers" cookies = {"AURSID": 
user.login(Request(), "testPassword")} with client as request: - resp = request.post(endpoint, cookies=cookies, allow_redirects=False) + resp = request.post(endpoint, cookies=cookies) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" @@ -713,9 +720,7 @@ def test_pkgbase_comaintainers_post_invalid_user( endpoint = f"/pkgbase/{pkgbase.Name}/comaintainers" cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with client as request: - resp = request.post( - endpoint, data={"users": "\nfake\n"}, cookies=cookies, allow_redirects=False - ) + resp = request.post(endpoint, data={"users": "\nfake\n"}, cookies=cookies) assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -737,7 +742,6 @@ def test_pkgbase_comaintainers( endpoint, data={"users": f"\n{user.Username}\n{maintainer.Username}\n"}, cookies=cookies, - allow_redirects=False, ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" @@ -748,7 +752,6 @@ def test_pkgbase_comaintainers( endpoint, data={"users": f"\n{user.Username}\n{maintainer.Username}\n"}, cookies=cookies, - allow_redirects=False, ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" @@ -757,7 +760,7 @@ def test_pkgbase_comaintainers( # let's perform a GET request to make sure that the backend produces # the user we added in the users textarea. with client as request: - resp = request.get(endpoint, cookies=cookies, allow_redirects=False) + resp = request.get(endpoint, cookies=cookies) assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -766,14 +769,12 @@ def test_pkgbase_comaintainers( # Finish off by removing all the comaintainers. 
with client as request: - resp = request.post( - endpoint, data={"users": str()}, cookies=cookies, allow_redirects=False - ) + resp = request.post(endpoint, data={"users": str()}, cookies=cookies) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" with client as request: - resp = request.get(endpoint, cookies=cookies, allow_redirects=False) + resp = request.get(endpoint, cookies=cookies) assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -856,7 +857,6 @@ def test_pkgbase_request_post_merge_not_found_error( "comments": "We want to merge this.", }, cookies=cookies, - allow_redirects=False, ) assert resp.status_code == int(HTTPStatus.OK) @@ -880,7 +880,6 @@ def test_pkgbase_request_post_merge_no_merge_into_error( "comments": "We want to merge this.", }, cookies=cookies, - allow_redirects=False, ) assert resp.status_code == int(HTTPStatus.OK) @@ -904,7 +903,6 @@ def test_pkgbase_request_post_merge_self_error( "comments": "We want to merge this.", }, cookies=cookies, - allow_redirects=False, ) assert resp.status_code == int(HTTPStatus.OK) @@ -927,26 +925,28 @@ def test_pkgbase_flag( # Get the flag page. with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.OK) # Now, let's check the /pkgbase/{name}/flag-comment route. flag_comment_endpoint = f"/pkgbase/{pkgbase.Name}/flag-comment" with client as request: - resp = request.get( - flag_comment_endpoint, cookies=cookies, allow_redirects=False - ) + request.cookies = cookies + resp = request.get(flag_comment_endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" # Try to flag it without a comment. 
with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) # Flag it with a valid comment. with client as request: - resp = request.post(endpoint, data={"comments": "Test"}, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint, data={"comments": "Test"}) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert pkgbase.Flagger == user assert pkgbase.FlaggerComment == "Test" @@ -957,15 +957,15 @@ def test_pkgbase_flag( # Now, let's check the /pkgbase/{name}/flag-comment route. flag_comment_endpoint = f"/pkgbase/{pkgbase.Name}/flag-comment" with client as request: - resp = request.get( - flag_comment_endpoint, cookies=cookies, allow_redirects=False - ) + request.cookies = cookies + resp = request.get(flag_comment_endpoint) assert resp.status_code == int(HTTPStatus.OK) # Now try to perform a get; we should be redirected because # it's already flagged. with client as request: - resp = request.get(endpoint, cookies=cookies, allow_redirects=False) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) with db.begin(): @@ -982,27 +982,29 @@ def test_pkgbase_flag( user2_cookies = {"AURSID": user2.login(Request(), "testPassword")} endpoint = f"/pkgbase/{pkgbase.Name}/unflag" with client as request: - resp = request.post(endpoint, cookies=user2_cookies) + request.cookies = user2_cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert pkgbase.Flagger == user # Now, test that the 'maintainer' user can. maint_cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with client as request: - resp = request.post(endpoint, cookies=maint_cookies) + request.cookies = maint_cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert pkgbase.Flagger is None # Flag it again. 
with client as request: - resp = request.post( - f"/pkgbase/{pkgbase.Name}/flag", data={"comments": "Test"}, cookies=cookies - ) + request.cookies = cookies + resp = request.post(f"/pkgbase/{pkgbase.Name}/flag", data={"comments": "Test"}) assert resp.status_code == int(HTTPStatus.SEE_OTHER) # Now, unflag it for real. with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert pkgbase.Flagger is None @@ -1113,7 +1115,7 @@ def test_pkgbase_disown_as_maint_with_comaint( maint_cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with client as request: resp = request.post( - endp, data=post_data, cookies=maint_cookies, allow_redirects=True + endp, data=post_data, cookies=maint_cookies, follow_redirects=True ) assert resp.status_code == int(HTTPStatus.OK) @@ -1145,52 +1147,62 @@ def test_pkgbase_disown( # GET as a normal user, which is rejected for lack of credentials. with client as request: - resp = request.get(endpoint, cookies=user_cookies, allow_redirects=False) + request.cookies = user_cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) # GET as a comaintainer. with client as request: - resp = request.get(endpoint, cookies=comaint_cookies, allow_redirects=False) + request.cookies = comaint_cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.OK) # Ensure that the comaintainer can see "Disown Package" link with client as request: - resp = request.get(pkgbase_endp, cookies=comaint_cookies) + request.cookies = comaint_cookies + resp = request.get(pkgbase_endp, follow_redirects=True) assert "Disown Package" in resp.text # GET as the maintainer. 
with client as request: - resp = request.get(endpoint, cookies=maint_cookies) + request.cookies = maint_cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.OK) # Ensure that the maintainer can see "Disown Package" link with client as request: - resp = request.get(pkgbase_endp, cookies=maint_cookies) + request.cookies = maint_cookies + resp = request.get(pkgbase_endp, follow_redirects=True) assert "Disown Package" in resp.text # POST as a normal user, which is rejected for lack of credentials. with client as request: - resp = request.post(endpoint, cookies=user_cookies) + request.cookies = user_cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) # POST as the comaintainer without "confirm". with client as request: - resp = request.post(endpoint, cookies=comaint_cookies) + request.cookies = comaint_cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) # POST as the maintainer without "confirm". with client as request: - resp = request.post(endpoint, cookies=maint_cookies) + request.cookies = maint_cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) # POST as the comaintainer with "confirm". with client as request: - resp = request.post(endpoint, data={"confirm": True}, cookies=comaint_cookies) + request.cookies = comaint_cookies + resp = request.post(endpoint, data={"confirm": True}) assert resp.status_code == int(HTTPStatus.SEE_OTHER) # POST as the maintainer with "confirm". with client as request: - resp = request.post(endpoint, data={"confirm": True}, cookies=maint_cookies) + request.cookies = maint_cookies + resp = request.post(endpoint, data={"confirm": True}) assert resp.status_code == int(HTTPStatus.SEE_OTHER) @@ -1207,21 +1219,21 @@ def test_pkgbase_adopt( # Adopt the package base. 
with client as request: - resp = request.post(endpoint, cookies=cookies, allow_redirects=False) + resp = request.post(endpoint, cookies=cookies) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert package.PackageBase.Maintainer == maintainer # Try to adopt it when it already has a maintainer; nothing changes. user_cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post(endpoint, cookies=user_cookies, allow_redirects=False) + resp = request.post(endpoint, cookies=user_cookies) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert package.PackageBase.Maintainer == maintainer # Steal the package as a TU. tu_cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - resp = request.post(endpoint, cookies=tu_cookies, allow_redirects=False) + resp = request.post(endpoint, cookies=tu_cookies) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert package.PackageBase.Maintainer == tu_user @@ -1233,7 +1245,7 @@ def test_pkgbase_delete_unauthorized(client: TestClient, user: User, package: Pa # Test GET. 
with client as request: - resp = request.get(endpoint, cookies=cookies, allow_redirects=False) + resp = request.get(endpoint, cookies=cookies) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" @@ -1308,7 +1320,6 @@ def test_packages_post_unknown_action(client: TestClient, user: User, package: P "/packages", data={"action": "unknown"}, cookies=cookies, - allow_redirects=False, ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) @@ -1325,7 +1336,6 @@ def test_packages_post_error(client: TestClient, user: User, package: Package): "/packages", data={"action": "stub"}, cookies=cookies, - allow_redirects=False, ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) @@ -1346,7 +1356,6 @@ def test_packages_post(client: TestClient, user: User, package: Package): "/packages", data={"action": "stub"}, cookies=cookies, - allow_redirects=False, ) assert resp.status_code == int(HTTPStatus.OK) @@ -1521,7 +1530,7 @@ def test_pkgbase_merge_post( def test_pkgbase_keywords(client: TestClient, user: User, package: Package): endpoint = f"/pkgbase/{package.PackageBase.Name}" with client as request: - resp = request.get(endpoint) + resp = request.get(endpoint, follow_redirects=True) assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -1532,13 +1541,16 @@ def test_pkgbase_keywords(client: TestClient, user: User, package: Package): cookies = {"AURSID": maint.login(Request(), "testPassword")} post_endpoint = f"{endpoint}/keywords" with client as request: + request.cookies = cookies resp = request.post( - post_endpoint, data={"keywords": "abc test"}, cookies=cookies + post_endpoint, + data={"keywords": "abc test"}, ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) with client as request: - resp = request.get(resp.headers.get("location")) + request.cookies = {} + resp = request.get(resp.headers.get("location"), follow_redirects=True) assert resp.status_code == int(HTTPStatus.OK) root = 
parse_root(resp.text) @@ -1552,7 +1564,8 @@ def test_pkgbase_keywords(client: TestClient, user: User, package: Package): def test_pkgbase_empty_keywords(client: TestClient, user: User, package: Package): endpoint = f"/pkgbase/{package.PackageBase.Name}" with client as request: - resp = request.get(endpoint) + request.cookies = {} + resp = request.get(endpoint, follow_redirects=True) assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -1563,15 +1576,16 @@ def test_pkgbase_empty_keywords(client: TestClient, user: User, package: Package cookies = {"AURSID": maint.login(Request(), "testPassword")} post_endpoint = f"{endpoint}/keywords" with client as request: + request.cookies = cookies resp = request.post( post_endpoint, data={"keywords": "abc test foo bar "}, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) with client as request: - resp = request.get(resp.headers.get("location")) + request.cookies = {} + resp = request.get(resp.headers.get("location"), follow_redirects=True) assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -1608,12 +1622,12 @@ def test_independent_user_unflag(client: TestClient, user: User, package: Packag pkgbase = package.PackageBase cookies = {"AURSID": flagger.login(Request(), "testPassword")} with client as request: + request.cookies = cookies endp = f"/pkgbase/{pkgbase.Name}/flag" response = request.post( endp, data={"comments": "This thing needs a flag!"}, - cookies=cookies, - allow_redirects=True, + follow_redirects=True, ) assert response.status_code == HTTPStatus.OK @@ -1622,7 +1636,8 @@ def test_independent_user_unflag(client: TestClient, user: User, package: Packag # page when browsing as that `flagger` user. 
with client as request: endp = f"/pkgbase/{pkgbase.Name}" - response = request.get(endp, cookies=cookies, allow_redirects=True) + request.cookies = cookies + response = request.get(endp, follow_redirects=True) assert response.status_code == HTTPStatus.OK # Assert that the "Unflag package" link appears in the DOM. @@ -1633,7 +1648,8 @@ def test_independent_user_unflag(client: TestClient, user: User, package: Packag # Now, unflag the package by "clicking" the "Unflag package" link. with client as request: endp = f"/pkgbase/{pkgbase.Name}/unflag" - response = request.post(endp, cookies=cookies, allow_redirects=True) + request.cookies = cookies + response = request.post(endp, follow_redirects=True) assert response.status_code == HTTPStatus.OK # For the last time, let's check the GET response. The package should @@ -1641,7 +1657,8 @@ def test_independent_user_unflag(client: TestClient, user: User, package: Packag # should be missing. with client as request: endp = f"/pkgbase/{pkgbase.Name}" - response = request.get(endp, cookies=cookies, allow_redirects=True) + request.cookies = cookies + response = request.get(endp, follow_redirects=True) assert response.status_code == HTTPStatus.OK # Assert that the "Unflag package" link does not appear in the DOM. 
diff --git a/test/test_requests.py b/test/test_requests.py index 6475fae6..1d681d58 100644 --- a/test/test_requests.py +++ b/test/test_requests.py @@ -29,7 +29,11 @@ def setup(db_test) -> None: @pytest.fixture def client() -> TestClient: """Yield a TestClient.""" - yield TestClient(app=asgi.app) + client = TestClient(app=asgi.app) + + # disable redirects for our tests + client.follow_redirects = False + yield client def create_user(username: str, email: str) -> User: @@ -321,7 +325,8 @@ def test_request_post_deletion_autoaccept( endpoint = f"/pkgbase/{pkgbase.Name}/request" data = {"comments": "Test request.", "type": "deletion"} with client as request: - resp = request.post(endpoint, data=data, cookies=auser.cookies) + request.cookies = auser.cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.SEE_OTHER) pkgreq = ( @@ -642,7 +647,8 @@ def test_request_post_orphan_autoaccept( "comments": "Test request.", } with client as request: - resp = request.post(endpoint, data=data, cookies=auser.cookies) + request.cookies = auser.cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.SEE_OTHER) pkgreq = pkgbase.requests.first() @@ -715,7 +721,7 @@ def test_pkgreq_by_id_not_found(): def test_requests_unauthorized(client: TestClient): with client as request: - resp = request.get("/requests", allow_redirects=False) + resp = request.get("/requests") assert resp.status_code == int(HTTPStatus.SEE_OTHER) @@ -879,9 +885,7 @@ def test_requests_selfmade( def test_requests_close(client: TestClient, user: User, pkgreq: PackageRequest): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.get( - f"/requests/{pkgreq.ID}/close", cookies=cookies, allow_redirects=False - ) + resp = request.get(f"/requests/{pkgreq.ID}/close", cookies=cookies) assert resp.status_code == int(HTTPStatus.OK) @@ -890,9 +894,7 @@ def test_requests_close_unauthorized( ): cookies = {"AURSID": 
maintainer.login(Request(), "testPassword")} with client as request: - resp = request.get( - f"/requests/{pkgreq.ID}/close", cookies=cookies, allow_redirects=False - ) + resp = request.get(f"/requests/{pkgreq.ID}/close", cookies=cookies) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == "/" @@ -906,7 +908,6 @@ def test_requests_close_post_unauthorized( f"/requests/{pkgreq.ID}/close", data={"reason": ACCEPTED_ID}, cookies=cookies, - allow_redirects=False, ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == "/" @@ -915,9 +916,7 @@ def test_requests_close_post_unauthorized( def test_requests_close_post(client: TestClient, user: User, pkgreq: PackageRequest): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post( - f"/requests/{pkgreq.ID}/close", cookies=cookies, allow_redirects=False - ) + resp = request.post(f"/requests/{pkgreq.ID}/close", cookies=cookies) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert pkgreq.Status == REJECTED_ID @@ -930,9 +929,7 @@ def test_requests_close_post_rejected( ): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post( - f"/requests/{pkgreq.ID}/close", cookies=cookies, allow_redirects=False - ) + resp = request.post(f"/requests/{pkgreq.ID}/close", cookies=cookies) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert pkgreq.Status == REJECTED_ID diff --git a/test/test_routes.py b/test/test_routes.py index 78b0a65b..b4bc30ee 100644 --- a/test/test_routes.py +++ b/test/test_routes.py @@ -20,7 +20,11 @@ def setup(db_test): @pytest.fixture def client() -> TestClient: - yield TestClient(app=app) + client = TestClient(app=app) + + # disable redirects for our tests + client.follow_redirects = False + yield client @pytest.fixture @@ -66,7 +70,7 @@ def test_favicon(client: TestClient): """Test the favicon route at '/favicon.ico'.""" with client 
as request: response1 = request.get("/static/images/favicon.ico") - response2 = request.get("/favicon.ico") + response2 = request.get("/favicon.ico", follow_redirects=True) assert response1.status_code == int(HTTPStatus.OK) assert response1.content == response2.content diff --git a/test/test_trusted_user_routes.py b/test/test_trusted_user_routes.py index 203008e3..dc468808 100644 --- a/test/test_trusted_user_routes.py +++ b/test/test_trusted_user_routes.py @@ -81,7 +81,11 @@ def setup(db_test): def client(): from aurweb.asgi import app - yield TestClient(app=app) + client = TestClient(app=app) + + # disable redirects for our tests + client.follow_redirects = False + yield client @pytest.fixture @@ -151,7 +155,7 @@ def proposal(user, tu_user): def test_tu_index_guest(client): headers = {"referer": config.get("options", "aur_location") + "/tu"} with client as request: - response = request.get("/tu", allow_redirects=False, headers=headers) + response = request.get("/tu", headers=headers) assert response.status_code == int(HTTPStatus.SEE_OTHER) params = filters.urlencode({"next": "/tu"}) @@ -162,7 +166,7 @@ def test_tu_index_unauthorized(client: TestClient, user: User): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: # Login as a normal user, not a TU. - response = request.get("/tu", cookies=cookies, allow_redirects=False) + response = request.get("/tu", cookies=cookies) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/" @@ -173,7 +177,7 @@ def test_tu_empty_index(client, tu_user): # Make a default get request to /tu. cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - response = request.get("/tu", cookies=cookies, allow_redirects=False) + response = request.get("/tu", cookies=cookies) assert response.status_code == int(HTTPStatus.OK) # Parse lxml root. 
@@ -226,7 +230,6 @@ def test_tu_index(client, tu_user): "/tu", cookies=cookies, params={"cby": "BAD!", "pby": "blah"}, - allow_redirects=False, ) assert response.status_code == int(HTTPStatus.OK) @@ -292,7 +295,7 @@ def test_tu_index(client, tu_user): def test_tu_stats(client: TestClient, tu_user: User): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - response = request.get("/tu", cookies=cookies, allow_redirects=False) + response = request.get("/tu", cookies=cookies) assert response.status_code == HTTPStatus.OK root = parse_root(response.text) @@ -313,7 +316,7 @@ def test_tu_stats(client: TestClient, tu_user: User): tu_user.InactivityTS = time.utcnow() with client as request: - response = request.get("/tu", cookies=cookies, allow_redirects=False) + response = request.get("/tu", cookies=cookies) assert response.status_code == HTTPStatus.OK root = parse_root(response.text) @@ -361,7 +364,7 @@ def test_tu_index_table_paging(client, tu_user): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - response = request.get("/tu", cookies=cookies, allow_redirects=False) + response = request.get("/tu", cookies=cookies) assert response.status_code == int(HTTPStatus.OK) # Parse lxml.etree root. @@ -391,9 +394,7 @@ def test_tu_index_table_paging(client, tu_user): # Now, get the next page of current votes. 
offset = 10 # Specify coff=10 with client as request: - response = request.get( - "/tu", cookies=cookies, params={"coff": offset}, allow_redirects=False - ) + response = request.get("/tu", cookies=cookies, params={"coff": offset}) assert response.status_code == int(HTTPStatus.OK) old_rows = rows @@ -420,9 +421,7 @@ def test_tu_index_table_paging(client, tu_user): offset = 20 # Specify coff=10 with client as request: - response = request.get( - "/tu", cookies=cookies, params={"coff": offset}, allow_redirects=False - ) + response = request.get("/tu", cookies=cookies, params={"coff": offset}) assert response.status_code == int(HTTPStatus.OK) # Do it again, we only have five left. @@ -471,7 +470,7 @@ def test_tu_index_sorting(client, tu_user): # Make a default request to /tu. cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - response = request.get("/tu", cookies=cookies, allow_redirects=False) + response = request.get("/tu", cookies=cookies) assert response.status_code == int(HTTPStatus.OK) # Get lxml handles of the document. @@ -498,9 +497,7 @@ def test_tu_index_sorting(client, tu_user): # Make another request; one that sorts the current votes # in ascending order instead of the default descending order. with client as request: - response = request.get( - "/tu", cookies=cookies, params={"cby": "asc"}, allow_redirects=False - ) + response = request.get("/tu", cookies=cookies, params={"cby": "asc"}) assert response.status_code == int(HTTPStatus.OK) # Get lxml handles of the document. 
@@ -573,7 +570,8 @@ def test_tu_index_last_votes( def test_tu_proposal_not_found(client, tu_user): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - response = request.get("/tu", params={"id": 1}, cookies=cookies) + request.cookies = cookies + response = request.get("/tu", params={"id": 1}, follow_redirects=True) assert response.status_code == int(HTTPStatus.NOT_FOUND) @@ -583,14 +581,12 @@ def test_tu_proposal_unauthorized( cookies = {"AURSID": user.login(Request(), "testPassword")} endpoint = f"/tu/{proposal[2].ID}" with client as request: - response = request.get(endpoint, cookies=cookies, allow_redirects=False) + response = request.get(endpoint, cookies=cookies) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/tu" with client as request: - response = request.post( - endpoint, cookies=cookies, data={"decision": False}, allow_redirects=False - ) + response = request.post(endpoint, cookies=cookies, data={"decision": False}) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/tu" @@ -606,7 +602,9 @@ def test_tu_running_proposal( proposal_id = voteinfo.ID cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - response = request.get(f"/tu/{proposal_id}", cookies=cookies) + response = request.get( + f"/tu/{proposal_id}", cookies=cookies, follow_redirects=True + ) assert response.status_code == int(HTTPStatus.OK) # Alright, now let's continue on to verifying some markup. @@ -676,7 +674,9 @@ def test_tu_running_proposal( # Make another request now that we've voted. with client as request: - response = request.get("/tu", params={"id": voteinfo.ID}, cookies=cookies) + response = request.get( + "/tu", params={"id": voteinfo.ID}, cookies=cookies, follow_redirects=True + ) assert response.status_code == int(HTTPStatus.OK) # Parse our new root. 
@@ -734,9 +734,7 @@ def test_tu_proposal_vote_not_found(client, tu_user): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: data = {"decision": "Yes"} - response = request.post( - "/tu/1", cookies=cookies, data=data, allow_redirects=False - ) + response = request.post("/tu/1", cookies=cookies, data=data) assert response.status_code == int(HTTPStatus.NOT_FOUND) @@ -777,9 +775,7 @@ def test_tu_proposal_vote_unauthorized( cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: data = {"decision": "Yes"} - response = request.post( - f"/tu/{voteinfo.ID}", cookies=cookies, data=data, allow_redirects=False - ) + response = request.post(f"/tu/{voteinfo.ID}", cookies=cookies, data=data) assert response.status_code == int(HTTPStatus.UNAUTHORIZED) root = parse_root(response.text) @@ -788,9 +784,7 @@ def test_tu_proposal_vote_unauthorized( with client as request: data = {"decision": "Yes"} - response = request.get( - f"/tu/{voteinfo.ID}", cookies=cookies, data=data, allow_redirects=False - ) + response = request.get(f"/tu/{voteinfo.ID}", cookies=cookies, params=data) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -808,9 +802,7 @@ def test_tu_proposal_vote_cant_self_vote(client, proposal): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: data = {"decision": "Yes"} - response = request.post( - f"/tu/{voteinfo.ID}", cookies=cookies, data=data, allow_redirects=False - ) + response = request.post(f"/tu/{voteinfo.ID}", cookies=cookies, data=data) assert response.status_code == int(HTTPStatus.BAD_REQUEST) root = parse_root(response.text) @@ -819,9 +811,7 @@ def test_tu_proposal_vote_cant_self_vote(client, proposal): with client as request: data = {"decision": "Yes"} - response = request.get( - f"/tu/{voteinfo.ID}", cookies=cookies, data=data, allow_redirects=False - ) + response = request.get(f"/tu/{voteinfo.ID}", cookies=cookies, 
params=data) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -840,9 +830,7 @@ def test_tu_proposal_vote_already_voted(client, proposal): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: data = {"decision": "Yes"} - response = request.post( - f"/tu/{voteinfo.ID}", cookies=cookies, data=data, allow_redirects=False - ) + response = request.post(f"/tu/{voteinfo.ID}", cookies=cookies, data=data) assert response.status_code == int(HTTPStatus.BAD_REQUEST) root = parse_root(response.text) @@ -851,9 +839,7 @@ def test_tu_proposal_vote_already_voted(client, proposal): with client as request: data = {"decision": "Yes"} - response = request.get( - f"/tu/{voteinfo.ID}", cookies=cookies, data=data, allow_redirects=False - ) + response = request.get(f"/tu/{voteinfo.ID}", cookies=cookies, params=data) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -884,12 +870,12 @@ def test_tu_addvote_unauthorized( ): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - response = request.get("/addvote", cookies=cookies, allow_redirects=False) + response = request.get("/addvote", cookies=cookies) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/tu" with client as request: - response = request.post("/addvote", cookies=cookies, allow_redirects=False) + response = request.post("/addvote", cookies=cookies) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/tu" From a832b3cddb999f8b31a54b111ae6340c64f07cd0 Mon Sep 17 00:00:00 2001 From: moson-mo Date: Thu, 24 Nov 2022 22:43:31 +0100 Subject: [PATCH 163/415] fix(test): FastAPI 0.87.0 - warning fixes FastAPI 0.87.0 switched to the httpx library for their TestClient * cookies need to be defined on the request instance instead of method calls Signed-off-by: moson-mo --- test/test_accounts_routes.py | 263 
+++++++++++++++++++------------ test/test_auth_routes.py | 9 +- test/test_git_archives.py | 3 +- test/test_homepage.py | 6 +- test/test_html.py | 15 +- test/test_packages_routes.py | 59 +++---- test/test_pkgbase_routes.py | 200 ++++++++++++++--------- test/test_requests.py | 74 ++++++--- test/test_routes.py | 4 +- test/test_trusted_user_routes.py | 111 ++++++++----- 10 files changed, 463 insertions(+), 281 deletions(-) diff --git a/test/test_accounts_routes.py b/test/test_accounts_routes.py index 44226627..d3ddb174 100644 --- a/test/test_accounts_routes.py +++ b/test/test_accounts_routes.py @@ -107,7 +107,8 @@ def test_get_passreset_authed_redirects(client: TestClient, user: User): assert sid is not None with client as request: - response = request.get("/passreset", cookies={"AURSID": sid}) + request.cookies = {"AURSID": sid} + response = request.get("/passreset") assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/" @@ -122,7 +123,8 @@ def test_get_passreset(client: TestClient): def test_get_passreset_translation(client: TestClient): # Test that translation works; set it to de. with client as request: - response = request.get("/passreset", cookies={"AURLANG": "de"}) + request.cookies = {"AURLANG": "de"} + response = request.get("/passreset") # The header title should be translated. assert "Passwort zurücksetzen" in response.text @@ -136,7 +138,8 @@ def test_get_passreset_translation(client: TestClient): # Restore english. 
with client as request: - response = request.get("/passreset", cookies={"AURLANG": "en"}) + request.cookies = {"AURLANG": "en"} + response = request.get("/passreset") def test_get_passreset_with_resetkey(client: TestClient): @@ -150,9 +153,9 @@ def test_post_passreset_authed_redirects(client: TestClient, user: User): assert sid is not None with client as request: + request.cookies = {"AURSID": sid} response = request.post( "/passreset", - cookies={"AURSID": sid}, data={"user": "blah"}, ) @@ -652,7 +655,8 @@ def test_get_account_edit_tu_as_tu(client: TestClient, tu_user: User): endpoint = f"/account/{user2.Username}/edit" with client as request: - response = request.get(endpoint, cookies=cookies) + request.cookies = cookies + response = request.get(endpoint) assert response.status_code == int(HTTPStatus.OK) # Verify that we have an account type selection and that the @@ -677,7 +681,8 @@ def test_get_account_edit_as_tu(client: TestClient, tu_user: User): endpoint = f"/account/{user2.Username}/edit" with client as request: - response = request.get(endpoint, cookies=cookies) + request.cookies = cookies + response = request.get(endpoint) assert response.status_code == int(HTTPStatus.OK) # Verify that we have an account type selection and that the @@ -700,7 +705,8 @@ def test_get_account_edit_type(client: TestClient, user: User): endpoint = f"/account/{user.Username}/edit" with client as request: - response = request.get(endpoint, cookies=cookies) + request.cookies = cookies + response = request.get(endpoint) assert response.status_code == int(HTTPStatus.OK) assert "id_type" not in response.text @@ -713,7 +719,8 @@ def test_get_account_edit_type_as_tu(client: TestClient, tu_user: User): endpoint = f"/account/{user2.Username}/edit" with client as request: - response = request.get(endpoint, cookies=cookies) + request.cookies = cookies + response = request.get(endpoint) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -737,7 +744,8 @@ 
def test_get_account_edit_unauthorized(client: TestClient, user: User): endpoint = f"/account/{user2.Username}/edit" with client as request: # Try to edit `test2` while authenticated as `test`. - response = request.get(endpoint, cookies={"AURSID": sid}) + request.cookies = {"AURSID": sid} + response = request.get(endpoint) assert response.status_code == int(HTTPStatus.SEE_OTHER) expected = f"/account/{user2.Username}" @@ -751,9 +759,9 @@ def test_post_account_edit(client: TestClient, user: User): post_data = {"U": "test", "E": "test666@example.org", "passwd": "testPassword"} with client as request: + request.cookies = {"AURSID": sid} response = request.post( "/account/test/edit", - cookies={"AURSID": sid}, data=post_data, ) @@ -777,7 +785,8 @@ def test_post_account_edit_type_as_tu(client: TestClient, tu_user: User): "passwd": "testPassword", } with client as request: - resp = request.post(endpoint, data=data, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.OK) @@ -795,7 +804,8 @@ def test_post_account_edit_type_as_dev(client: TestClient, tu_user: User): "passwd": "testPassword", } with client as request: - resp = request.post(endpoint, data=data, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.OK) assert user2.AccountTypeID == at.DEVELOPER_ID @@ -814,7 +824,8 @@ def test_post_account_edit_invalid_type_as_tu(client: TestClient, tu_user: User) "passwd": "testPassword", } with client as request: - resp = request.post(endpoint, data=data, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) assert user2.AccountTypeID == at.USER_ID @@ -840,7 +851,8 @@ def test_post_account_edit_dev(client: TestClient, tu_user: User): endpoint = f"/account/{tu_user.Username}/edit" with client as request: - response = 
request.post(endpoint, cookies={"AURSID": sid}, data=post_data) + request.cookies = {"AURSID": sid} + response = request.post(endpoint, data=post_data) assert response.status_code == int(HTTPStatus.OK) expected = "The account, test, " @@ -860,9 +872,9 @@ def test_post_account_edit_language(client: TestClient, user: User): } with client as request: + request.cookies = {"AURSID": sid} response = request.post( "/account/test/edit", - cookies={"AURSID": sid}, data=post_data, ) @@ -889,9 +901,9 @@ def test_post_account_edit_timezone(client: TestClient, user: User): } with client as request: + request.cookies = {"AURSID": sid} response = request.post( "/account/test/edit", - cookies={"AURSID": sid}, data=post_data, ) @@ -905,9 +917,9 @@ def test_post_account_edit_error_missing_password(client: TestClient, user: User post_data = {"U": "test", "E": "test@example.org", "TZ": "CET", "passwd": ""} with client as request: + request.cookies = {"AURSID": sid} response = request.post( "/account/test/edit", - cookies={"AURSID": sid}, data=post_data, ) @@ -924,9 +936,9 @@ def test_post_account_edit_error_invalid_password(client: TestClient, user: User post_data = {"U": "test", "E": "test@example.org", "TZ": "CET", "passwd": "invalid"} with client as request: + request.cookies = {"AURSID": sid} response = request.post( "/account/test/edit", - cookies={"AURSID": sid}, data=post_data, ) @@ -945,9 +957,8 @@ def test_post_account_edit_suspend_unauthorized(client: TestClient, user: User): "passwd": "testPassword", } with client as request: - resp = request.post( - f"/account/{user.Username}/edit", data=post_data, cookies=cookies - ) + request.cookies = cookies + resp = request.post(f"/account/{user.Username}/edit", data=post_data) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) @@ -968,9 +979,8 @@ def test_post_account_edit_inactivity(client: TestClient, user: User): "passwd": "testPassword", } with client as request: - resp = request.post( - 
f"/account/{user.Username}/edit", data=post_data, cookies=cookies - ) + request.cookies = cookies + resp = request.post(f"/account/{user.Username}/edit", data=post_data) assert resp.status_code == int(HTTPStatus.OK) # Make sure the user record got updated correctly. @@ -978,9 +988,8 @@ def test_post_account_edit_inactivity(client: TestClient, user: User): post_data.update({"J": False}) with client as request: - resp = request.post( - f"/account/{user.Username}/edit", data=post_data, cookies=cookies - ) + request.cookies = cookies + resp = request.post(f"/account/{user.Username}/edit", data=post_data) assert resp.status_code == int(HTTPStatus.OK) assert user.InactivityTS == 0 @@ -1000,7 +1009,8 @@ def test_post_account_edit_suspended(client: TestClient, user: User): } endpoint = f"/account/{user.Username}/edit" with client as request: - resp = request.post(endpoint, data=post_data, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint, data=post_data) assert resp.status_code == int(HTTPStatus.OK) # Make sure the user record got updated correctly. @@ -1032,7 +1042,8 @@ def test_post_account_edit_error_unauthorized(client: TestClient, user: User): endpoint = f"/account/{user2.Username}/edit" with client as request: # Attempt to edit 'test2' while logged in as 'test'. 
- response = request.post(endpoint, cookies={"AURSID": sid}, data=post_data) + request.cookies = {"AURSID": sid} + response = request.post(endpoint, data=post_data) assert response.status_code == int(HTTPStatus.SEE_OTHER) expected = f"/account/{user2.Username}" @@ -1051,9 +1062,9 @@ def test_post_account_edit_ssh_pub_key(client: TestClient, user: User): } with client as request: + request.cookies = {"AURSID": sid} response = request.post( "/account/test/edit", - cookies={"AURSID": sid}, data=post_data, ) @@ -1063,9 +1074,9 @@ def test_post_account_edit_ssh_pub_key(client: TestClient, user: User): post_data["PK"] = make_ssh_pubkey() with client as request: + request.cookies = {"AURSID": sid} response = request.post( "/account/test/edit", - cookies={"AURSID": sid}, data=post_data, ) @@ -1084,9 +1095,9 @@ def test_post_account_edit_missing_ssh_pubkey(client: TestClient, user: User): } with client as request: + request.cookies = {"AURSID": sid} response = request.post( "/account/test/edit", - cookies={"AURSID": sid}, data=post_data, ) @@ -1100,9 +1111,9 @@ def test_post_account_edit_missing_ssh_pubkey(client: TestClient, user: User): } with client as request: + request.cookies = {"AURSID": sid} response = request.post( "/account/test/edit", - cookies={"AURSID": sid}, data=post_data, ) @@ -1120,7 +1131,8 @@ def test_post_account_edit_invalid_ssh_pubkey(client: TestClient, user: User): } cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - response = request.post("/account/test/edit", data=data, cookies=cookies) + request.cookies = cookies + response = request.post("/account/test/edit", data=data) assert response.status_code == int(HTTPStatus.BAD_REQUEST) @@ -1138,9 +1150,9 @@ def test_post_account_edit_password(client: TestClient, user: User): } with client as request: + request.cookies = {"AURSID": sid} response = request.post( "/account/test/edit", - cookies={"AURSID": sid}, data=post_data, ) @@ -1154,7 +1166,8 @@ def 
test_post_account_edit_self_type_as_user(client: TestClient, user: User): endpoint = f"/account/{user.Username}/edit" with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.OK) assert "id_type" not in resp.text @@ -1165,7 +1178,8 @@ def test_post_account_edit_self_type_as_user(client: TestClient, user: User): "passwd": "testPassword", } with client as request: - resp = request.post(endpoint, data=data, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) @@ -1181,7 +1195,8 @@ def test_post_account_edit_other_user_as_user(client: TestClient, user: User): endpoint = f"/account/{user2.Username}/edit" with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/account/{user2.Username}" @@ -1192,7 +1207,8 @@ def test_post_account_edit_self_type_as_tu(client: TestClient, tu_user: User): # We cannot see the Account Type field on our own edit page. with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.OK) assert "id_type" in resp.text @@ -1204,7 +1220,8 @@ def test_post_account_edit_self_type_as_tu(client: TestClient, tu_user: User): "passwd": "testPassword", } with client as request: - resp = request.post(endpoint, data=data, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.OK) assert tu_user.AccountTypeID == USER_ID @@ -1223,7 +1240,8 @@ def test_post_account_edit_other_user_type_as_tu( # As a TU, we can see the Account Type field for other users. 
with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.OK) assert "id_type" in resp.text @@ -1234,8 +1252,10 @@ def test_post_account_edit_other_user_type_as_tu( "T": TRUSTED_USER_ID, "passwd": "testPassword", } + with client as request: - resp = request.post(endpoint, data=data, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.OK) # Let's make sure the DB got updated properly. @@ -1267,14 +1287,16 @@ def test_post_account_edit_other_user_suspend_as_tu(client: TestClient, tu_user: user_cookies = {"AURSID": sid} with client as request: endpoint = f"/account/{user.Username}/edit" - resp = request.get(endpoint, cookies=user_cookies) + request.cookies = user_cookies + resp = request.get(endpoint) assert resp.status_code == HTTPStatus.OK cookies = {"AURSID": tu_user.login(Request(), "testPassword")} assert cookies is not None # This is useless, we create the dict here ^ # As a TU, we can see the Account for other users. with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.OK) # As a TU, we can modify other user's account types. data = { @@ -1290,7 +1312,8 @@ def test_post_account_edit_other_user_suspend_as_tu(client: TestClient, tu_user: # Test that `user` no longer has a session. with user_client as request: - resp = request.get(endpoint, cookies=user_cookies) + request.cookies = user_cookies + resp = request.get(endpoint) assert resp.status_code == HTTPStatus.SEE_OTHER # Since user is now suspended, they should not be able to login. @@ -1314,7 +1337,8 @@ def test_post_account_edit_other_user_type_as_tu_invalid_type( # As a TU, we can modify other user's account types. 
data = {"U": user2.Username, "E": user2.Email, "T": 0, "passwd": "testPassword"} with client as request: - resp = request.post(endpoint, data=data, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) @@ -1327,7 +1351,8 @@ def test_get_account(client: TestClient, user: User): sid = user.login(request, "testPassword") with client as request: - response = request.get("/account/test", cookies={"AURSID": sid}) + request.cookies = {"AURSID": sid} + response = request.get("/account/test") assert response.status_code == int(HTTPStatus.OK) @@ -1337,7 +1362,8 @@ def test_get_account_not_found(client: TestClient, user: User): sid = user.login(request, "testPassword") with client as request: - response = request.get("/account/not_found", cookies={"AURSID": sid}) + request.cookies = {"AURSID": sid} + response = request.get("/account/not_found") assert response.status_code == int(HTTPStatus.NOT_FOUND) @@ -1358,7 +1384,8 @@ def test_get_accounts(client: TestClient, user: User, tu_user: User): cookies = {"AURSID": sid} with client as request: - response = request.get("/accounts", cookies=cookies) + request.cookies = cookies + response = request.get("/accounts") assert response.status_code == int(HTTPStatus.OK) parser = lxml.etree.HTMLParser() @@ -1426,7 +1453,8 @@ def test_post_accounts(client: TestClient, user: User, tu_user: User): cookies = {"AURSID": sid} with client as request: - response = request.post("/accounts", cookies=cookies) + request.cookies = cookies + response = request.post("/accounts") assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1468,7 +1496,8 @@ def test_post_accounts_username(client: TestClient, user: User, tu_user: User): cookies = {"AURSID": sid} with client as request: - response = request.post("/accounts", cookies=cookies, data={"U": user.Username}) + request.cookies = cookies + response = 
request.post("/accounts", data={"U": user.Username}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1501,13 +1530,15 @@ def test_post_accounts_account_type(client: TestClient, user: User, tu_user: Use # Expect no entries; we marked our only user as a User type. with client as request: - response = request.post("/accounts", cookies=cookies, data={"T": "t"}) + request.cookies = cookies + response = request.post("/accounts", data={"T": "t"}) assert response.status_code == int(HTTPStatus.OK) assert len(get_rows(response.text)) == 0 # So, let's also ensure that specifying "u" returns our user. with client as request: - response = request.post("/accounts", cookies=cookies, data={"T": "u"}) + request.cookies = cookies + response = request.post("/accounts", data={"T": "u"}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1525,7 +1556,8 @@ def test_post_accounts_account_type(client: TestClient, user: User, tu_user: Use ) with client as request: - response = request.post("/accounts", cookies=cookies, data={"T": "t"}) + request.cookies = cookies + response = request.post("/accounts", data={"T": "t"}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1542,7 +1574,8 @@ def test_post_accounts_account_type(client: TestClient, user: User, tu_user: Use ) with client as request: - response = request.post("/accounts", cookies=cookies, data={"T": "d"}) + request.cookies = cookies + response = request.post("/accounts", data={"T": "d"}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1559,7 +1592,8 @@ def test_post_accounts_account_type(client: TestClient, user: User, tu_user: Use ) with client as request: - response = request.post("/accounts", cookies=cookies, data={"T": "td"}) + request.cookies = cookies + response = request.post("/accounts", data={"T": "td"}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) 
@@ -1577,7 +1611,8 @@ def test_post_accounts_status(client: TestClient, user: User, tu_user: User): cookies = {"AURSID": sid} with client as request: - response = request.post("/accounts", cookies=cookies) + request.cookies = cookies + response = request.post("/accounts") assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1591,7 +1626,8 @@ def test_post_accounts_status(client: TestClient, user: User, tu_user: User): user.Suspended = True with client as request: - response = request.post("/accounts", cookies=cookies, data={"S": True}) + request.cookies = cookies + response = request.post("/accounts", data={"S": True}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1608,7 +1644,8 @@ def test_post_accounts_email(client: TestClient, user: User, tu_user: User): # Search via email. with client as request: - response = request.post("/accounts", cookies=cookies, data={"E": user.Email}) + request.cookies = cookies + response = request.post("/accounts", data={"E": user.Email}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1621,7 +1658,8 @@ def test_post_accounts_realname(client: TestClient, user: User, tu_user: User): cookies = {"AURSID": sid} with client as request: - response = request.post("/accounts", cookies=cookies, data={"R": user.RealName}) + request.cookies = cookies + response = request.post("/accounts", data={"R": user.RealName}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1634,7 +1672,8 @@ def test_post_accounts_irc(client: TestClient, user: User, tu_user: User): cookies = {"AURSID": sid} with client as request: - response = request.post("/accounts", cookies=cookies, data={"I": user.IRCNick}) + request.cookies = cookies + response = request.post("/accounts", data={"I": user.IRCNick}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1652,14 +1691,16 @@ def 
test_post_accounts_sortby(client: TestClient, user: User, tu_user: User): # Show that "u" is the default search order, by username. with client as request: - response = request.post("/accounts", cookies=cookies) + request.cookies = cookies + response = request.post("/accounts") assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) assert len(rows) == 2 first_rows = rows with client as request: - response = request.post("/accounts", cookies=cookies, data={"SB": "u"}) + request.cookies = cookies + response = request.post("/accounts", data={"SB": "u"}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) assert len(rows) == 2 @@ -1671,7 +1712,8 @@ def test_post_accounts_sortby(client: TestClient, user: User, tu_user: User): assert compare_text_values(0, first_rows, rows) is True with client as request: - response = request.post("/accounts", cookies=cookies, data={"SB": "i"}) + request.cookies = cookies + response = request.post("/accounts", data={"SB": "i"}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) assert len(rows) == 2 @@ -1681,7 +1723,8 @@ def test_post_accounts_sortby(client: TestClient, user: User, tu_user: User): # Sort by "i" -> RealName. with client as request: - response = request.post("/accounts", cookies=cookies, data={"SB": "r"}) + request.cookies = cookies + response = request.post("/accounts", data={"SB": "r"}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) assert len(rows) == 2 @@ -1696,7 +1739,8 @@ def test_post_accounts_sortby(client: TestClient, user: User, tu_user: User): # Fetch first_rows again with our new AccountType ordering. 
with client as request: - response = request.post("/accounts", cookies=cookies) + request.cookies = cookies + response = request.post("/accounts") assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) assert len(rows) == 2 @@ -1704,7 +1748,8 @@ def test_post_accounts_sortby(client: TestClient, user: User, tu_user: User): # Sort by "t" -> AccountType. with client as request: - response = request.post("/accounts", cookies=cookies, data={"SB": "t"}) + request.cookies = cookies + response = request.post("/accounts", data={"SB": "t"}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) assert len(rows) == 2 @@ -1722,7 +1767,8 @@ def test_post_accounts_pgp_key(client: TestClient, user: User, tu_user: User): # Search via PGPKey. with client as request: - response = request.post("/accounts", cookies=cookies, data={"K": user.PGPKey}) + request.cookies = cookies + response = request.post("/accounts", data={"K": user.PGPKey}) assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1749,7 +1795,8 @@ def test_post_accounts_paged(client: TestClient, user: User, tu_user: User): cookies = {"AURSID": sid} with client as request: - response = request.post("/accounts", cookies=cookies) + request.cookies = cookies + response = request.post("/accounts") assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1775,9 +1822,8 @@ def test_post_accounts_paged(client: TestClient, user: User, tu_user: User): assert "disabled" not in page_next.attrib with client as request: - response = request.post( - "/accounts", cookies=cookies, data={"O": 50} - ) # +50 offset. + request.cookies = cookies + response = request.post("/accounts", data={"O": 50}) # +50 offset. 
assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1791,9 +1837,8 @@ def test_post_accounts_paged(client: TestClient, user: User, tu_user: User): assert username.text.strip() == _user.Username with client as request: - response = request.post( - "/accounts", cookies=cookies, data={"O": 101} - ) # Last page. + request.cookies = cookies + response = request.post("/accounts", data={"O": 101}) # Last page. assert response.status_code == int(HTTPStatus.OK) rows = get_rows(response.text) @@ -1824,12 +1869,14 @@ def test_get_terms_of_service(client: TestClient, user: User): # First of all, let's test that we get redirected to /tos # when attempting to browse authenticated without accepting terms. with client as request: - response = request.get("/", cookies=cookies) + request.cookies = cookies + response = request.get("/") assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/tos" with client as request: - response = request.get("/tos", cookies=cookies) + request.cookies = cookies + response = request.get("/tos") assert response.status_code == int(HTTPStatus.OK) with db.begin(): @@ -1838,7 +1885,8 @@ def test_get_terms_of_service(client: TestClient, user: User): ) with client as request: - response = request.get("/tos", cookies=cookies) + request.cookies = cookies + response = request.get("/tos") # We accepted the term, there's nothing left to accept. assert response.status_code == int(HTTPStatus.SEE_OTHER) @@ -1847,7 +1895,8 @@ def test_get_terms_of_service(client: TestClient, user: User): term.Revision = 2 with client as request: - response = request.get("/tos", cookies=cookies) + request.cookies = cookies + response = request.get("/tos") # This time, we have a modified term Revision that hasn't # yet been agreed to via AcceptedTerm update. 
assert response.status_code == int(HTTPStatus.OK) @@ -1856,7 +1905,8 @@ def test_get_terms_of_service(client: TestClient, user: User): accepted_term.Revision = term.Revision with client as request: - response = request.get("/tos", cookies=cookies) + request.cookies = cookies + response = request.get("/tos") # We updated the term revision, there's nothing left to accept. assert response.status_code == int(HTTPStatus.SEE_OTHER) @@ -1876,17 +1926,20 @@ def test_post_terms_of_service(client: TestClient, user: User): # Test that the term we just created is listed. with client as request: - response = request.get("/tos", cookies=cookies) + request.cookies = cookies + response = request.get("/tos") assert response.status_code == int(HTTPStatus.OK) # Make a POST request to /tos with the agree checkbox disabled (False). with client as request: - response = request.post("/tos", data={"accept": False}, cookies=cookies) + request.cookies = cookies + response = request.post("/tos", data={"accept": False}) assert response.status_code == int(HTTPStatus.OK) # Make a POST request to /tos with the agree checkbox enabled (True). with client as request: - response = request.post("/tos", data=data, cookies=cookies) + request.cookies = cookies + response = request.post("/tos", data=data) assert response.status_code == int(HTTPStatus.SEE_OTHER) # Query the db for the record created by the post request. @@ -1900,12 +1953,14 @@ def test_post_terms_of_service(client: TestClient, user: User): # A GET request gives us the new revision to accept. with client as request: - response = request.get("/tos", cookies=cookies) + request.cookies = cookies + response = request.get("/tos") assert response.status_code == int(HTTPStatus.OK) # Let's POST again and agree to the new term revision. 
with client as request: - response = request.post("/tos", data=data, cookies=cookies) + request.cookies = cookies + response = request.post("/tos", data=data) assert response.status_code == int(HTTPStatus.SEE_OTHER) # Check that the records ended up matching. @@ -1913,7 +1968,8 @@ def test_post_terms_of_service(client: TestClient, user: User): # Now, see that GET redirects us to / with no terms left to accept. with client as request: - response = request.get("/tos", cookies=cookies) + request.cookies = cookies + response = request.get("/tos") assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/" @@ -1921,14 +1977,16 @@ def test_post_terms_of_service(client: TestClient, user: User): def test_account_comments_not_found(client: TestClient, user: User): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.get("/account/non-existent/comments", cookies=cookies) + request.cookies = cookies + resp = request.get("/account/non-existent/comments") assert resp.status_code == int(HTTPStatus.NOT_FOUND) def test_accounts_unauthorized(client: TestClient, user: User): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.get("/accounts", cookies=cookies) + request.cookies = cookies + resp = request.get("/accounts") assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == "/" @@ -1941,16 +1999,18 @@ def test_account_delete_self_unauthorized(client: TestClient, tu_user: User): cookies = {"AURSID": user.login(Request(), "testPassword")} endpoint = f"/account/{user2.Username}/delete" with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == HTTPStatus.UNAUTHORIZED - resp = request.post(endpoint, cookies=cookies) + resp = request.post(endpoint) assert resp.status_code == HTTPStatus.UNAUTHORIZED # But a TU does 
have access cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with TestClient(app=app) as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == HTTPStatus.OK @@ -1958,10 +2018,11 @@ def test_account_delete_self_not_found(client: TestClient, user: User): cookies = {"AURSID": user.login(Request(), "testPassword")} endpoint = "/account/non-existent-user/delete" with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == HTTPStatus.NOT_FOUND - resp = request.post(endpoint, cookies=cookies) + resp = request.post(endpoint) assert resp.status_code == HTTPStatus.NOT_FOUND @@ -1972,15 +2033,16 @@ def test_account_delete_self(client: TestClient, user: User): cookies = {"AURSID": user.login(Request(), "testPassword")} endpoint = f"/account/{username}/delete" with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == HTTPStatus.OK # The checkbox must be checked with client as request: + request.cookies = cookies resp = request.post( endpoint, data={"passwd": "fakePassword", "confirm": False}, - cookies=cookies, ) assert resp.status_code == HTTPStatus.BAD_REQUEST errors = get_errors(resp.text) @@ -1991,10 +2053,10 @@ def test_account_delete_self(client: TestClient, user: User): # The correct password must be supplied with client as request: + request.cookies = cookies resp = request.post( endpoint, data={"passwd": "fakePassword", "confirm": True}, - cookies=cookies, ) assert resp.status_code == HTTPStatus.BAD_REQUEST errors = get_errors(resp.text) @@ -2002,10 +2064,10 @@ def test_account_delete_self(client: TestClient, user: User): # Supply everything correctly and delete ourselves with client as request: + request.cookies = cookies resp = request.post( endpoint, data={"passwd": 
"testPassword", "confirm": True}, - cookies=cookies, ) assert resp.status_code == HTTPStatus.SEE_OTHER @@ -2026,15 +2088,16 @@ def test_account_delete_self_with_ssh_public_key(client: TestClient, user: User) cookies = {"AURSID": user.login(Request(), "testPassword")} endpoint = f"/account/{username}/delete" with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == HTTPStatus.OK # Supply everything correctly and delete ourselves with client as request: + request.cookies = cookies resp = request.post( endpoint, data={"passwd": "testPassword", "confirm": True}, - cookies=cookies, ) assert resp.status_code == HTTPStatus.SEE_OTHER @@ -2055,10 +2118,10 @@ def test_account_delete_as_tu(client: TestClient, tu_user: User): # Delete the user with client as request: + request.cookies = cookies resp = request.post( endpoint, data={"passwd": "testPassword", "confirm": True}, - cookies=cookies, ) assert resp.status_code == HTTPStatus.SEE_OTHER diff --git a/test/test_auth_routes.py b/test/test_auth_routes.py index 150625cd..066457c4 100644 --- a/test/test_auth_routes.py +++ b/test/test_auth_routes.py @@ -71,10 +71,10 @@ def test_login_logout(client: TestClient, user: User): response = request.post("/logout", data=post_data) assert response.status_code == int(HTTPStatus.SEE_OTHER) + request.cookies = {"AURSID": response.cookies.get("AURSID")} response = request.post( "/logout", data=post_data, - cookies={"AURSID": response.cookies.get("AURSID")}, ) assert response.status_code == int(HTTPStatus.SEE_OTHER) @@ -196,7 +196,9 @@ def test_authenticated_login(client: TestClient, user: User): # when requesting GET /login as an authenticated user. # Now, let's verify that we receive 403 Forbidden when we # try to get /login as an authenticated user. 
- response = request.get("/login", cookies=response.cookies) + request.cookies = response.cookies + response = request.get("/login") + assert response.status_code == int(HTTPStatus.OK) assert "Logged-in as: test" in response.text @@ -356,7 +358,8 @@ def test_generate_unique_sid_exhausted( with mock.patch(generate_unique_sid_, mock_generate_sid): with client as request: # Set cookies = {} to remove any previous login kept by TestClient. - response = request.post("/login", data=post_data, cookies={}) + request.cookies = {} + response = request.post("/login", data=post_data) assert response.status_code == int(HTTPStatus.INTERNAL_SERVER_ERROR) assert "500 - Internal Server Error" in response.text diff --git a/test/test_git_archives.py b/test/test_git_archives.py index 8ee4c2ba..c90706a4 100644 --- a/test/test_git_archives.py +++ b/test/test_git_archives.py @@ -197,7 +197,8 @@ def test_metadata_change( with client as request: endp = f"/pkgbase/{pkgbasename}/keywords" post_data = {"keywords": "abc def"} - resp = request.post(endp, data=post_data, cookies=cookies, allow_redirects=True) + request.cookies = cookies + resp = request.post(endp, data=post_data) assert resp.status_code == HTTPStatus.OK # Run main() again, which should now produce a new commit with the diff --git a/test/test_homepage.py b/test/test_homepage.py index 1aad30f7..a573bdd6 100644 --- a/test/test_homepage.py +++ b/test/test_homepage.py @@ -210,7 +210,8 @@ def test_homepage_dashboard(redis, packages, user): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - response = request.get("/", cookies=cookies) + request.cookies = cookies + response = request.get("/") assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -307,7 +308,8 @@ def test_homepage_dashboard_flagged(user: User, user2: User, package: Package): # flagged maintained packages. 
cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.get("/", cookies=cookies) + request.cookies = cookies + resp = request.get("/") assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) diff --git a/test/test_html.py b/test/test_html.py index 88c75a7c..681bd245 100644 --- a/test/test_html.py +++ b/test/test_html.py @@ -71,7 +71,8 @@ def test_archdev_navbar_authenticated(client: TestClient, user: User): expected = ["Dashboard", "Packages", "Requests", "My Account", "Logout"] cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.get("/", cookies=cookies) + request.cookies = cookies + resp = request.get("/") assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -92,7 +93,8 @@ def test_archdev_navbar_authenticated_tu(client: TestClient, trusted_user: User) ] cookies = {"AURSID": trusted_user.login(Request(), "testPassword")} with client as request: - resp = request.get("/", cookies=cookies) + request.cookies = cookies + resp = request.get("/") assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -173,9 +175,12 @@ def test_rtl(client: TestClient): expected = [[], [], ["rtl"], ["rtl"]] with client as request: responses["default"] = request.get("/") - responses["de"] = request.get("/", cookies={"AURLANG": "de"}) - responses["he"] = request.get("/", cookies={"AURLANG": "he"}) - responses["ar"] = request.get("/", cookies={"AURLANG": "ar"}) + request.cookies = {"AURLANG": "de"} + responses["de"] = request.get("/") + request.cookies = {"AURLANG": "he"} + responses["he"] = request.get("/") + request.cookies = {"AURLANG": "ar"} + responses["ar"] = request.get("/") for i, (lang, resp) in enumerate(responses.items()): assert resp.status_code == int(HTTPStatus.OK) t = parse_root(resp.text) diff --git a/test/test_packages_routes.py b/test/test_packages_routes.py index 29872cb8..0da6cfab 100644 --- 
a/test/test_packages_routes.py +++ b/test/test_packages_routes.py @@ -410,7 +410,8 @@ def test_package_comments(client: TestClient, user: User, package: Package): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.get(package_endpoint(package), cookies=cookies) + request.cookies = cookies + resp = request.get(package_endpoint(package)) assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -465,7 +466,8 @@ def test_package_authenticated(client: TestClient, user: User, package: Package) This process also occurs when pkgbase.html is rendered.""" cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.get(package_endpoint(package), cookies=cookies) + request.cookies = cookies + resp = request.get(package_endpoint(package)) assert resp.status_code == int(HTTPStatus.OK) expected = [ @@ -493,7 +495,8 @@ def test_package_authenticated_maintainer( ): cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with client as request: - resp = request.get(package_endpoint(package), cookies=cookies) + request.cookies = cookies + resp = request.get(package_endpoint(package)) assert resp.status_code == int(HTTPStatus.OK) expected = [ @@ -515,7 +518,8 @@ def test_package_authenticated_maintainer( def test_package_authenticated_tu(client: TestClient, tu_user: User, package: Package): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - resp = request.get(package_endpoint(package), cookies=cookies) + request.cookies = cookies + resp = request.get(package_endpoint(package)) assert resp.status_code == int(HTTPStatus.OK) expected = [ @@ -941,10 +945,10 @@ def test_packages_sort_by_voted( # Test that, by default, the first result is what we just set above. 
cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with client as request: + request.cookies = cookies response = request.get( "/packages", params={"SB": "w", "SO": "d"}, # Voted # Descending, Voted first. - cookies=cookies, ) assert response.status_code == int(HTTPStatus.OK) @@ -966,10 +970,10 @@ def test_packages_sort_by_notify( # Test that, by default, the first result is what we just set above. cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with client as request: + request.cookies = cookies response = request.get( "/packages", params={"SB": "o", "SO": "d"}, # Voted # Descending, Voted first. - cookies=cookies, ) assert response.status_code == int(HTTPStatus.OK) @@ -1142,10 +1146,10 @@ def test_packages_post_unknown_action(client: TestClient, user: User, package: P cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: + request.cookies = cookies resp = request.post( "/packages", data={"action": "unknown"}, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) @@ -1158,10 +1162,10 @@ def test_packages_post_error(client: TestClient, user: User, package: Package): with mock.patch.dict("aurweb.routers.packages.PACKAGE_ACTIONS", actions): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: + request.cookies = cookies resp = request.post( "/packages", data={"action": "stub"}, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) @@ -1178,10 +1182,10 @@ def test_packages_post(client: TestClient, user: User, package: Package): with mock.patch.dict("aurweb.routers.packages.PACKAGE_ACTIONS", actions): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: + request.cookies = cookies resp = request.post( "/packages", data={"action": "stub"}, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.OK) @@ -1250,7 +1254,8 @@ def test_packages_post_notify(client: TestClient, user: User, package: 
Package): # an error to be rendered. cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post("/packages", data={"action": "notify"}, cookies=cookies) + request.cookies = cookies + resp = request.post("/packages", data={"action": "notify"}) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "You did not select any packages to be notified about." @@ -1258,9 +1263,8 @@ def test_packages_post_notify(client: TestClient, user: User, package: Package): # Now let's actually enable notifications on `package`. with client as request: - resp = request.post( - "/packages", data={"action": "notify", "IDs": [package.ID]}, cookies=cookies - ) + request.cookies = cookies + resp = request.post("/packages", data={"action": "notify", "IDs": [package.ID]}) assert resp.status_code == int(HTTPStatus.OK) expected = "The selected packages' notifications have been enabled." successes = get_successes(resp.text) @@ -1269,9 +1273,8 @@ def test_packages_post_notify(client: TestClient, user: User, package: Package): # Try to enable notifications when they're already enabled, # causing an error to be rendered. with client as request: - resp = request.post( - "/packages", data={"action": "notify", "IDs": [package.ID]}, cookies=cookies - ) + request.cookies = cookies + resp = request.post("/packages", data={"action": "notify", "IDs": [package.ID]}) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "You did not select any packages to be notified about." @@ -1289,7 +1292,8 @@ def test_packages_post_unnotify(client: TestClient, user: User, package: Package # Request removal of the notification without any IDs. 
cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post("/packages", data={"action": "unnotify"}, cookies=cookies) + request.cookies = cookies + resp = request.post("/packages", data={"action": "unnotify"}) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "You did not select any packages for notification removal." @@ -1297,10 +1301,10 @@ def test_packages_post_unnotify(client: TestClient, user: User, package: Package # Request removal of the notification; really. with client as request: + request.cookies = cookies resp = request.post( "/packages", data={"action": "unnotify", "IDs": [package.ID]}, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.OK) successes = get_successes(resp.text) @@ -1315,10 +1319,10 @@ def test_packages_post_unnotify(client: TestClient, user: User, package: Package # Try it again. The notif no longer exists. with client as request: + request.cookies = cookies resp = request.post( "/packages", data={"action": "unnotify", "IDs": [package.ID]}, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) @@ -1331,7 +1335,8 @@ def test_packages_post_adopt(client: TestClient, user: User, package: Package): # Try to adopt an empty list of packages. cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post("/packages", data={"action": "adopt"}, cookies=cookies) + request.cookies = cookies + resp = request.post("/packages", data={"action": "adopt"}) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "You did not select any packages to adopt." @@ -1339,10 +1344,10 @@ def test_packages_post_adopt(client: TestClient, user: User, package: Package): # Now, let's try to adopt a package that's already maintained. 
with client as request: + request.cookies = cookies resp = request.post( "/packages", data={"action": "adopt", "IDs": [package.ID], "confirm": True}, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) @@ -1356,9 +1361,8 @@ def test_packages_post_adopt(client: TestClient, user: User, package: Package): # Now, let's try to adopt without confirming. with client as request: - resp = request.post( - "/packages", data={"action": "adopt", "IDs": [package.ID]}, cookies=cookies - ) + request.cookies = cookies + resp = request.post("/packages", data={"action": "adopt", "IDs": [package.ID]}) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = ( @@ -1369,10 +1373,10 @@ def test_packages_post_adopt(client: TestClient, user: User, package: Package): # Let's do it again now that there is no maintainer. with client as request: + request.cookies = cookies resp = request.post( "/packages", data={"action": "adopt", "IDs": [package.ID], "confirm": True}, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.OK) successes = get_successes(resp.text) @@ -1446,10 +1450,10 @@ def test_packages_post_disown( """Disown packages as a Trusted User, which cannot bypass idle time.""" cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: + request.cookies = cookies resp = request.post( "/packages", data={"action": "disown", "IDs": [package.ID], "confirm": True}, - cookies=cookies, ) errors = get_errors(resp.text) @@ -1576,7 +1580,8 @@ def test_account_comments(client: TestClient, user: User, package: Package): cookies = {"AURSID": user.login(Request(), "testPassword")} endpoint = f"/account/{user.Username}/comments" with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) diff --git 
a/test/test_pkgbase_routes.py b/test/test_pkgbase_routes.py index dd92d72d..124eb71f 100644 --- a/test/test_pkgbase_routes.py +++ b/test/test_pkgbase_routes.py @@ -312,7 +312,8 @@ def test_pkgbase_voters(client: TestClient, tu_user: User, package: Package): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.OK) # We should've gotten one link to the voter, tu_user. @@ -343,7 +344,8 @@ def test_pkgbase_comment_not_found( comment_id = 12345 # A non-existing comment. endpoint = f"/pkgbase/{package.PackageBase.Name}/comments/{comment_id}" with client as request: - resp = request.post(endpoint, data={"comment": "Failure"}, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint, data={"comment": "Failure"}) assert resp.status_code == int(HTTPStatus.NOT_FOUND) @@ -365,7 +367,8 @@ def test_pkgbase_comment_form_unauthorized( pkgbasename = package.PackageBase.Name endpoint = f"/pkgbase/{pkgbasename}/comments/{comment.ID}/form" with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.UNAUTHORIZED) @@ -377,7 +380,8 @@ def test_pkgbase_comment_form_not_found( pkgbasename = package.PackageBase.Name endpoint = f"/pkgbase/{pkgbasename}/comments/{comment_id}/form" with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.NOT_FOUND) @@ -387,7 +391,8 @@ def test_pkgbase_comments_missing_comment( cookies = {"AURSID": maintainer.login(Request(), "testPassword")} endpoint = f"/pkgbase/{package.PackageBase.Name}/comments" with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert 
resp.status_code == int(HTTPStatus.BAD_REQUEST) @@ -409,10 +414,10 @@ def test_pkgbase_comments( pkgbasename = package.PackageBase.Name endpoint = f"/pkgbase/{pkgbasename}/comments" with client as request: + request.cookies = cookies resp = request.post( endpoint, data={"comment": "Test comment.", "enable_notifications": True}, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) @@ -440,7 +445,8 @@ def test_pkgbase_comments( # Test the non-javascript version of comment editing by # visiting the /pkgbase/{name}/comments/{id}/edit route. with client as request: - resp = request.get(f"{endpoint}/{comment_id}/edit", cookies=cookies) + request.cookies = cookies + resp = request.get(f"{endpoint}/{comment_id}/edit") assert resp.status_code == int(HTTPStatus.OK) # Clear up the PackageNotification. This doubles as testing @@ -457,10 +463,10 @@ def test_pkgbase_comments( comment_id = int(headers[0].attrib["id"].split("-")[-1]) endpoint = f"/pkgbase/{pkgbasename}/comments/{comment_id}" with client as request: + request.cookies = cookies resp = request.post( endpoint, data={"comment": "Edited comment.", "enable_notifications": True}, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) @@ -485,14 +491,16 @@ def test_pkgbase_comments( # Don't supply a comment; should return BAD_REQUEST. with client as request: - fail_resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + fail_resp = request.post(endpoint) assert fail_resp.status_code == int(HTTPStatus.BAD_REQUEST) # Now, test the form route, which should return form markup # via JSON. 
endpoint = f"{endpoint}/form" with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.OK) data = resp.json() @@ -510,11 +518,11 @@ def test_pkgbase_comment_edit_unauthorized( cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with client as request: + request.cookies = cookies endp = f"/pkgbase/{pkgbase.Name}/comments/{comment.ID}" response = request.post( endp, data={"comment": "abcd im trying to change this comment."}, - cookies=cookies, ) assert response.status_code == HTTPStatus.UNAUTHORIZED @@ -561,7 +569,8 @@ def test_pkgbase_comment_delete_unauthorized( pkgbasename = package.PackageBase.Name endpoint = f"/pkgbase/{pkgbasename}/comments/{comment.ID}/delete" with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.UNAUTHORIZED) @@ -573,7 +582,8 @@ def test_pkgbase_comment_delete_not_found( pkgbasename = package.PackageBase.Name endpoint = f"/pkgbase/{pkgbasename}/comments/{comment_id}/delete" with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.NOT_FOUND) @@ -585,7 +595,8 @@ def test_pkgbase_comment_undelete_not_found( pkgbasename = package.PackageBase.Name endpoint = f"/pkgbase/{pkgbasename}/comments/{comment_id}/undelete" with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.NOT_FOUND) @@ -607,7 +618,8 @@ def test_pkgbase_comment_pin_as_co( endpoint = f"/pkgbase/{pkgbasename}/comments/{comment.ID}/pin" cookies = {"AURSID": comaint.login(Request(), "testPassword")} with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = 
request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) # Assert that PinnedTS got set. @@ -616,7 +628,8 @@ def test_pkgbase_comment_pin_as_co( # Unpin the comment we just pinned. endpoint = f"/pkgbase/{pkgbasename}/comments/{comment.ID}/unpin" with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) # Let's assert that PinnedTS was unset. @@ -633,7 +646,8 @@ def test_pkgbase_comment_pin( # Pin the comment. endpoint = f"/pkgbase/{pkgbasename}/comments/{comment_id}/pin" with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) # Assert that PinnedTS got set. @@ -642,7 +656,8 @@ def test_pkgbase_comment_pin( # Unpin the comment we just pinned. endpoint = f"/pkgbase/{pkgbasename}/comments/{comment_id}/unpin" with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) # Let's assert that PinnedTS was unset. 
@@ -657,7 +672,8 @@ def test_pkgbase_comment_pin_unauthorized( pkgbasename = package.PackageBase.Name endpoint = f"/pkgbase/{pkgbasename}/comments/{comment_id}/pin" with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.UNAUTHORIZED) @@ -669,7 +685,8 @@ def test_pkgbase_comment_unpin_unauthorized( pkgbasename = package.PackageBase.Name endpoint = f"/pkgbase/{pkgbasename}/comments/{comment_id}/unpin" with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.UNAUTHORIZED) @@ -677,7 +694,8 @@ def test_pkgbase_comaintainers_not_found(client: TestClient, maintainer: User): cookies = {"AURSID": maintainer.login(Request(), "testPassword")} endpoint = "/pkgbase/fake/comaintainers" with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.NOT_FOUND) @@ -685,7 +703,8 @@ def test_pkgbase_comaintainers_post_not_found(client: TestClient, maintainer: Us cookies = {"AURSID": maintainer.login(Request(), "testPassword")} endpoint = "/pkgbase/fake/comaintainers" with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.NOT_FOUND) @@ -696,7 +715,8 @@ def test_pkgbase_comaintainers_unauthorized( endpoint = f"/pkgbase/{pkgbase.Name}/comaintainers" cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" @@ -708,7 +728,8 @@ def test_pkgbase_comaintainers_post_unauthorized( 
endpoint = f"/pkgbase/{pkgbase.Name}/comaintainers" cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" @@ -720,7 +741,8 @@ def test_pkgbase_comaintainers_post_invalid_user( endpoint = f"/pkgbase/{pkgbase.Name}/comaintainers" cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with client as request: - resp = request.post(endpoint, data={"users": "\nfake\n"}, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint, data={"users": "\nfake\n"}) assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -738,20 +760,20 @@ def test_pkgbase_comaintainers( # Start off by adding user as a comaintainer to package. # The maintainer username given should be ignored. with client as request: + request.cookies = cookies resp = request.post( endpoint, data={"users": f"\n{user.Username}\n{maintainer.Username}\n"}, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" # Do it again to exercise the last_priority bump path. with client as request: + request.cookies = cookies resp = request.post( endpoint, data={"users": f"\n{user.Username}\n{maintainer.Username}\n"}, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" @@ -760,7 +782,8 @@ def test_pkgbase_comaintainers( # let's perform a GET request to make sure that the backend produces # the user we added in the users textarea. 
with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -769,12 +792,14 @@ def test_pkgbase_comaintainers( # Finish off by removing all the comaintainers. with client as request: - resp = request.post(endpoint, data={"users": str()}, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint, data={"users": str()}) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.OK) root = parse_root(resp.text) @@ -788,7 +813,8 @@ def test_pkgbase_request_not_found(client: TestClient, user: User): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.NOT_FOUND) @@ -798,16 +824,16 @@ def test_pkgbase_request(client: TestClient, user: User, package: Package): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.OK) def test_pkgbase_request_post_not_found(client: TestClient, user: User): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post( - "/pkgbase/fake/request", data={"type": "fake"}, cookies=cookies - ) + request.cookies = cookies + resp = request.post("/pkgbase/fake/request", data={"type": "fake"}) assert resp.status_code == int(HTTPStatus.NOT_FOUND) @@ -817,7 +843,8 @@ def test_pkgbase_request_post_invalid_type( endpoint = 
f"/pkgbase/{package.PackageBase.Name}/request" cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post(endpoint, data={"type": "fake"}, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint, data={"type": "fake"}) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) @@ -827,13 +854,13 @@ def test_pkgbase_request_post_no_comment_error( endpoint = f"/pkgbase/{package.PackageBase.Name}/request" cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: + request.cookies = cookies resp = request.post( endpoint, data={ "type": "deletion", "comments": "", # An empty comment field causes an error. }, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.OK) @@ -849,6 +876,7 @@ def test_pkgbase_request_post_merge_not_found_error( endpoint = f"/pkgbase/{package.PackageBase.Name}/request" cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: + request.cookies = cookies resp = request.post( endpoint, data={ @@ -856,7 +884,6 @@ def test_pkgbase_request_post_merge_not_found_error( "merge_into": "fake", # There is no PackageBase.Name "fake" "comments": "We want to merge this.", }, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.OK) @@ -872,6 +899,7 @@ def test_pkgbase_request_post_merge_no_merge_into_error( endpoint = f"/pkgbase/{package.PackageBase.Name}/request" cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: + request.cookies = cookies resp = request.post( endpoint, data={ @@ -879,7 +907,6 @@ def test_pkgbase_request_post_merge_no_merge_into_error( "merge_into": "", # There is no PackageBase.Name "fake" "comments": "We want to merge this.", }, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.OK) @@ -895,6 +922,7 @@ def test_pkgbase_request_post_merge_self_error( endpoint = f"/pkgbase/{package.PackageBase.Name}/request" cookies = {"AURSID": user.login(Request(), 
"testPassword")} with client as request: + request.cookies = cookies resp = request.post( endpoint, data={ @@ -902,7 +930,6 @@ def test_pkgbase_request_post_merge_self_error( "merge_into": package.PackageBase.Name, "comments": "We want to merge this.", }, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.OK) @@ -1017,7 +1044,8 @@ def test_pkgbase_flag_vcs(client: TestClient, user: User, package: Package): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.get(f"/pkgbase/{package.PackageBase.Name}/flag", cookies=cookies) + request.cookies = cookies + resp = request.get(f"/pkgbase/{package.PackageBase.Name}/flag") assert resp.status_code == int(HTTPStatus.OK) expected = ( @@ -1042,7 +1070,8 @@ def test_pkgbase_notify(client: TestClient, user: User, package: Package): cookies = {"AURSID": user.login(Request(), "testPassword")} endpoint = f"/pkgbase/{pkgbase.Name}/notify" with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) notif = pkgbase.notifications.filter(PackageNotification.UserID == user.ID).first() @@ -1051,7 +1080,8 @@ def test_pkgbase_notify(client: TestClient, user: User, package: Package): # Disable notifications. 
endpoint = f"/pkgbase/{pkgbase.Name}/unnotify" with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) notif = pkgbase.notifications.filter(PackageNotification.UserID == user.ID).first() @@ -1069,7 +1099,8 @@ def test_pkgbase_vote(client: TestClient, user: User, package: Package): cookies = {"AURSID": user.login(Request(), "testPassword")} endpoint = f"/pkgbase/{pkgbase.Name}/vote" with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) vote = pkgbase.package_votes.filter(PackageVote.UsersID == user.ID).first() @@ -1079,7 +1110,8 @@ def test_pkgbase_vote(client: TestClient, user: User, package: Package): # Remove vote. endpoint = f"/pkgbase/{pkgbase.Name}/unvote" with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) vote = pkgbase.package_votes.filter(PackageVote.UsersID == user.ID).first() @@ -1096,7 +1128,8 @@ def test_pkgbase_disown_as_sole_maintainer( # But we do here. 
with client as request: - resp = request.post(endpoint, data={"confirm": True}, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint, data={"confirm": True}) assert resp.status_code == int(HTTPStatus.SEE_OTHER) @@ -1114,9 +1147,8 @@ def test_pkgbase_disown_as_maint_with_comaint( maint_cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with client as request: - resp = request.post( - endp, data=post_data, cookies=maint_cookies, follow_redirects=True - ) + request.cookies = maint_cookies + resp = request.post(endp, data=post_data, follow_redirects=True) assert resp.status_code == int(HTTPStatus.OK) package = db.refresh(package) @@ -1219,21 +1251,24 @@ def test_pkgbase_adopt( # Adopt the package base. with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert package.PackageBase.Maintainer == maintainer # Try to adopt it when it already has a maintainer; nothing changes. user_cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post(endpoint, cookies=user_cookies) + request.cookies = user_cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert package.PackageBase.Maintainer == maintainer # Steal the package as a TU. tu_cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - resp = request.post(endpoint, cookies=tu_cookies) + request.cookies = tu_cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert package.PackageBase.Maintainer == tu_user @@ -1245,13 +1280,15 @@ def test_pkgbase_delete_unauthorized(client: TestClient, user: User, package: Pa # Test GET. 
with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" # Test POST. with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" @@ -1263,17 +1300,20 @@ def test_pkgbase_delete(client: TestClient, tu_user: User, package: Package): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} endpoint = f"/pkgbase/{pkgbase.Name}/delete" with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.OK) # Test that POST works and denies us because we haven't confirmed. with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) # Test that we can actually delete the pkgbase. with client as request: - resp = request.post(endpoint, data={"confirm": True}, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint, data={"confirm": True}) assert resp.status_code == int(HTTPStatus.SEE_OTHER) # Let's assert that the package base record got removed. 
@@ -1300,7 +1340,8 @@ def test_pkgbase_delete_with_request( cookies = {"AURSID": tu_user.login(Request(), "testPassword")} endpoint = f"/pkgbase/{pkgbase.Name}/delete" with client as request: - resp = request.post(endpoint, data={"confirm": True}, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint, data={"confirm": True}) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == "/packages" @@ -1316,10 +1357,10 @@ def test_pkgbase_delete_with_request( def test_packages_post_unknown_action(client: TestClient, user: User, package: Package): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: + request.cookies = cookies resp = request.post( "/packages", data={"action": "unknown"}, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) @@ -1332,10 +1373,10 @@ def test_packages_post_error(client: TestClient, user: User, package: Package): with mock.patch.dict("aurweb.routers.packages.PACKAGE_ACTIONS", actions): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: + request.cookies = cookies resp = request.post( "/packages", data={"action": "stub"}, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) @@ -1352,10 +1393,10 @@ def test_packages_post(client: TestClient, user: User, package: Package): with mock.patch.dict("aurweb.routers.packages.PACKAGE_ACTIONS", actions): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: + request.cookies = cookies resp = request.post( "/packages", data={"action": "stub"}, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.OK) @@ -1368,7 +1409,8 @@ def test_pkgbase_merge_unauthorized(client: TestClient, user: User, package: Pac cookies = {"AURSID": user.login(Request(), "testPassword")} endpoint = f"/pkgbase/{package.PackageBase.Name}/merge" with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = 
cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.UNAUTHORIZED) @@ -1376,7 +1418,8 @@ def test_pkgbase_merge(client: TestClient, tu_user: User, package: Package): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} endpoint = f"/pkgbase/{package.PackageBase.Name}/merge" with client as request: - resp = request.get(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.OK) assert not get_errors(resp.text) @@ -1387,7 +1430,8 @@ def test_pkgbase_merge_post_unauthorized( cookies = {"AURSID": user.login(Request(), "testPassword")} endpoint = f"/pkgbase/{package.PackageBase.Name}/merge" with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.UNAUTHORIZED) @@ -1397,7 +1441,8 @@ def test_pkgbase_merge_post_unconfirmed( cookies = {"AURSID": tu_user.login(Request(), "testPassword")} endpoint = f"/pkgbase/{package.PackageBase.Name}/merge" with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = ( @@ -1413,9 +1458,8 @@ def test_pkgbase_merge_post_invalid_into( cookies = {"AURSID": tu_user.login(Request(), "testPassword")} endpoint = f"/pkgbase/{package.PackageBase.Name}/merge" with client as request: - resp = request.post( - endpoint, data={"into": "not_real", "confirm": True}, cookies=cookies - ) + request.cookies = cookies + resp = request.post(endpoint, data={"into": "not_real", "confirm": True}) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) expected = "Cannot find package to merge votes and comments into." 
@@ -1428,10 +1472,10 @@ def test_pkgbase_merge_post_self_invalid( cookies = {"AURSID": tu_user.login(Request(), "testPassword")} endpoint = f"/pkgbase/{package.PackageBase.Name}/merge" with client as request: + request.cookies = cookies resp = request.post( endpoint, data={"into": package.PackageBase.Name, "confirm": True}, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) @@ -1461,20 +1505,24 @@ def test_pkgbase_merge_post( cookies = {"AURSID": tu_user.login(Request(), "testPassword")} endpoint = f"/pkgbase/{package.PackageBase.Name}/vote" with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) # Enable notifications. endpoint = f"/pkgbase/{package.PackageBase.Name}/notify" with client as request: - resp = request.post(endpoint, cookies=cookies) + request.cookies = cookies + resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) # Comment on the package. endpoint = f"/pkgbase/{package.PackageBase.Name}/comments" with client as request: + request.cookies = cookies resp = request.post( - endpoint, data={"comment": "Test comment."}, cookies=cookies + endpoint, + data={"comment": "Test comment."}, ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) @@ -1486,9 +1534,8 @@ def test_pkgbase_merge_post( # Merge the package into target. 
endpoint = f"/pkgbase/{package.PackageBase.Name}/merge" with client as request: - resp = request.post( - endpoint, data={"into": target.Name, "confirm": True}, cookies=cookies - ) + request.cookies = cookies + resp = request.post(endpoint, data={"into": target.Name, "confirm": True}) assert resp.status_code == int(HTTPStatus.SEE_OTHER) loc = resp.headers.get("location") assert loc == f"/pkgbase/{target.Name}" @@ -1604,9 +1651,10 @@ def test_unauthorized_pkgbase_keywords(client: TestClient, package: Package): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: + request.cookies = cookies pkgbase = package.PackageBase endp = f"/pkgbase/{pkgbase.Name}/keywords" - response = request.post(endp, cookies=cookies) + response = request.post(endp) assert response.status_code == HTTPStatus.UNAUTHORIZED diff --git a/test/test_requests.py b/test/test_requests.py index 1d681d58..18b860f2 100644 --- a/test/test_requests.py +++ b/test/test_requests.py @@ -254,7 +254,8 @@ def test_request(client: TestClient, auser: User, pkgbase: PackageBase): """Test the standard pkgbase request route GET method.""" endpoint = f"/pkgbase/{pkgbase.Name}/request" with client as request: - resp = request.get(endpoint, cookies=auser.cookies) + request.cookies = auser.cookies + resp = request.get(endpoint) assert resp.status_code == int(HTTPStatus.OK) @@ -263,7 +264,8 @@ def test_request_post_deletion(client: TestClient, auser2: User, pkgbase: Packag endpoint = f"/pkgbase/{pkgbase.Name}/request" data = {"comments": "Test request.", "type": "deletion"} with client as request: - resp = request.post(endpoint, data=data, cookies=auser2.cookies) + request.cookies = auser2.cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.SEE_OTHER) pkgreq = pkgbase.requests.first() @@ -285,7 +287,8 @@ def test_request_post_deletion_as_maintainer( endpoint = f"/pkgbase/{pkgbase.Name}/request" data = {"comments": "Test request.", "type": "deletion"} 
with client as request: - resp = request.post(endpoint, data=data, cookies=auser.cookies) + request.cookies = auser.cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.SEE_OTHER) # Check the pkgreq record got created and accepted. @@ -368,7 +371,8 @@ def test_request_post_merge( "comments": "Test request.", } with client as request: - resp = request.post(endpoint, data=data, cookies=auser.cookies) + request.cookies = auser.cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.SEE_OTHER) pkgreq = pkgbase.requests.first() @@ -392,7 +396,8 @@ def test_request_post_orphan(client: TestClient, auser: User, pkgbase: PackageBa "comments": "Test request.", } with client as request: - resp = request.post(endpoint, data=data, cookies=auser.cookies) + request.cookies = auser.cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.SEE_OTHER) pkgreq = pkgbase.requests.first() @@ -428,7 +433,8 @@ def test_deletion_request( comments = "Test closure." data = {"comments": comments, "confirm": True} with client as request: - resp = request.post(endpoint, data=data, cookies=tu_user.cookies) + request.cookies = tu_user.cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == "/packages" @@ -460,7 +466,8 @@ def test_deletion_autorequest(client: TestClient, tu_user: User, pkgbase: Packag endpoint = f"/pkgbase/{pkgbase.Name}/delete" data = {"confirm": True} with client as request: - resp = request.post(endpoint, data=data, cookies=tu_user.cookies) + request.cookies = tu_user.cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == "/packages" @@ -498,7 +505,8 @@ def test_merge_request( comments = "Test merge closure." 
data = {"into": target.Name, "comments": comments, "confirm": True} with client as request: - resp = request.post(endpoint, data=data, cookies=tu_user.cookies) + request.cookies = tu_user.cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{target.Name}" @@ -545,7 +553,8 @@ def test_merge_autorequest( endpoint = f"/pkgbase/{pkgbase.Name}/merge" data = {"into": target.Name, "confirm": True} with client as request: - resp = request.post(endpoint, data=data, cookies=tu_user.cookies) + request.cookies = tu_user.cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{target.Name}" @@ -582,7 +591,8 @@ def test_orphan_request( comments = "Test orphan closure." data = {"comments": comments, "confirm": True} with client as request: - resp = request.post(endpoint, data=data, cookies=tu_user.cookies) + request.cookies = tu_user.cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" @@ -615,7 +625,8 @@ def test_request_post_orphan_autogenerated_closure( endpoint = f"/pkgbase/{pkgbase.Name}/disown" data = {"confirm": True} with client as request: - resp = request.post(endpoint, data=data, cookies=tu_user.cookies) + request.cookies = tu_user.cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" @@ -677,7 +688,8 @@ def test_orphan_as_maintainer(client: TestClient, auser: User, pkgbase: PackageB endpoint = f"/pkgbase/{pkgbase.Name}/disown" data = {"confirm": True} with client as request: - resp = request.post(endpoint, data=data, cookies=auser.cookies) + request.cookies = auser.cookies + resp = request.post(endpoint, data=data) assert resp.status_code == 
int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" @@ -694,7 +706,8 @@ def test_orphan_without_requests( endpoint = f"/pkgbase/{pkgbase.Name}/disown" data = {"confirm": True} with client as request: - resp = request.post(endpoint, data=data, cookies=tu_user.cookies) + request.cookies = tu_user.cookies + resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) errors = get_errors(resp.text) @@ -733,6 +746,7 @@ def test_requests( ): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: + request.cookies = cookies resp = request.get( "/requests", params={ @@ -742,7 +756,6 @@ def test_requests( "SeB": "nd", "SB": "n", }, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.OK) @@ -756,7 +769,8 @@ def test_requests( # Request page 2 of the requests page. with client as request: - resp = request.get("/requests", params={"O": 50}, cookies=cookies) # Page 2 + request.cookies = cookies + resp = request.get("/requests", params={"O": 50}) # Page 2 assert resp.status_code == int(HTTPStatus.OK) assert "‹ Previous" in resp.text @@ -775,6 +789,7 @@ def test_requests_with_filters( ): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: + request.cookies = cookies resp = request.get( "/requests", params={ @@ -789,7 +804,6 @@ def test_requests_with_filters( "filter_rejected": True, "filter_maintainer_requests": False, }, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.OK) @@ -803,6 +817,7 @@ def test_requests_with_filters( # Request page 2 of the requests page. 
with client as request: + request.cookies = cookies resp = request.get( "/requests", params={ @@ -813,7 +828,6 @@ def test_requests_with_filters( "filter_rejected": True, "filter_maintainer_requests": False, }, - cookies=cookies, ) # Page 2 assert resp.status_code == int(HTTPStatus.OK) @@ -833,10 +847,10 @@ def test_requests_for_maintainer_requests( ): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: + request.cookies = cookies resp = request.get( "/requests", params={"filter_maintainer_requests": True}, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.OK) @@ -854,7 +868,8 @@ def test_requests_by_deleted_users( cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - resp = request.get("/requests", cookies=cookies) + request.cookies = cookies + resp = request.get("/requests") assert resp.status_code == HTTPStatus.OK root = parse_root(resp.text) @@ -867,7 +882,8 @@ def test_requests_selfmade( ): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.get("/requests", cookies=cookies) + request.cookies = cookies + resp = request.get("/requests") assert resp.status_code == int(HTTPStatus.OK) # As the user who creates all of the requests, we should see all of them. 
@@ -885,7 +901,8 @@ def test_requests_selfmade( def test_requests_close(client: TestClient, user: User, pkgreq: PackageRequest): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.get(f"/requests/{pkgreq.ID}/close", cookies=cookies) + request.cookies = cookies + resp = request.get(f"/requests/{pkgreq.ID}/close") assert resp.status_code == int(HTTPStatus.OK) @@ -894,7 +911,10 @@ def test_requests_close_unauthorized( ): cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with client as request: - resp = request.get(f"/requests/{pkgreq.ID}/close", cookies=cookies) + request.cookies = cookies + resp = request.get( + f"/requests/{pkgreq.ID}/close", + ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == "/" @@ -904,10 +924,10 @@ def test_requests_close_post_unauthorized( ): cookies = {"AURSID": maintainer.login(Request(), "testPassword")} with client as request: + request.cookies = cookies resp = request.post( f"/requests/{pkgreq.ID}/close", data={"reason": ACCEPTED_ID}, - cookies=cookies, ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert resp.headers.get("location") == "/" @@ -916,7 +936,8 @@ def test_requests_close_post_unauthorized( def test_requests_close_post(client: TestClient, user: User, pkgreq: PackageRequest): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post(f"/requests/{pkgreq.ID}/close", cookies=cookies) + request.cookies = cookies + resp = request.post(f"/requests/{pkgreq.ID}/close") assert resp.status_code == int(HTTPStatus.SEE_OTHER) assert pkgreq.Status == REJECTED_ID @@ -929,7 +950,10 @@ def test_requests_close_post_rejected( ): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - resp = request.post(f"/requests/{pkgreq.ID}/close", cookies=cookies) + request.cookies = cookies + resp = request.post( + f"/requests/{pkgreq.ID}/close", + ) assert 
resp.status_code == int(HTTPStatus.SEE_OTHER) assert pkgreq.Status == REJECTED_ID diff --git a/test/test_routes.py b/test/test_routes.py index b4bc30ee..c104211e 100644 --- a/test/test_routes.py +++ b/test/test_routes.py @@ -99,7 +99,8 @@ def test_user_language(client: TestClient, user: User): assert sid is not None with client as req: - response = req.post("/language", data=post_data, cookies={"AURSID": sid}) + req.cookies = {"AURSID": sid} + response = req.post("/language", data=post_data) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert user.LangPreference == "de" @@ -154,6 +155,5 @@ def test_id_redirect(client: TestClient): "key": "value", # Test that this param persists. "key2": "value2", # And this one. }, - allow_redirects=False, ) assert response.headers.get("location") == "/test?key=value&key2=value2" diff --git a/test/test_trusted_user_routes.py b/test/test_trusted_user_routes.py index dc468808..0bb9523e 100644 --- a/test/test_trusted_user_routes.py +++ b/test/test_trusted_user_routes.py @@ -166,7 +166,8 @@ def test_tu_index_unauthorized(client: TestClient, user: User): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: # Login as a normal user, not a TU. - response = request.get("/tu", cookies=cookies) + request.cookies = cookies + response = request.get("/tu") assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/" @@ -177,7 +178,8 @@ def test_tu_empty_index(client, tu_user): # Make a default get request to /tu. cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - response = request.get("/tu", cookies=cookies) + request.cookies = cookies + response = request.get("/tu") assert response.status_code == int(HTTPStatus.OK) # Parse lxml root. 
@@ -226,9 +228,9 @@ def test_tu_index(client, tu_user): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: # Pass an invalid cby and pby; let them default to "desc". + request.cookies = cookies response = request.get( "/tu", - cookies=cookies, params={"cby": "BAD!", "pby": "blah"}, ) @@ -295,7 +297,8 @@ def test_tu_index(client, tu_user): def test_tu_stats(client: TestClient, tu_user: User): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - response = request.get("/tu", cookies=cookies) + request.cookies = cookies + response = request.get("/tu") assert response.status_code == HTTPStatus.OK root = parse_root(response.text) @@ -316,7 +319,8 @@ def test_tu_stats(client: TestClient, tu_user: User): tu_user.InactivityTS = time.utcnow() with client as request: - response = request.get("/tu", cookies=cookies) + request.cookies = cookies + response = request.get("/tu") assert response.status_code == HTTPStatus.OK root = parse_root(response.text) @@ -364,7 +368,8 @@ def test_tu_index_table_paging(client, tu_user): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - response = request.get("/tu", cookies=cookies) + request.cookies = cookies + response = request.get("/tu") assert response.status_code == int(HTTPStatus.OK) # Parse lxml.etree root. @@ -394,7 +399,8 @@ def test_tu_index_table_paging(client, tu_user): # Now, get the next page of current votes. 
offset = 10 # Specify coff=10 with client as request: - response = request.get("/tu", cookies=cookies, params={"coff": offset}) + request.cookies = cookies + response = request.get("/tu", params={"coff": offset}) assert response.status_code == int(HTTPStatus.OK) old_rows = rows @@ -421,7 +427,8 @@ def test_tu_index_table_paging(client, tu_user): offset = 20 # Specify coff=10 with client as request: - response = request.get("/tu", cookies=cookies, params={"coff": offset}) + request.cookies = cookies + response = request.get("/tu", params={"coff": offset}) assert response.status_code == int(HTTPStatus.OK) # Do it again, we only have five left. @@ -470,7 +477,8 @@ def test_tu_index_sorting(client, tu_user): # Make a default request to /tu. cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - response = request.get("/tu", cookies=cookies) + request.cookies = cookies + response = request.get("/tu") assert response.status_code == int(HTTPStatus.OK) # Get lxml handles of the document. @@ -497,7 +505,8 @@ def test_tu_index_sorting(client, tu_user): # Make another request; one that sorts the current votes # in ascending order instead of the default descending order. with client as request: - response = request.get("/tu", cookies=cookies, params={"cby": "asc"}) + request.cookies = cookies + response = request.get("/tu", params={"cby": "asc"}) assert response.status_code == int(HTTPStatus.OK) # Get lxml handles of the document. @@ -548,7 +557,8 @@ def test_tu_index_last_votes( # Now, check that tu_user got populated in the .last-votes table. 
cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - response = request.get("/tu", cookies=cookies) + request.cookies = cookies + response = request.get("/tu") assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -581,12 +591,14 @@ def test_tu_proposal_unauthorized( cookies = {"AURSID": user.login(Request(), "testPassword")} endpoint = f"/tu/{proposal[2].ID}" with client as request: - response = request.get(endpoint, cookies=cookies) + request.cookies = cookies + response = request.get(endpoint) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/tu" with client as request: - response = request.post(endpoint, cookies=cookies, data={"decision": False}) + request.cookies = cookies + response = request.post(endpoint, data={"decision": False}) assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/tu" @@ -602,9 +614,8 @@ def test_tu_running_proposal( proposal_id = voteinfo.ID cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - response = request.get( - f"/tu/{proposal_id}", cookies=cookies, follow_redirects=True - ) + request.cookies = cookies + response = request.get(f"/tu/{proposal_id}") assert response.status_code == int(HTTPStatus.OK) # Alright, now let's continue on to verifying some markup. @@ -674,9 +685,8 @@ def test_tu_running_proposal( # Make another request now that we've voted. with client as request: - response = request.get( - "/tu", params={"id": voteinfo.ID}, cookies=cookies, follow_redirects=True - ) + request.cookies = cookies + response = request.get("/tu", params={"id": voteinfo.ID}, follow_redirects=True) assert response.status_code == int(HTTPStatus.OK) # Parse our new root. 
@@ -702,7 +712,8 @@ def test_tu_ended_proposal(client, proposal): proposal_id = voteinfo.ID cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - response = request.get(f"/tu/{proposal_id}", cookies=cookies) + request.cookies = cookies + response = request.get(f"/tu/{proposal_id}") assert response.status_code == int(HTTPStatus.OK) # Alright, now let's continue on to verifying some markup. @@ -734,7 +745,8 @@ def test_tu_proposal_vote_not_found(client, tu_user): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: data = {"decision": "Yes"} - response = request.post("/tu/1", cookies=cookies, data=data) + request.cookies = cookies + response = request.post("/tu/1", data=data) assert response.status_code == int(HTTPStatus.NOT_FOUND) @@ -747,7 +759,8 @@ def test_tu_proposal_vote(client, proposal): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: data = {"decision": "Yes"} - response = request.post(f"/tu/{voteinfo.ID}", cookies=cookies, data=data) + request.cookies = cookies + response = request.post(f"/tu/{voteinfo.ID}", data=data) assert response.status_code == int(HTTPStatus.OK) # Check that the proposal record got updated. 
@@ -775,7 +788,8 @@ def test_tu_proposal_vote_unauthorized( cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: data = {"decision": "Yes"} - response = request.post(f"/tu/{voteinfo.ID}", cookies=cookies, data=data) + request.cookies = cookies + response = request.post(f"/tu/{voteinfo.ID}", data=data) assert response.status_code == int(HTTPStatus.UNAUTHORIZED) root = parse_root(response.text) @@ -784,7 +798,8 @@ def test_tu_proposal_vote_unauthorized( with client as request: data = {"decision": "Yes"} - response = request.get(f"/tu/{voteinfo.ID}", cookies=cookies, params=data) + request.cookies = cookies + response = request.get(f"/tu/{voteinfo.ID}", params=data) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -802,7 +817,8 @@ def test_tu_proposal_vote_cant_self_vote(client, proposal): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: data = {"decision": "Yes"} - response = request.post(f"/tu/{voteinfo.ID}", cookies=cookies, data=data) + request.cookies = cookies + response = request.post(f"/tu/{voteinfo.ID}", data=data) assert response.status_code == int(HTTPStatus.BAD_REQUEST) root = parse_root(response.text) @@ -811,7 +827,8 @@ def test_tu_proposal_vote_cant_self_vote(client, proposal): with client as request: data = {"decision": "Yes"} - response = request.get(f"/tu/{voteinfo.ID}", cookies=cookies, params=data) + request.cookies = cookies + response = request.get(f"/tu/{voteinfo.ID}", params=data) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -830,7 +847,8 @@ def test_tu_proposal_vote_already_voted(client, proposal): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: data = {"decision": "Yes"} - response = request.post(f"/tu/{voteinfo.ID}", cookies=cookies, data=data) + request.cookies = cookies + response = request.post(f"/tu/{voteinfo.ID}", data=data) assert 
response.status_code == int(HTTPStatus.BAD_REQUEST) root = parse_root(response.text) @@ -839,7 +857,8 @@ def test_tu_proposal_vote_already_voted(client, proposal): with client as request: data = {"decision": "Yes"} - response = request.get(f"/tu/{voteinfo.ID}", cookies=cookies, params=data) + request.cookies = cookies + response = request.get(f"/tu/{voteinfo.ID}", params=data) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -853,7 +872,8 @@ def test_tu_proposal_vote_invalid_decision(client, proposal): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: data = {"decision": "EVIL"} - response = request.post(f"/tu/{voteinfo.ID}", cookies=cookies, data=data) + request.cookies = cookies + response = request.post(f"/tu/{voteinfo.ID}", data=data) assert response.status_code == int(HTTPStatus.BAD_REQUEST) assert response.text == "Invalid 'decision' value." @@ -861,7 +881,8 @@ def test_tu_proposal_vote_invalid_decision(client, proposal): def test_tu_addvote(client: TestClient, tu_user: User): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - response = request.get("/addvote", cookies=cookies) + request.cookies = cookies + response = request.get("/addvote") assert response.status_code == int(HTTPStatus.OK) @@ -870,12 +891,14 @@ def test_tu_addvote_unauthorized( ): cookies = {"AURSID": user.login(Request(), "testPassword")} with client as request: - response = request.get("/addvote", cookies=cookies) + request.cookies = cookies + response = request.get("/addvote") assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/tu" with client as request: - response = request.post("/addvote", cookies=cookies) + request.cookies = cookies + response = request.post("/addvote") assert response.status_code == int(HTTPStatus.SEE_OTHER) assert response.headers.get("location") == "/tu" @@ -883,7 +906,8 @@ def 
test_tu_addvote_unauthorized( def test_tu_addvote_invalid_type(client: TestClient, tu_user: User): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} with client as request: - response = request.get("/addvote", params={"type": "faketype"}, cookies=cookies) + request.cookies = cookies + response = request.get("/addvote", params={"type": "faketype"}) assert response.status_code == int(HTTPStatus.OK) root = parse_root(response.text) @@ -897,7 +921,8 @@ def test_tu_addvote_post(client: TestClient, tu_user: User, user: User): data = {"user": user.Username, "type": "add_tu", "agenda": "Blah"} with client as request: - response = request.post("/addvote", cookies=cookies, data=data) + request.cookies = cookies + response = request.post("/addvote", data=data) assert response.status_code == int(HTTPStatus.SEE_OTHER) voteinfo = db.query(TUVoteInfo, TUVoteInfo.Agenda == "Blah").first() @@ -912,14 +937,16 @@ def test_tu_addvote_post_cant_duplicate_username( data = {"user": user.Username, "type": "add_tu", "agenda": "Blah"} with client as request: - response = request.post("/addvote", cookies=cookies, data=data) + request.cookies = cookies + response = request.post("/addvote", data=data) assert response.status_code == int(HTTPStatus.SEE_OTHER) voteinfo = db.query(TUVoteInfo, TUVoteInfo.Agenda == "Blah").first() assert voteinfo is not None with client as request: - response = request.post("/addvote", cookies=cookies, data=data) + request.cookies = cookies + response = request.post("/addvote", data=data) assert response.status_code == int(HTTPStatus.BAD_REQUEST) @@ -927,7 +954,8 @@ def test_tu_addvote_post_invalid_username(client: TestClient, tu_user: User): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} data = {"user": "fakeusername"} with client as request: - response = request.post("/addvote", cookies=cookies, data=data) + request.cookies = cookies + response = request.post("/addvote", data=data) assert response.status_code == 
int(HTTPStatus.NOT_FOUND) @@ -935,7 +963,8 @@ def test_tu_addvote_post_invalid_type(client: TestClient, tu_user: User, user: U cookies = {"AURSID": tu_user.login(Request(), "testPassword")} data = {"user": user.Username} with client as request: - response = request.post("/addvote", cookies=cookies, data=data) + request.cookies = cookies + response = request.post("/addvote", data=data) assert response.status_code == int(HTTPStatus.BAD_REQUEST) @@ -943,7 +972,8 @@ def test_tu_addvote_post_invalid_agenda(client: TestClient, tu_user: User, user: cookies = {"AURSID": tu_user.login(Request(), "testPassword")} data = {"user": user.Username, "type": "add_tu"} with client as request: - response = request.post("/addvote", cookies=cookies, data=data) + request.cookies = cookies + response = request.post("/addvote", data=data) assert response.status_code == int(HTTPStatus.BAD_REQUEST) @@ -952,5 +982,6 @@ def test_tu_addvote_post_bylaws(client: TestClient, tu_user: User): cookies = {"AURSID": tu_user.login(Request(), "testPassword")} data = {"type": "bylaws", "agenda": "Blah blah!"} with client as request: - response = request.post("/addvote", cookies=cookies, data=data) + request.cookies = cookies + response = request.post("/addvote", data=data) assert response.status_code == int(HTTPStatus.SEE_OTHER) From a08681ba2391b955cc39a8f62dbddcc153ea6cca Mon Sep 17 00:00:00 2001 From: moson-mo Date: Fri, 25 Nov 2022 12:24:04 +0100 Subject: [PATCH 164/415] fix: Add "Show more..." link for "Required by" Fix glitch on the package page: "Show more..." 
not displayed for the "Required by" list Fix test case: Function name does not start with "test" hence it was never executed during test runs Issue report: #363 Signed-off-by: moson-mo --- templates/partials/packages/package_metadata.html | 10 ++++++---- test/test_packages_routes.py | 4 ++-- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/templates/partials/packages/package_metadata.html b/templates/partials/packages/package_metadata.html index 123b994d..50d38b48 100644 --- a/templates/partials/packages/package_metadata.html +++ b/templates/partials/packages/package_metadata.html @@ -62,10 +62,12 @@ {{ dep | dep_extra }}
  • {% endfor %} - {% if not all_reqs and (required_by | length) > max_listing %} - - {{ "Show %d more" | tr | format(reqs_count - (required_by | length)) }}... - + {% if not all_reqs and reqs_count > max_listing %} +
  • + + {{ "Show %d more" | tr | format(reqs_count - (required_by | length)) }}... + +
  • {% endif %} diff --git a/test/test_packages_routes.py b/test/test_packages_routes.py index 0da6cfab..c8986b9c 100644 --- a/test/test_packages_routes.py +++ b/test/test_packages_routes.py @@ -352,7 +352,7 @@ def test_package_split_description(client: TestClient, user: User): assert row.text == pkg_b.Description -def paged_depends_required(client: TestClient, package: Package): +def test_paged_depends_required(client: TestClient, package: Package): maint = package.PackageBase.Maintainer new_pkgs = [] @@ -360,7 +360,7 @@ def paged_depends_required(client: TestClient, package: Package): # Create 25 new packages that'll be used to depend on our package. for i in range(26): base = db.create(PackageBase, Name=f"new_pkg{i}", Maintainer=maint) - new_pkgs.append(db.create(Package, Name=base.Name)) + new_pkgs.append(db.create(Package, Name=base.Name, PackageBase=base)) # Create 25 deps. for i in range(25): From 7864ac6dfeafd3995063e3b58cfbd393fb1b6551 Mon Sep 17 00:00:00 2001 From: moson-mo Date: Sun, 27 Nov 2022 10:33:58 +0100 Subject: [PATCH 165/415] fix: search-by parameter for keyword links Fixes: Keyword-links on the package page pass wrong query-parameter. 
Thus a name/description search is performed instead of keywords Issue report: #397 Signed-off-by: moson-mo --- templates/partials/packages/details.html | 2 +- test/test_packages_routes.py | 12 ++++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/templates/partials/packages/details.html b/templates/partials/packages/details.html index 8ecf9bd8..697ef724 100644 --- a/templates/partials/packages/details.html +++ b/templates/partials/packages/details.html @@ -53,7 +53,7 @@ {% for keyword in pkgbase.keywords.all() %} {{ keyword.Keyword }} diff --git a/test/test_packages_routes.py b/test/test_packages_routes.py index c8986b9c..bf179963 100644 --- a/test/test_packages_routes.py +++ b/test/test_packages_routes.py @@ -271,6 +271,13 @@ def test_package(client: TestClient, package: Package): db.create(PackageLicense, PackageID=package.ID, License=licenses[0]) db.create(PackageLicense, PackageID=package.ID, License=licenses[1]) + # Create some keywords + keywords = ["test1", "test2"] + for keyword in keywords: + db.create( + PackageKeyword, PackageBaseID=package.PackageBaseID, Keyword=keyword + ) + with client as request: resp = request.get(package_endpoint(package)) assert resp.status_code == int(HTTPStatus.OK) @@ -307,6 +314,11 @@ def test_package(client: TestClient, package: Package): expected = ["test_conflict1", "test_conflict2"] assert conflicts[0].text.strip() == ", ".join(expected) + keywords = root.xpath('//a[@class="keyword"]') + expected = ["test1", "test2"] + for i, keyword in enumerate(expected): + assert keywords[i].text.strip() == keyword + def test_package_split_description(client: TestClient, user: User): From c74772cb3610c7f5be270f0edb1416fc9d1476ed Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Sun, 27 Nov 2022 10:34:07 +0000 Subject: [PATCH 166/415] chore: bump to v6.1.9 Signed-off-by: Leonidas Spyropoulos --- aurweb/config.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/aurweb/config.py b/aurweb/config.py index 49806738..8130376d 100644 --- a/aurweb/config.py +++ b/aurweb/config.py @@ -5,7 +5,7 @@ from typing import Any # Publicly visible version of aurweb. This is used to display # aurweb versioning in the footer and must be maintained. # Todo: Make this dynamic/automated. -AURWEB_VERSION = "v6.1.8" +AURWEB_VERSION = "v6.1.9" _parser = None diff --git a/pyproject.toml b/pyproject.toml index 762a52c1..ce5b0b43 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ combine_as_imports = true # [tool.poetry] name = "aurweb" -version = "v6.1.8" +version = "v6.1.9" license = "GPL-2.0-only" description = "Source code for the Arch User Repository's website" homepage = "https://aur.archlinux.org" From 8027ff936c030ebcd43bf4d8ae3a244fb3d28a56 Mon Sep 17 00:00:00 2001 From: moson-mo Date: Mon, 28 Nov 2022 16:57:27 +0100 Subject: [PATCH 167/415] fix: alignment of pagination element pagination for comments should appear on the right instead of center Issue report: #390 Signed-off-by: moson-mo --- templates/partials/packages/comments.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/templates/partials/packages/comments.html b/templates/partials/packages/comments.html index 9d49bc86..f00d62f2 100644 --- a/templates/partials/packages/comments.html +++ b/templates/partials/packages/comments.html @@ -39,7 +39,7 @@ {% if pages > 1 %}

    {{ page | pager_nav(comments_total, prefix) | safe }} -

    +

    {% endif %} {% for comment in comments.all() %} From 2b8dedb3a2dcfa4442591bf589e1586105064866 Mon Sep 17 00:00:00 2001 From: moson-mo Date: Mon, 28 Nov 2022 17:01:44 +0100 Subject: [PATCH 168/415] feat: add pagination element below comments other pages like the "package search" have this as well. Issue report: #390 Signed-off-by: moson-mo --- templates/partials/packages/comments.html | 7 +++++++ web/html/css/aurweb.css | 5 +++++ 2 files changed, 12 insertions(+) diff --git a/templates/partials/packages/comments.html b/templates/partials/packages/comments.html index f00d62f2..55421bfa 100644 --- a/templates/partials/packages/comments.html +++ b/templates/partials/packages/comments.html @@ -45,5 +45,12 @@ {% for comment in comments.all() %} {% include "partials/packages/comment.html" %} {% endfor %} + {% endif %} diff --git a/web/html/css/aurweb.css b/web/html/css/aurweb.css index 59f7ed1e..64a65742 100644 --- a/web/html/css/aurweb.css +++ b/web/html/css/aurweb.css @@ -193,6 +193,11 @@ label.confirmation { align-self: flex-end; } +.comments-footer { + display: flex; + justify-content: flex-end; +} + .comment-header { clear: both; font-size: 1em; From d8e91d058cd494dfb7812994796d1a46eb532f6b Mon Sep 17 00:00:00 2001 From: moson-mo Date: Thu, 22 Dec 2022 12:41:29 +0100 Subject: [PATCH 169/415] fix(rpc): provides search should return name match We need to return packages matching on the name as well. 
(A package always provides itself) Signed-off-by: moson-mo --- aurweb/rpc.py | 12 +++++++++++- test/test_rpc.py | 13 +++++++++++++ 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/aurweb/rpc.py b/aurweb/rpc.py index 2aa27500..1440703a 100644 --- a/aurweb/rpc.py +++ b/aurweb/rpc.py @@ -376,8 +376,18 @@ class RPC: search.search_by(by, arg) max_results = config.getint("options", "max_rpc_results") - results = self.entities(search.results()).limit(max_results + 1).all() + query = self.entities(search.results()).limit(max_results + 1) + + # For "provides", we need to union our relation search + # with an exact search since a package always provides itself. + # Turns out that doing this with an OR statement is extremely slow + if by == "provides": + search = RPCSearch() + search._search_by_exact_name(arg) + query = query.union(self.entities(search.results())) + + results = query.all() if len(results) > max_results: raise RPCError("Too many package results.") diff --git a/test/test_rpc.py b/test/test_rpc.py index 04efd38f..92714ff1 100644 --- a/test/test_rpc.py +++ b/test/test_rpc.py @@ -920,6 +920,19 @@ def test_rpc_search_provides( assert result.get("Name") == packages[0].Name +def test_rpc_search_provides_self( + client: TestClient, packages: list[Package], relations: list[PackageRelation] +): + params = {"v": 5, "type": "search", "by": "provides", "arg": "big-chungus"} + with client as request: + response = request.get("/rpc", params=params) + data = response.json() + # expected to return "big-chungus" + assert data.get("resultcount") == 1 + result = data.get("results")[0] + assert result.get("Name") == packages[0].Name + + def test_rpc_search_conflicts( client: TestClient, packages: list[Package], relations: list[PackageRelation] ): From 7a9448a3e52e216f4f11b996be12ab87b99fe4bc Mon Sep 17 00:00:00 2001 From: moson-mo Date: Tue, 29 Nov 2022 14:45:24 +0100 Subject: [PATCH 170/415] perf: improve packages search-query Improves performance for queries with 
large result sets. The "group by" clause can be removed for all search types but the keywords. Signed-off-by: moson-mo --- aurweb/packages/search.py | 5 ++++- aurweb/routers/packages.py | 28 ++++++++++++---------------- test/test_packages_routes.py | 17 +++++++++++++++++ 3 files changed, 33 insertions(+), 17 deletions(-) diff --git a/aurweb/packages/search.py b/aurweb/packages/search.py index c0740cda..d5e00110 100644 --- a/aurweb/packages/search.py +++ b/aurweb/packages/search.py @@ -136,7 +136,10 @@ class PackageSearch: self._join_user() self._join_keywords() keywords = set(k.lower() for k in keywords) - self.query = self.query.filter(PackageKeyword.Keyword.in_(keywords)) + self.query = self.query.filter(PackageKeyword.Keyword.in_(keywords)).group_by( + models.Package.Name + ) + return self def _search_by_maintainer(self, keywords: str) -> orm.Query: diff --git a/aurweb/routers/packages.py b/aurweb/routers/packages.py index a4aac496..6a943dbf 100644 --- a/aurweb/routers/packages.py +++ b/aurweb/routers/packages.py @@ -93,22 +93,18 @@ async def packages_get( search.sort_by(sort_by, sort_order) # Insert search results into the context. 
- results = ( - search.results() - .with_entities( - models.Package.ID, - models.Package.Name, - models.Package.PackageBaseID, - models.Package.Version, - models.Package.Description, - models.PackageBase.Popularity, - models.PackageBase.NumVotes, - models.PackageBase.OutOfDateTS, - models.User.Username.label("Maintainer"), - models.PackageVote.PackageBaseID.label("Voted"), - models.PackageNotification.PackageBaseID.label("Notify"), - ) - .group_by(models.Package.Name) + results = search.results().with_entities( + models.Package.ID, + models.Package.Name, + models.Package.PackageBaseID, + models.Package.Version, + models.Package.Description, + models.PackageBase.Popularity, + models.PackageBase.NumVotes, + models.PackageBase.OutOfDateTS, + models.User.Username.label("Maintainer"), + models.PackageVote.PackageBaseID.label("Voted"), + models.PackageNotification.PackageBaseID.label("Notify"), ) packages = results.limit(per_page).offset(offset) diff --git a/test/test_packages_routes.py b/test/test_packages_routes.py index bf179963..f9cea694 100644 --- a/test/test_packages_routes.py +++ b/test/test_packages_routes.py @@ -740,6 +740,23 @@ def test_packages_search_by_keywords(client: TestClient, packages: list[Package] rows = root.xpath('//table[@class="results"]/tbody/tr') assert len(rows) == 1 + # Now let's add another keyword to the same package + with db.begin(): + db.create( + PackageKeyword, PackageBase=package.PackageBase, Keyword="testKeyword2" + ) + + # And request packages with both keywords, we should still get 1 result. 
+ with client as request: + response = request.get( + "/packages", params={"SeB": "k", "K": "testKeyword testKeyword2"} + ) + assert response.status_code == int(HTTPStatus.OK) + + root = parse_root(response.text) + rows = root.xpath('//table[@class="results"]/tbody/tr') + assert len(rows) == 1 + def test_packages_search_by_maintainer( client: TestClient, maintainer: User, package: Package From 413de914caa20f1dd848c9b59e6d8d065a3b8230 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:28:17 -0800 Subject: [PATCH 171/415] fix: remove trailing whitespace lint check for ./po Signed-off-by: Kevin Morris --- .pre-commit-config.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ab4240c9..b2baec65 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,6 +8,7 @@ repos: - id: check-toml - id: end-of-file-fixer - id: trailing-whitespace + exclude: ^po/ - id: debug-statements - repo: https://github.com/myint/autoflake From 65266d752b2671a8d175e85aafd8b27ae638aba0 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:08:13 -0800 Subject: [PATCH 172/415] update-ar translations --- po/ar.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/ar.po b/po/ar.po index ea0e03cf..1fed4f4f 100644 --- a/po/ar.po +++ b/po/ar.po @@ -1,17 +1,17 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # safa1996alfulaij , 2015 # ØµÙØ§ الÙليج , 2015-2016 msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: ØµÙØ§ الÙليج , 2015-2016\n" "Language-Team: Arabic (http://www.transifex.com/lfleischer/aurweb/language/ar/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From 3a13eeb744e603d06bbe57025af5ebabaf3ba615 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:08:16 -0800 Subject: [PATCH 173/415] update-az translations --- po/az.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/az.po b/po/az.po index 1c7ca207..df14a5b0 100644 --- a/po/az.po +++ b/po/az.po @@ -1,15 +1,15 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: FULL NAME \n" "Language-Team: Azerbaijani (http://www.transifex.com/lfleischer/aurweb/language/az/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From b89fe9eb1397529982c6ab099abef30214e7ce2e Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:08:19 -0800 Subject: [PATCH 174/415] update-az_AZ translations --- po/az_AZ.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/az_AZ.po b/po/az_AZ.po index 2f5ceabd..293d7b0d 100644 --- a/po/az_AZ.po +++ b/po/az_AZ.po @@ -1,15 +1,15 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: FULL NAME \n" "Language-Team: Azerbaijani (Azerbaijan) (http://www.transifex.com/lfleischer/aurweb/language/az_AZ/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From 9229220e2107833846565f54f7cf814086f8b04d Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:08:22 -0800 Subject: [PATCH 175/415] update-bg translations --- po/bg.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/bg.po b/po/bg.po index c7c70021..f373b761 100644 --- a/po/bg.po +++ b/po/bg.po @@ -1,15 +1,15 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: FULL NAME \n" "Language-Team: Bulgarian (http://www.transifex.com/lfleischer/aurweb/language/bg/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From d6661403aae6ebc40d68a2b47170bbd626a79f8e Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:08:26 -0800 Subject: [PATCH 176/415] update-ca translations --- po/ca.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/ca.po b/po/ca.po index d43c84dc..86d77e56 100644 --- a/po/ca.po +++ b/po/ca.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # Adolfo Jayme-Barrientos, 2014 # Hector Mtz-Seara , 2011,2013 @@ -10,10 +10,10 @@ msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Ãcar , 2021\n" "Language-Team: Catalan (http://www.transifex.com/lfleischer/aurweb/language/ca/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From 618a382e6c32e3eef2efc20b3a15877754518cb4 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:08:29 -0800 Subject: [PATCH 177/415] update-ca_ES translations --- po/ca_ES.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/ca_ES.po b/po/ca_ES.po index aac7b03f..5c05ba0c 100644 --- a/po/ca_ES.po +++ b/po/ca_ES.po @@ -1,15 +1,15 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: FULL NAME \n" "Language-Team: Catalan (Spain) (http://www.transifex.com/lfleischer/aurweb/language/ca_ES/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From da458ae70ab1c1c05c1d0965bb31990f09769676 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:08:33 -0800 Subject: [PATCH 178/415] update-cs translations --- po/cs.po | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/po/cs.po b/po/cs.po index 59a24007..9086bd75 100644 --- a/po/cs.po +++ b/po/cs.po @@ -1,11 +1,11 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # Daniel Milde , 2017 # Daniel Peukert , 2021 -# Daniel Peukert , 2021 +# Daniel Peukert , 2021-2022 # Jaroslav Lichtblau , 2015-2016 # Jaroslav Lichtblau , 2014 # Jiří Vírava , 2017-2018 @@ -15,10 +15,10 @@ msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Daniel Peukert , 2021-2022\n" "Language-Team: Czech (http://www.transifex.com/lfleischer/aurweb/language/cs/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" @@ -763,7 +763,7 @@ msgstr "Hlasující" msgid "" "Account registration has been disabled for your IP address, probably due to " "sustained spam attacks. Sorry for the inconvenience." -msgstr "Registrace úÄtu byla pro vaÅ¡i IP adresu zakázána, pravdÄ›podobnÄ› kvůli trvalým spamovým útokům. Omluvám se za nepříjemnost." +msgstr "Registrace úÄtu byla pro vaÅ¡i IP adresu zakázána, pravdÄ›podobnÄ› kvůli trvalým spamovým útokům. Za nepříjemnosti se omlouváme." #: lib/acctfuncs.inc.php msgid "Missing User ID" @@ -978,7 +978,7 @@ msgstr "Informace o balíÄku nebyly nalezeny." #: aurweb/routers/auth.py msgid "Bad Referer header." -msgstr "" +msgstr "Chybná hlaviÄka Referer" #: aurweb/routers/packages.py msgid "You did not select any packages to be notified about." @@ -2322,33 +2322,33 @@ msgstr "Pro zmÄ›nu typu tohoto úÄtu na %s nemáte oprávnÄ›ní." #: aurweb/packages/requests.py msgid "No due existing orphan requests to accept for %s." -msgstr "" +msgstr "Žádné žádosti o odebrání vlastnictví balíÄku %s momentálnÄ› neexistují." #: aurweb/asgi.py msgid "Internal Server Error" -msgstr "" +msgstr "Interní chyba serveru" #: templates/errors/500.html msgid "A fatal error has occurred." 
-msgstr "" +msgstr "DoÅ¡lo k fatální chybÄ›." #: templates/errors/500.html msgid "" "Details have been logged and will be reviewed by the postmaster posthaste. " "We apologize for any inconvenience this may have caused." -msgstr "" +msgstr "Detaily chyby byly zalogovány a budou co nejdříve zkontrolovány administrátorem. Za jakékoli způsobené nepříjemnosti se omlouváme." #: aurweb/scripts/notify.py msgid "AUR Server Error" -msgstr "" +msgstr "Chyba serveru AUR" #: templates/pkgbase/merge.html templates/packages/delete.html #: templates/packages/disown.html msgid "Related package request closure comments..." -msgstr "" +msgstr "Komentáře k uzavÅ™ení žádostí vztahujících se k tomuto balíÄku..." #: templates/pkgbase/merge.html templates/packages/delete.html msgid "" "This action will close any pending package requests related to it. If " "%sComments%s are omitted, a closure comment will be autogenerated." -msgstr "" +msgstr "Tato akce uzavÅ™e vÅ¡echny žádosti Äekající na vyřízení vztahující se k tomuto balíÄku. Pokud není vyplnÄ›no textové Pole %sKomentáře\"%s, komentář k uzavÅ™ení žádostí bude vygenerován automaticky." From 5a7a9c2c9f8734842510cadc70b2e090a77c03dd Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:08:36 -0800 Subject: [PATCH 179/415] update-da translations --- po/da.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/da.po b/po/da.po index 822b5506..89f6a635 100644 --- a/po/da.po +++ b/po/da.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # Linuxbruger , 2018 # Louis Tim Larsen , 2015 @@ -9,10 +9,10 @@ msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Linuxbruger , 2018\n" "Language-Team: Danish (http://www.transifex.com/lfleischer/aurweb/language/da/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From 791e715aee661d67152ca2bf20714d9697586590 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:08:39 -0800 Subject: [PATCH 180/415] update-de translations --- po/de.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/de.po b/po/de.po index a0f8fb0f..894494c6 100644 --- a/po/de.po +++ b/po/de.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # 9d91e189c22376bb4ee81489bc27fc28, 2013 # 9d91e189c22376bb4ee81489bc27fc28, 2013-2014 @@ -27,10 +27,10 @@ msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Stefan Auditor , 2021\n" "Language-Team: German (http://www.transifex.com/lfleischer/aurweb/language/de/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From 6bf408775c249f0938ce7dd59066bc91a2c872a7 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:08:43 -0800 Subject: [PATCH 181/415] update-el translations --- po/el.po | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/po/el.po b/po/el.po index 37db785c..2b665c34 100644 --- a/po/el.po +++ b/po/el.po @@ -1,23 +1,23 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # Achilleas Pipinellis, 2014 # Achilleas Pipinellis, 2013 # Achilleas Pipinellis, 2013 # Achilleas Pipinellis, 2011 # Achilleas Pipinellis, 2012 -# Leonidas Spyropoulos, 2021 +# Leonidas Spyropoulos, 2021-2022 # Lukas Fleischer , 2011 # flamelab , 2011 msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Leonidas Spyropoulos, 2021-2022\n" "Language-Team: Greek (http://www.transifex.com/lfleischer/aurweb/language/el/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" @@ -35,7 +35,7 @@ msgstr "Μας συγχωÏείτε, η σελίδα που ζητήσατε δ #: html/404.php template/pkgreq_close_form.php msgid "Note" -msgstr "" +msgstr "Σημείωση" #: html/404.php msgid "Git clone URLs are not meant to be opened in a browser." From aeb38b599d68ac1c7cf50b3fdd22a3b222db688c Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:08:46 -0800 Subject: [PATCH 182/415] update-es translations --- po/es.po | 27 ++++++++++++++++----------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/po/es.po b/po/es.po index 9cbe98a6..b6035d5b 100644 --- a/po/es.po +++ b/po/es.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # Adolfo Jayme-Barrientos, 2015 # Angel Velasquez , 2011 @@ -9,25 +9,25 @@ # Lukas Fleischer , 2011 # neiko , 2011 # Nicolás de la Torre , 2012 -# prflr88 , 2012 -# prflr88 , 2016-2017 -# prflr88 , 2013-2016 -# prflr88 , 2016-2017 -# prflr88 , 2016 -# prflr88 , 2019 +# Pablo Lezaeta Reyes , 2012 +# Pablo Lezaeta Reyes , 2016-2017 +# Pablo Lezaeta Reyes , 2013-2016 +# Pablo Lezaeta Reyes , 2016-2017 +# Pablo Lezaeta Reyes , 2016 +# Pablo Lezaeta Reyes , 2019 msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Pablo Lezaeta Reyes , 2019\n" "Language-Team: Spanish (http://www.transifex.com/lfleischer/aurweb/language/es/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Language: es\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" +"Plural-Forms: nplurals=3; plural=n == 1 ? 0 : n != 0 && n % 1000000 == 0 ? 1 : 2;\n" #: html/404.php msgid "Page Not Found" @@ -1590,6 +1590,7 @@ msgid "%d pending request" msgid_plural "%d pending requests" msgstr[0] "Hay %d solicitud pendiente" msgstr[1] "Hay %d solicitudes pendientes" +msgstr[2] "Hay %d solicitudes pendientes" #: template/pkgbase_actions.php msgid "Adopt Package" @@ -1864,6 +1865,7 @@ msgid "%d package request found." msgid_plural "%d package requests found." msgstr[0] "Se encontró %d solicitud para el paquete." msgstr[1] "Se encontraron %d solicitudes para el paquete." +msgstr[2] "Se encontraron %d solicitudes para el paquete." 
#: template/pkgreq_results.php template/pkg_search_results.php #, php-format @@ -1888,6 +1890,7 @@ msgid "~%d day left" msgid_plural "~%d days left" msgstr[0] "~%d día restante" msgstr[1] "~%d días restantes" +msgstr[2] "~%d días restantes" #: template/pkgreq_results.php #, php-format @@ -1895,6 +1898,7 @@ msgid "~%d hour left" msgid_plural "~%d hours left" msgstr[0] "Aprox. %d hora restante" msgstr[1] "Aprox. %d horas restantes" +msgstr[2] "Aprox. %d horas restantes" #: template/pkgreq_results.php msgid "<1 hour left" @@ -2023,6 +2027,7 @@ msgid "%d package found." msgid_plural "%d packages found." msgstr[0] "%d paquete fue encontrado." msgstr[1] "%d paquetes fueron encontrados." +msgstr[2] "%d paquetes fueron encontrados." #: template/pkg_search_results.php msgid "Version" From 076245e061786e762cc705fa5ad49f7292b456db Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:08:49 -0800 Subject: [PATCH 183/415] update-et translations --- po/et.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/et.po b/po/et.po index 44f2b3a0..4092823b 100644 --- a/po/et.po +++ b/po/et.po @@ -1,15 +1,15 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: FULL NAME \n" "Language-Team: Estonian (http://www.transifex.com/lfleischer/aurweb/language/et/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From bce9bedaf460b8efd8c5e2eb9e9cde5da4384f7c Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:08:53 -0800 Subject: [PATCH 184/415] update-fi translations --- po/fi.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/fi.po b/po/fi.po index 636681b7..98b3a03b 100644 --- a/po/fi.po +++ b/po/fi.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # Elias Autio, 2016 # Jesse Jaara , 2011-2012,2015 @@ -10,10 +10,10 @@ msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Nikolay Korotkiy , 2018-2019\n" "Language-Team: Finnish (http://www.transifex.com/lfleischer/aurweb/language/fi/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From 3fa9047864d1a872f20027f26837ac1dfd9c971f Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:08:56 -0800 Subject: [PATCH 185/415] update-fi_FI translations --- po/fi_FI.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/fi_FI.po b/po/fi_FI.po index 17a58b4a..cd516edc 100644 --- a/po/fi_FI.po +++ b/po/fi_FI.po @@ -1,15 +1,15 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: FULL NAME \n" "Language-Team: Finnish (Finland) (http://www.transifex.com/lfleischer/aurweb/language/fi_FI/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From ff01947f3d260981bfdecf8488b54a9995256a6b Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:08:59 -0800 Subject: [PATCH 186/415] update-fr translations --- po/fr.po | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/po/fr.po b/po/fr.po index 03192d48..2b0c5bab 100644 --- a/po/fr.po +++ b/po/fr.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # Alexandre Macabies , 2018 # Antoine Lubineau , 2012 @@ -10,7 +10,7 @@ # demostanis , 2020 # Kristien , 2020 # lordheavy , 2011 -# lordheavy , 2013-2014,2018 +# lordheavy , 2013-2014,2018,2022 # lordheavy , 2011-2012 # Lukas Fleischer , 2011 # Thibault , 2020 @@ -18,16 +18,16 @@ msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: lordheavy , 2013-2014,2018,2022\n" "Language-Team: French (http://www.transifex.com/lfleischer/aurweb/language/fr/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Language: fr\n" -"Plural-Forms: nplurals=2; plural=(n > 1);\n" +"Plural-Forms: nplurals=3; plural=(n == 0 || n == 1) ? 0 : n != 0 && n % 1000000 == 0 ? 1 : 2;\n" #: html/404.php msgid "Page Not Found" @@ -1590,6 +1590,7 @@ msgid "%d pending request" msgid_plural "%d pending requests" msgstr[0] "%d requête en attente" msgstr[1] "%d requêtes en attente" +msgstr[2] "%d requêtes en attente" #: template/pkgbase_actions.php msgid "Adopt Package" @@ -1864,6 +1865,7 @@ msgid "%d package request found." msgid_plural "%d package requests found." msgstr[0] "%d paquet demandé trouvé." msgstr[1] "%d paquets demandés trouvés." +msgstr[2] "%d paquets demandés trouvés." 
#: template/pkgreq_results.php template/pkg_search_results.php #, php-format @@ -1888,6 +1890,7 @@ msgid "~%d day left" msgid_plural "~%d days left" msgstr[0] "~%d jour restant" msgstr[1] "~%d jours restants" +msgstr[2] "~%d jours restants" #: template/pkgreq_results.php #, php-format @@ -1895,6 +1898,7 @@ msgid "~%d hour left" msgid_plural "~%d hours left" msgstr[0] "~%d heure restante" msgstr[1] "%d heures restantes" +msgstr[2] "%d heures restantes" #: template/pkgreq_results.php msgid "<1 hour left" @@ -2023,6 +2027,7 @@ msgid "%d package found." msgid_plural "%d packages found." msgstr[0] "%d paquet trouvé." msgstr[1] "%d paquets trouvés." +msgstr[2] "%d paquets trouvés." #: template/pkg_search_results.php msgid "Version" @@ -2319,7 +2324,7 @@ msgstr "" #: aurweb/asgi.py msgid "Internal Server Error" -msgstr "" +msgstr "Erreur interne du serveur" #: templates/errors/500.html msgid "A fatal error has occurred." From 9385c14f77d18d28ade7e2fa681412133f3daea5 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:09:03 -0800 Subject: [PATCH 187/415] update-he translations --- po/he.po | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/po/he.po b/po/he.po index 936e93a1..88f2fddd 100644 --- a/po/he.po +++ b/po/he.po @@ -1,18 +1,18 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: -# GenghisKhan , 2016 +# gk , 2016 # Lukas Fleischer , 2011 # Yaron Shahrabani , 2016-2022 msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Yaron Shahrabani , 2016-2022\n" "Language-Team: Hebrew (http://www.transifex.com/lfleischer/aurweb/language/he/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" @@ -2339,10 +2339,10 @@ msgstr "שגי×ת שרת ×”Ö¾AUR" #: templates/pkgbase/merge.html templates/packages/delete.html #: templates/packages/disown.html msgid "Related package request closure comments..." -msgstr "" +msgstr "הערות הסגירה התו×מות של בקשת החבילה…" #: templates/pkgbase/merge.html templates/packages/delete.html msgid "" "This action will close any pending package requests related to it. If " "%sComments%s are omitted, a closure comment will be autogenerated." -msgstr "" +msgstr "פעולה זו תסגור בקשות חבילות ממתינות שקשורות ×ליה. ×× %sתגובות%s מושמטות, תיווצר תגובת סגירה ×וטומטית." From b209cd962c25f0f51ea31625b7ede3784407c16c Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:09:06 -0800 Subject: [PATCH 188/415] update-hi_IN translations --- po/hi_IN.po | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/po/hi_IN.po b/po/hi_IN.po index 114c9461..1ba83dae 100644 --- a/po/hi_IN.po +++ b/po/hi_IN.po @@ -1,16 +1,16 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: -# Panwar108 , 2018,2020-2021 +# Panwar108 , 2018,2020-2022 msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Panwar108 , 2018,2020-2022\n" "Language-Team: Hindi (India) (http://www.transifex.com/lfleischer/aurweb/language/hi_IN/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" @@ -897,7 +897,7 @@ msgstr "अनà¥à¤šà¤¿à¤¤ उपयोकà¥à¤¤à¤¾ नाम या कूट #: lib/acctfuncs.inc.php msgid "An error occurred trying to generate a user session." -msgstr "उपयोकà¥à¤¤à¤¾ सतà¥à¤° बनाने हेतॠतà¥à¤°à¥à¤Ÿà¤¿à¥¤" +msgstr "उपयोकà¥à¤¤à¤¾ सतà¥à¤° बनाने समय तà¥à¤°à¥à¤Ÿà¤¿ हà¥à¤ˆà¥¤" #: lib/acctfuncs.inc.php msgid "Invalid e-mail and reset key combination." @@ -2308,29 +2308,29 @@ msgstr "%s सà¥à¤µà¥€à¤•ारनें हेतॠकोई निररॠ#: aurweb/asgi.py msgid "Internal Server Error" -msgstr "" +msgstr "आंतरिक सरà¥à¤µà¤° तà¥à¤°à¥à¤Ÿà¤¿" #: templates/errors/500.html msgid "A fatal error has occurred." -msgstr "" +msgstr "गंभीर तà¥à¤°à¥à¤Ÿà¤¿ हà¥à¤ˆà¥¤" #: templates/errors/500.html msgid "" "Details have been logged and will be reviewed by the postmaster posthaste. " "We apologize for any inconvenience this may have caused." -msgstr "" +msgstr "संबंधित सूचना लॉग फाइल में दरà¥à¤œ की जा चà¥à¤•ी है à¤à¤µà¤‚ अतिशीघà¥à¤° ही पोसà¥à¤Ÿ पà¥à¤°à¤¬à¤‚धक दà¥à¤µà¤¾à¤°à¤¾ उसकी समीकà¥à¤·à¤¾ की जाà¤à¤—ी। इस कारण हà¥à¤ˆ किसी भी पà¥à¤°à¤•ार की असà¥à¤µà¤¿à¤§à¤¾ हेतॠखेद है।" #: aurweb/scripts/notify.py msgid "AUR Server Error" -msgstr "" +msgstr "AUR सरà¥à¤µà¤° तà¥à¤°à¥à¤Ÿà¤¿" #: templates/pkgbase/merge.html templates/packages/delete.html #: templates/packages/disown.html msgid "Related package request closure comments..." 
-msgstr "" +msgstr "पैकेज अनà¥à¤°à¥‹à¤§ समापन संबंधी टिपà¥à¤ªà¤£à¤¿à¤¯à¤¾à¤..." #: templates/pkgbase/merge.html templates/packages/delete.html msgid "" "This action will close any pending package requests related to it. If " "%sComments%s are omitted, a closure comment will be autogenerated." -msgstr "" +msgstr "इस कारà¥à¤¯ दà¥à¤µà¤¾à¤°à¤¾ संबंधित सभी लंबित पैकेज अनà¥à¤°à¥‹à¤§ बंद हो जाà¤à¤à¤—े। %sटिपà¥à¤ªà¤£à¤¿à¤¯à¤¾à¤%s न होने की सà¥à¤¥à¤¿à¤¤à¤¿ में à¤à¤• समापन टिपà¥à¤ªà¤£à¥€ का सà¥à¤µà¤¤à¤ƒ ही सृजन होगा।" From bf348fa5721dd79800d152477e3056d15ff3d0b0 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:09:09 -0800 Subject: [PATCH 189/415] update-hr translations --- po/hr.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/hr.po b/po/hr.po index fe1857c1..a0474e23 100644 --- a/po/hr.po +++ b/po/hr.po @@ -1,16 +1,16 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # Lukas Fleischer , 2011 msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Lukas Fleischer , 2011\n" "Language-Team: Croatian (http://www.transifex.com/lfleischer/aurweb/language/hr/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From 5f71e58db16e0f22db0261cf07741d35fd3b79e7 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:09:13 -0800 Subject: [PATCH 190/415] update-hu translations --- po/hu.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/hu.po b/po/hu.po index e6ebd451..7459a716 100644 --- a/po/hu.po +++ b/po/hu.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # Balló György , 2013 # Balló György , 2011,2013-2016 @@ -11,10 +11,10 @@ msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: PB, 2020\n" "Language-Team: Hungarian (http://www.transifex.com/lfleischer/aurweb/language/hu/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From 28e8b312110e917e72505bebc122be61d38a37ee Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:09:16 -0800 Subject: [PATCH 191/415] update-id translations --- po/id.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/id.po b/po/id.po index 103c47e6..96059ac9 100644 --- a/po/id.po +++ b/po/id.po @@ -1,17 +1,17 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # se7entime , 2013 # se7entime , 2016 msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: se7entime , 2016\n" "Language-Team: Indonesian (http://www.transifex.com/lfleischer/aurweb/language/id/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From 3a460faa6e97296cc8b308416c30bda68b13c016 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:09:19 -0800 Subject: [PATCH 192/415] update-id_ID translations --- po/id_ID.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/id_ID.po b/po/id_ID.po index c3acb167..f0612399 100644 --- a/po/id_ID.po +++ b/po/id_ID.po @@ -1,15 +1,15 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: FULL NAME \n" "Language-Team: Indonesian (Indonesia) (http://www.transifex.com/lfleischer/aurweb/language/id_ID/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From 0d950a0c9fe355f1ccb667181d6313da769f671d Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:09:23 -0800 Subject: [PATCH 193/415] update-is translations --- po/is.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/is.po b/po/is.po index aee80ce5..0f3a3fcb 100644 --- a/po/is.po +++ b/po/is.po @@ -1,15 +1,15 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: FULL NAME \n" "Language-Team: Icelandic (http://www.transifex.com/lfleischer/aurweb/language/is/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From a12dbd191a9d857f3474c3b8557cb3cd787fb603 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:09:26 -0800 Subject: [PATCH 194/415] update-it translations --- po/it.po | 132 +++++++++++++++++++++++++++++-------------------------- 1 file changed, 69 insertions(+), 63 deletions(-) diff --git a/po/it.po b/po/it.po index f583cb2f..466d486a 100644 --- a/po/it.po +++ b/po/it.po @@ -1,26 +1,27 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # Fanfurlio Farolfi , 2021-2022 -# Giovanni Scafora , 2011-2015 +# Giovanni Scafora , 2011-2015,2022 +# Giovanni Scafora , 2022 # Lorenzo Porta , 2014 # Lukas Fleischer , 2011 # mattia_b89 , 2019 msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Giovanni Scafora , 2022\n" "Language-Team: Italian (http://www.transifex.com/lfleischer/aurweb/language/it/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Language: it\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" +"Plural-Forms: nplurals=3; plural=n == 1 ? 0 : n != 0 && n % 1000000 == 0 ? 1 : 2;\n" #: html/404.php msgid "Page Not Found" @@ -41,12 +42,12 @@ msgstr "Le URL per clonare un repository git non sono visualizzabili nel browser #: html/404.php #, php-format msgid "To clone the Git repository of %s, run %s." -msgstr "Per clonare il reposiroty git di %s, esegui %s." +msgstr "Per clonare il repository git di %s, esegui %s." #: html/404.php #, php-format msgid "Click %shere%s to return to the %s details page." -msgstr "Clicca %squi%s per tornare alla pagina dei dettagli di %s." +msgstr "Clicca %squi%s per ritornare alla pagina dei dettagli di %s." #: html/503.php msgid "Service Unavailable" @@ -79,7 +80,7 @@ msgstr "Non hai i permessi necessari per modificare questo account." #: html/account.php lib/acctfuncs.inc.php msgid "Invalid password." -msgstr "Password non valida." +msgstr "La password non è valida." #: html/account.php msgid "Use this form to search existing accounts." 
@@ -169,7 +170,7 @@ msgstr "Edita il commento" #: html/home.php template/header.php msgid "Dashboard" -msgstr "Cruscotto" +msgstr "Pannello" #: html/home.php template/header.php msgid "Home" @@ -573,7 +574,7 @@ msgstr "Solo i TU e gli sviluppatori possono abbandonare i pacchetti." #: html/pkgflagcomment.php msgid "Flag Comment" -msgstr "Segnala Commento" +msgstr "Segnala il commento" #: html/pkgflag.php msgid "Flag Package Out-Of-Date" @@ -585,7 +586,7 @@ msgid "" " package version in the AUR does not match the most recent commit. Flagging " "this package should only be done if the sources moved or changes in the " "PKGBUILD are required because of recent upstream changes." -msgstr "Questo appare essere un pacchetto da VCS. Per favore %snon%s marcarlo come non aggiornato se la versione in AUR non corrisponde con il commit più recente, Questo pacchetto dovrebbe essere marcato solo se i sorgenti sono stati spostati o se sono necessari dei cambiamenti al PKGBUILD a causa delle recenti modifiche al sorgente." +msgstr "Sembra un pacchetto VCS. Per favore %snon%s segnalarlo come non aggiornato, se la versione in AUR non corrisponde con il commit più recente. Questo pacchetto dovrebbe essere segnalato solo se i sorgenti vengono spostati o se sono necessarie delle modifiche al PKGBUILD a causa delle recenti modifiche al sorgente." #: html/pkgflag.php #, php-format @@ -699,12 +700,12 @@ msgstr "Usa questo modulo per creare un account." #: html/tos.php msgid "Terms of Service" -msgstr "Termini del Servizio" +msgstr "Termini di servizio" #: html/tos.php msgid "" "The following documents have been updated. Please review them carefully:" -msgstr "I seguenti documenti sono stati aggiornati. Per favore riesaminali correttamente:" +msgstr "I seguenti documenti sono stati aggiornati. Riesaminali attentamente:" #: html/tos.php #, php-format @@ -784,7 +785,7 @@ msgstr "Può contenere solo un punto, un trattino basso o un trattino." 
#: lib/acctfuncs.inc.php msgid "Please confirm your new password." -msgstr "Per favore conferma la tua nuova password." +msgstr "Conferma la tua nuova password." #: lib/acctfuncs.inc.php msgid "The email address is invalid." @@ -796,7 +797,7 @@ msgstr "L'indirizzo email di scorta non è valido." #: lib/acctfuncs.inc.php msgid "The home page is invalid, please specify the full HTTP(s) URL." -msgstr "La homepage non è valida, per favore specificare l'URL HTTP(s) completo." +msgstr "La homepage non è valida, specifica l'URL HTTP(s) completo." #: lib/acctfuncs.inc.php msgid "The PGP key fingerprint is invalid." @@ -816,7 +817,7 @@ msgstr "Lingua attualmente non supportata." #: lib/acctfuncs.inc.php msgid "Timezone is not currently supported." -msgstr "Fuso orario non attualmente supportato." +msgstr "Il fuso orario non è attualmente supportato." #: lib/acctfuncs.inc.php #, php-format @@ -835,15 +836,15 @@ msgstr "La chiave pubblica SSH %s%s%s, è già in uso." #: lib/acctfuncs.inc.php msgid "The CAPTCHA is missing." -msgstr "Manca la risposta CAPTCHA." +msgstr "Manca il CAPTCHA." #: lib/acctfuncs.inc.php msgid "This CAPTCHA has expired. Please try again." -msgstr "Il CAPTCHA è scaduto, Per favore riprova." +msgstr "Il CAPTCHA è scaduto. Riprova." #: lib/acctfuncs.inc.php msgid "The entered CAPTCHA answer is invalid." -msgstr "La risposta CAPTCHA inserita non è valida." +msgstr "Il CAPTCHA inserito non è valido." #: lib/acctfuncs.inc.php #, php-format @@ -885,7 +886,7 @@ msgstr "Account sospeso" #: aurweb/routers/accounts.py msgid "You do not have permission to suspend accounts." -msgstr "Non hai il permesso per sospendere account." +msgstr "Non hai il permesso per sospendere gli account." #: lib/acctfuncs.inc.php #, php-format @@ -946,7 +947,7 @@ msgstr "Manca l'ID del commento." #: lib/pkgbasefuncs.inc.php msgid "No more than 5 comments can be pinned." -msgstr "Non possono essere inseriti più di 5 commenti." +msgstr "Non possono essere evidenziati più di 5 commenti." 
#: lib/pkgbasefuncs.inc.php msgid "You are not allowed to pin this comment." @@ -958,11 +959,11 @@ msgstr "Non sei autorizzato a rimuovere questo commento." #: lib/pkgbasefuncs.inc.php msgid "Comment has been pinned." -msgstr "Il commento è stato rimosso." +msgstr "Il commento è ora in evidenza." #: lib/pkgbasefuncs.inc.php msgid "Comment has been unpinned." -msgstr "I commenti sono stati rimossi." +msgstr "I commenti non sono più in evidenza." #: lib/pkgbasefuncs.inc.php lib/pkgfuncs.inc.php msgid "Error retrieving package details." @@ -1296,7 +1297,7 @@ msgstr "Modifica l'account di quest'utente" #: template/account_details.php msgid "List this user's comments" -msgstr "Elenca i commenti di quest'utente" +msgstr "Elenca i commenti di questo utente" #: template/account_edit_form.php #, php-format @@ -1306,7 +1307,7 @@ msgstr "Clicca %squi%s se vuoi eliminare definitivamente questo account." #: template/account_edit_form.php #, php-format msgid "Click %shere%s for user details." -msgstr "Click %squì%s per il dettagli dell'utente." +msgstr "Clicca %squi%s per visualizzare i dettagli dell'utente." #: template/account_edit_form.php #, php-format @@ -1364,7 +1365,7 @@ msgstr "Indirizzo email di scorta" msgid "" "Optionally provide a secondary email address that can be used to restore " "your account in case you lose access to your primary email address." -msgstr "Puoi fornire un secondo indirizzo email che potrà essere usato per ripristinare il tuo account, nel caso tu perda l'accesso al tuo indirizzo email primario." +msgstr "Puoi fornire un secondo indirizzo email che potrà essere usato per ripristinare il tuo account, nel caso tu perdessi l'accesso al tuo indirizzo email primario." #: template/account_edit_form.php msgid "" @@ -1391,7 +1392,7 @@ msgstr "Fuso orario" msgid "" "If you want to change the password, enter a new password and confirm the new" " password by entering it again." 
-msgstr "Se vuoi cambiare la tua password, inseriscine una nuova e confermala inserendola di nuovo." +msgstr "Se vuoi cambiare la tua password, inseriscine una nuova e confermala digitandola di nuovo." #: template/account_edit_form.php msgid "Re-type password" @@ -1409,7 +1410,7 @@ msgstr "Chiave pubblica SSH" #: template/account_edit_form.php msgid "Notification settings" -msgstr "Impostazioni notifiche" +msgstr "Impostazioni delle notifiche" #: template/account_edit_form.php msgid "Notify of new comments" @@ -1421,11 +1422,11 @@ msgstr "Notifica degli aggiornamenti dei pacchetti" #: template/account_edit_form.php msgid "Notify of ownership changes" -msgstr "Notifica cambiamenti di proprietà" +msgstr "Notifica dei cambiamenti di proprietà" #: template/account_edit_form.php msgid "To confirm the profile changes, please enter your current password:" -msgstr "Per confermare le modifiche al profilo, per favore inserisci la tua password:" +msgstr "Per confermare le modifiche al profilo, inserisci la tua password:" #: template/account_edit_form.php msgid "Your current password" @@ -1499,7 +1500,7 @@ msgstr "Salva" #: template/flag_comment.php #, php-format msgid "Flagged Out-of-Date Comment: %s" -msgstr "Commento per la marcatura come Non Aggiornato: %s" +msgstr "Commento per la segnalazione come Non Aggiornato: %s" #: template/flag_comment.php #, php-format @@ -1513,7 +1514,7 @@ msgstr "%s%s%s non è segnalato come non aggiornato." 
#: template/flag_comment.php msgid "Return to Details" -msgstr "Ritorna ai Dettagli" +msgstr "Ritorna ai dettagli" #: template/footer.php #, php-format @@ -1583,6 +1584,7 @@ msgid "%d pending request" msgid_plural "%d pending requests" msgstr[0] "%d richiesta in attesa" msgstr[1] "%d richieste in attesa" +msgstr[2] "%d richieste in attesa" #: template/pkgbase_actions.php msgid "Adopt Package" @@ -1654,7 +1656,7 @@ msgstr "Aggiungi un commento" msgid "" "Git commit identifiers referencing commits in the AUR package repository and" " URLs are converted to links automatically." -msgstr "Gli identificatori dei commit Git nel repository dei pacchetti AUR e le URL vengono convertite automaticamente in link." +msgstr "Gli identificatori dei commit Git nel repository dei pacchetti AUR e le URL vengono convertiti automaticamente in link." #: template/pkg_comment_form.php #, php-format @@ -1663,7 +1665,7 @@ msgstr "La %ssintassi Markdown%s è parzialmente supportata." #: template/pkg_comments.php msgid "Pinned Comments" -msgstr "Elimina i commenti" +msgstr "Commenti in evidenza" #: template/pkg_comments.php msgid "Latest Comments" @@ -1676,7 +1678,7 @@ msgstr "Commenti per" #: template/pkg_comments.php #, php-format msgid "%s commented on %s" -msgstr "%s ha commentato su %s" +msgstr "%s ha commentato il %s" #: template/pkg_comments.php #, php-format @@ -1686,27 +1688,27 @@ msgstr "Commento anonimo su %s" #: template/pkg_comments.php #, php-format msgid "Commented on package %s on %s" -msgstr "Ha commentato sul pacchetto %s su %s" +msgstr "Ha commentato sul pacchetto %s il %s" #: template/pkg_comments.php #, php-format msgid "deleted on %s by %s" -msgstr "Eliminato su %s da %s" +msgstr "eliminato il %s da %s" #: template/pkg_comments.php #, php-format msgid "deleted on %s" -msgstr "cancellato su %s" +msgstr "eliminato il %s" #: template/pkg_comments.php #, php-format msgid "edited on %s by %s" -msgstr "modificato su %s da %s" +msgstr "modificato il %s da %s" #: 
template/pkg_comments.php #, php-format msgid "edited on %s" -msgstr "modificato su %s" +msgstr "modificato il %s" #: template/pkg_comments.php msgid "Undelete comment" @@ -1829,7 +1831,7 @@ msgid "" "By submitting a deletion request, you ask a Trusted User to delete the " "package base. This type of request should be used for duplicates, software " "abandoned by upstream, as well as illegal and irreparably broken packages." -msgstr "Inserendo una richiesta di cancellazione, stai chiedendo ad un Trusted User di cancellare il pacchetto base. Questo tipo di richiesta dovrebbe essere usato per duplicati, software abbandonati dall'autore, sotware illegalmente distribuiti o pacchetti irreparabili." +msgstr "Inserendo una richiesta di cancellazione, stai chiedendo ad un Trusted User di cancellare il pacchetto base. Questo tipo di richiesta dovrebbe essere usata per i duplicati, per software abbandonati dall'autore, per software illegalmente distribuiti oppure per pacchetti irreparabili." #: template/pkgreq_form.php msgid "" @@ -1845,7 +1847,7 @@ msgid "" "package base. Please only do this if the package needs maintainer action, " "the maintainer is MIA and you already tried to contact the maintainer " "previously." -msgstr "Inserendo una richiesta di abbandono, stai chiedendo ad un Trusted User di rimuovere la proprietà del pacchetto base. Per favore procedi soltanto se il pacchetto necessita di manutenzione, il manutentore attuale non risponde, e hai già provato a contattarlo precedentemente." +msgstr "Inserendo una richiesta di abbandono, stai chiedendo ad un Trusted User di rimuovere la proprietà del pacchetto base. Procedi soltanto se il pacchetto necessita di manutenzione, se il manutentore attuale non risponde e se hai già provato a contattarlo precedentemente." #: template/pkgreq_results.php msgid "No requests matched your search criteria." @@ -1857,6 +1859,7 @@ msgid "%d package request found." msgid_plural "%d package requests found." 
msgstr[0] "È stato trovato %d pacchetto." msgstr[1] "Sono stati trovati %d pacchetti." +msgstr[2] "Sono stati trovati %d pacchetti." #: template/pkgreq_results.php template/pkg_search_results.php #, php-format @@ -1881,6 +1884,7 @@ msgid "~%d day left" msgid_plural "~%d days left" msgstr[0] "~%d giorno rimanente" msgstr[1] "~%d giorni rimanenti" +msgstr[2] "~%d giorni rimanenti" #: template/pkgreq_results.php #, php-format @@ -1888,6 +1892,7 @@ msgid "~%d hour left" msgid_plural "~%d hours left" msgstr[0] "~%d ora rimanente" msgstr[1] "~%d ore rimanenti" +msgstr[2] "~%d ore rimanenti" #: template/pkgreq_results.php msgid "<1 hour left" @@ -2016,6 +2021,7 @@ msgid "%d package found." msgid_plural "%d packages found." msgstr[0] "È stato trovato %d pacchetto." msgstr[1] "Sono stati trovati %d pacchetti." +msgstr[2] "Sono stati trovati %d pacchetti." #: template/pkg_search_results.php msgid "Version" @@ -2026,7 +2032,7 @@ msgstr "Versione" msgid "" "Popularity is calculated as the sum of all votes with each vote being " "weighted with a factor of %.2f per day since its creation." -msgstr "La popolarità è calcolata come somma di tutti i voti pesati con un fattore di %.2f al giorno, dalla sua creazione." +msgstr "La popolarità è calcolata come somma di tutti i voti ponderati con un fattore di %.2f al giorno dalla sua creazione." #: template/pkg_search_results.php template/tu_details.php #: template/tu_list.php @@ -2172,7 +2178,7 @@ msgstr "Precedente" #: scripts/notify.py msgid "AUR Password Reset" -msgstr "Ripristino Password di AUR" +msgstr "Ripristino della password di AUR" #: scripts/notify.py #, python-brace-format @@ -2180,18 +2186,18 @@ msgid "" "A password reset request was submitted for the account {user} associated " "with your email address. If you wish to reset your password follow the link " "[1] below, otherwise ignore this message and nothing will happen." 
-msgstr "È stata inviata una richiesta per ripristinare la password dell'account {user} associato al tuo indirizzo e-mail. Se desideri ripristinare la tua password, clicca sul link [1] sottostante, altrimenti ignora questo messaggio e non succederà nulla." +msgstr "È stata inviata una richiesta per ripristinare la password dell'account {user} associato al tuo indirizzo email. Se desideri ripristinare la tua password, clicca sul link [1] sottostante, altrimenti ignora questo messaggio e non succederà nulla." #: scripts/notify.py msgid "Welcome to the Arch User Repository" -msgstr "Benvenuto nel Arch User Repository" +msgstr "Benvenuto nell'Arch User Repository" #: scripts/notify.py msgid "" "Welcome to the Arch User Repository! In order to set an initial password for" " your new account, please click the link [1] below. If the link does not " "work, try copying and pasting it into your browser." -msgstr "Benvenuto nel Arch User Repository! Per impostare una password iniziale per il tuo nuovo account, per favore segui il collegamento [1] sottostante. Se il collegamento non funziona, prova a copiarlo e incollarlo nel tuo browser." +msgstr "Benvenuto nell'Arch User Repository! Per impostare una password iniziale per il tuo nuovo account, clicca sul link [1] sottostante. Se il link non funzionasse, prova a copiarlo e ad incollarlo nella barra degli indirizzi del tuo browser." #: scripts/notify.py #, python-brace-format @@ -2208,12 +2214,12 @@ msgstr "{user} [1] ha commentato su {pkgbase} [2]:" msgid "" "If you no longer wish to receive notifications about this package, please go" " to the package page [2] and select \"{label}\"." -msgstr "Se non vuoi più ricevere notifiche su questo pacchetto, per favore vai alla pagina del pacchetto [2] e seleziona \"{label}\"." +msgstr "Se non vuoi più ricevere notifiche su questo pacchetto, vai alla pagina del pacchetto [2] e seleziona \"{label}\"." 
#: scripts/notify.py #, python-brace-format msgid "AUR Package Update: {pkgbase}" -msgstr "Aggiornamento pachetto base: {pkgbase}" +msgstr "Aggiornamento del pacchetto base: {pkgbase}" #: scripts/notify.py #, python-brace-format @@ -2223,7 +2229,7 @@ msgstr "{user} [1] ha inviato un nuovo commit su {pkgbase} [2]." #: scripts/notify.py #, python-brace-format msgid "AUR Out-of-date Notification for {pkgbase}" -msgstr "Notifica AUR per pacchetto {pkgbase} non aggiornato" +msgstr "Notifica AUR per il pacchetto {pkgbase} non aggiornato" #: scripts/notify.py #, python-brace-format @@ -2233,7 +2239,7 @@ msgstr "Il tuo pacchetto {pkgbase} [1] è stato marcato come non aggiornato dall #: scripts/notify.py #, python-brace-format msgid "AUR Ownership Notification for {pkgbase}" -msgstr "Notifica AUR di proprietà per pacchetto {pkgbase} " +msgstr "Notifica AUR di proprietà del pacchetto {pkgbase} " #: scripts/notify.py #, python-brace-format @@ -2248,7 +2254,7 @@ msgstr "Il pacchetto {pkgbase} [1] è stato abbandonato da {user} [2]." #: scripts/notify.py #, python-brace-format msgid "AUR Co-Maintainer Notification for {pkgbase}" -msgstr "Notifica AUR di co-manutenzione per pacchetto {pkgbase} " +msgstr "Notifica AUR di co-manutenzione per il pacchetto {pkgbase} " #: scripts/notify.py #, python-brace-format @@ -2272,7 +2278,7 @@ msgid "" "\n" "-- \n" "If you no longer wish receive notifications about the new package, please go to [3] and click \"{label}\"." -msgstr "{user} [1] ha unito {old} [2] in {new} [3].\n\n-- \nSe non vuoi più ricevere notifiche sul nuovo pacchetto, per favore vai a [3] e clicca su \"{label}\"." +msgstr "{user} [1] ha unito {old} [2] in {new} [3].\n\n-- \nSe non desideri più ricevere notifiche sul nuovo pacchetto, vai a [3] e clicca su \"{label}\"." #: scripts/notify.py #, python-brace-format @@ -2280,7 +2286,7 @@ msgid "" "{user} [1] deleted {pkgbase} [2].\n" "\n" "You will no longer receive notifications about this package." 
-msgstr "{user} [1] ha eliminato {pkgbase} [2].\n\nNon riceverai più notifiche su questo pacchetto." +msgstr "{user} [1] ha eliminato {pkgbase} [2].\n\nNon riceverai più notifiche per questo pacchetto." #: scripts/notify.py #, python-brace-format @@ -2292,19 +2298,19 @@ msgstr "Promemoria per voto TU: Proposta {id}" msgid "" "Please remember to cast your vote on proposal {id} [1]. The voting period " "ends in less than 48 hours." -msgstr "Per favore ricordati di votare sulla proposta {id} [1]. La finestra di voto si chiude fra meno di 48 ore." +msgstr "Ricordati di votare la proposta di {id} [1]. La finestra di voto si chiude fra meno di 48 ore." #: aurweb/routers/accounts.py msgid "Invalid account type provided." -msgstr "Tipo di account non valido." +msgstr "L'account fornito non è valido." #: aurweb/routers/accounts.py msgid "You do not have permission to change account types." -msgstr "Non hai il permesso per cambiare il tipo di account." +msgstr "Non hai il permesso per modificare il tipo di account." #: aurweb/routers/accounts.py msgid "You do not have permission to change this user's account type to %s." -msgstr "Non hai il permesso per cambiare il tipo di account di questo utente in %s." +msgstr "Non hai il permesso per modificare il tipo di account di questo utente in %s." #: aurweb/packages/requests.py msgid "No due existing orphan requests to accept for %s." @@ -2322,19 +2328,19 @@ msgstr "Si è verificato un errore irreversibile." msgid "" "Details have been logged and will be reviewed by the postmaster posthaste. " "We apologize for any inconvenience this may have caused." -msgstr "I dettagli sono stati registrati e verranno visionati da postmaster velocemente. Ci scusiamo per l'inconvenienza che questo possa aver causato." +msgstr "I dettagli sono stati registrati e verranno visionati al più presto dal postmaster. Ci scusiamo per gli eventuali disagi causati."
#: aurweb/scripts/notify.py msgid "AUR Server Error" -msgstr "Errore server AUR" +msgstr "Errore del server di AUR" #: templates/pkgbase/merge.html templates/packages/delete.html #: templates/packages/disown.html msgid "Related package request closure comments..." -msgstr "" +msgstr "Commenti relativi alla richiesta di chiusura del pacchetto..." #: templates/pkgbase/merge.html templates/packages/delete.html msgid "" "This action will close any pending package requests related to it. If " "%sComments%s are omitted, a closure comment will be autogenerated." -msgstr "" +msgstr "Questa azione chiuderà tutte le richieste in sospeso dei pacchetti ad essa correlate. Se %scommenti%s vengono omessi, verrà generato automaticamente un commento di chiusura." From 08af8cad8d2c085770633a11198e4acd7a2774f1 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:09:29 -0800 Subject: [PATCH 195/415] update-ja translations --- po/ja.po | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/po/ja.po b/po/ja.po index 280edb46..40349f28 100644 --- a/po/ja.po +++ b/po/ja.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # kusakata, 2013 # kusakata, 2013 @@ -10,10 +10,10 @@ msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: kusakata, 2013-2018,2020-2022\n" "Language-Team: Japanese (http://www.transifex.com/lfleischer/aurweb/language/ja/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" @@ -2325,10 +2325,10 @@ msgstr "AUR サーãƒãƒ¼ã‚¨ãƒ©ãƒ¼" #: templates/pkgbase/merge.html templates/packages/delete.html #: templates/packages/disown.html msgid "Related package request closure comments..." -msgstr "" +msgstr "関連ã™ã‚‹ãƒ‘ッケージリクエストã®å–り消ã—コメント..." #: templates/pkgbase/merge.html templates/packages/delete.html msgid "" "This action will close any pending package requests related to it. If " "%sComments%s are omitted, a closure comment will be autogenerated." -msgstr "" +msgstr "ã“ã®ã‚¢ã‚¯ã‚·ãƒ§ãƒ³ã¯é–¢é€£ã™ã‚‹ãƒ‘ッケージリクエストをã™ã¹ã¦å–り消ã—ã¾ã™ã€‚%sコメント%sã‚’çœç•¥ã—ãŸå ´åˆã€è‡ªå‹•çš„ã«ã‚³ãƒ¡ãƒ³ãƒˆãŒç”Ÿæˆã•れã¾ã™ã€‚" From e6d36101d9f26f7e71570bd02961b3ed3a21fa3c Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:09:33 -0800 Subject: [PATCH 196/415] update-ko translations --- po/ko.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/ko.po b/po/ko.po index 6da57759..a4c694c9 100644 --- a/po/ko.po +++ b/po/ko.po @@ -1,15 +1,15 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: FULL NAME \n" "Language-Team: Korean (http://www.transifex.com/lfleischer/aurweb/language/ko/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From e5137e0c4297a82bfe420228a38465fb396a34eb Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:09:36 -0800 Subject: [PATCH 197/415] update-lt translations --- po/lt.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/lt.po b/po/lt.po index c9f55632..627fefd0 100644 --- a/po/lt.po +++ b/po/lt.po @@ -1,15 +1,15 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: FULL NAME \n" "Language-Team: Lithuanian (http://www.transifex.com/lfleischer/aurweb/language/lt/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From d20dbbcf7419c8b76eb338384467172db4af9189 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:09:39 -0800 Subject: [PATCH 198/415] update-nb translations --- po/nb.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/nb.po b/po/nb.po index 307a80d6..b503de85 100644 --- a/po/nb.po +++ b/po/nb.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Alexander F. Rødseth , 2015,2017-2019 # Alexander F. Rødseth , 2011,2013-2014 @@ -12,10 +12,10 @@ msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Alexander F. 
Rødseth , 2015,2017-2019\n" "Language-Team: Norwegian BokmÃ¥l (http://www.transifex.com/lfleischer/aurweb/language/nb/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From 57a2b4b516a43a33182399b8fdaa4473cfa91e6f Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:09:43 -0800 Subject: [PATCH 199/415] update-nb_NO translations --- po/nb_NO.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/nb_NO.po b/po/nb_NO.po index 5d958172..49d2eccf 100644 --- a/po/nb_NO.po +++ b/po/nb_NO.po @@ -1,17 +1,17 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Kim Nordmo , 2017,2019 # Lukas Fleischer , 2011 msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Kim Nordmo , 2017,2019\n" "Language-Team: Norwegian BokmÃ¥l (Norway) (http://www.transifex.com/lfleischer/aurweb/language/nb_NO/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From 05c6266986ac6652a1755a89f229427195a7305d Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:09:46 -0800 Subject: [PATCH 200/415] update-nl translations --- po/nl.po | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/po/nl.po b/po/nl.po index 54519d21..d23fe04a 100644 --- a/po/nl.po +++ b/po/nl.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # Heimen Stoffels , 2021-2022 # Heimen Stoffels , 2015,2021 @@ -13,10 +13,10 @@ msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Heimen Stoffels , 2021-2022\n" "Language-Team: Dutch (http://www.transifex.com/lfleischer/aurweb/language/nl/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" @@ -2333,10 +2333,10 @@ msgstr "AUR-serverfout" #: templates/pkgbase/merge.html templates/packages/delete.html #: templates/packages/disown.html msgid "Related package request closure comments..." -msgstr "" +msgstr "Gerelateerde pakketverzoekreacties…" #: templates/pkgbase/merge.html templates/packages/delete.html msgid "" "This action will close any pending package requests related to it. If " "%sComments%s are omitted, a closure comment will be autogenerated." -msgstr "" +msgstr "Met deze actie sluit u elk gerelateerd openstaand verzoek. Als %s reacties%s genegeerd worden, dan wordt er een automatische afsluitreactie geplaatst." From e572b86fd3d2acf041c0882ba669ad6a5bcfac0f Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:09:50 -0800 Subject: [PATCH 201/415] update-pl translations --- po/pl.po | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/po/pl.po b/po/pl.po index 94a6fb67..97c7d730 100644 --- a/po/pl.po +++ b/po/pl.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # BartÅ‚omiej Piotrowski , 2011 # BartÅ‚omiej Piotrowski , 2014 @@ -13,16 +13,16 @@ # marcin mikoÅ‚ajczak , 2017 # Michal T , 2016 # Nuc1eoN , 2014 -# Piotr StrÄ™bski , 2017-2018 +# Piotr StrÄ™bski , 2017-2018,2022 # Piotr StrÄ™bski , 2013-2016 # Przemyslaw Ka. , 2021 msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Piotr StrÄ™bski , 2017-2018,2022\n" "Language-Team: Polish (http://www.transifex.com/lfleischer/aurweb/language/pl/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" @@ -712,7 +712,7 @@ msgstr "Zasady korzystania" #: html/tos.php msgid "" "The following documents have been updated. Please review them carefully:" -msgstr "" +msgstr "Zaktualizowano nastÄ™pujÄ…ce dokumenty. Przejrzyj je dokÅ‚adnie:" #: html/tos.php #, php-format @@ -792,7 +792,7 @@ msgstr "Może zawierać tylko jednÄ… kropkÄ™, podkreÅ›lnik lub myÅ›lnik." #: lib/acctfuncs.inc.php msgid "Please confirm your new password." -msgstr "" +msgstr "Potwierdź nowe hasÅ‚o." #: lib/acctfuncs.inc.php msgid "The email address is invalid." @@ -800,7 +800,7 @@ msgstr "Adres e-mail jest nieprawidÅ‚owy." #: lib/acctfuncs.inc.php msgid "The backup email address is invalid." -msgstr "" +msgstr "Zapasowy adres e-mail jest nieprawidÅ‚owy." #: lib/acctfuncs.inc.php msgid "The home page is invalid, please specify the full HTTP(s) URL." @@ -824,7 +824,7 @@ msgstr "JÄ™zyk nie jest obecnie obsÅ‚ugiwany." #: lib/acctfuncs.inc.php msgid "Timezone is not currently supported." -msgstr "" +msgstr "Strefa czasowa nie jest obecnie obsÅ‚ugiwana." 
#: lib/acctfuncs.inc.php #, php-format From 6ee7598211d5358cf94bf4b8936f486b439add45 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:09:53 -0800 Subject: [PATCH 202/415] update-pt translations --- po/pt.po | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/po/pt.po b/po/pt.po index aed32031..05778859 100644 --- a/po/pt.po +++ b/po/pt.po @@ -1,22 +1,22 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Lukas Fleischer , 2011 msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Lukas Fleischer , 2011\n" "Language-Team: Portuguese (http://www.transifex.com/lfleischer/aurweb/language/pt/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Language: pt\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" +"Plural-Forms: nplurals=3; plural=(n == 0 || n == 1) ? 0 : n != 0 && n % 1000000 == 0 ? 1 : 2;\n" #: html/404.php msgid "Page Not Found" @@ -1579,6 +1579,7 @@ msgid "%d pending request" msgid_plural "%d pending requests" msgstr[0] "" msgstr[1] "" +msgstr[2] "" #: template/pkgbase_actions.php msgid "Adopt Package" @@ -1853,6 +1854,7 @@ msgid "%d package request found." msgid_plural "%d package requests found." 
msgstr[0] "" msgstr[1] "" +msgstr[2] "" #: template/pkgreq_results.php template/pkg_search_results.php #, php-format @@ -1877,6 +1879,7 @@ msgid "~%d day left" msgid_plural "~%d days left" msgstr[0] "" msgstr[1] "" +msgstr[2] "" #: template/pkgreq_results.php #, php-format @@ -1884,6 +1887,7 @@ msgid "~%d hour left" msgid_plural "~%d hours left" msgstr[0] "" msgstr[1] "" +msgstr[2] "" #: template/pkgreq_results.php msgid "<1 hour left" @@ -2012,6 +2016,7 @@ msgid "%d package found." msgid_plural "%d packages found." msgstr[0] "" msgstr[1] "" +msgstr[2] "" #: template/pkg_search_results.php msgid "Version" From bb00a4ecfde887741f1bab5b8f71e902e5fee252 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:09:56 -0800 Subject: [PATCH 203/415] update-pt_BR translations --- po/pt_BR.po | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/po/pt_BR.po b/po/pt_BR.po index d29a9448..6bc6a596 100644 --- a/po/pt_BR.po +++ b/po/pt_BR.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # Albino Biasutti Neto Bino , 2011 # Fábio Nogueira , 2016 @@ -13,16 +13,16 @@ msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Rafael Fontenelle , 2011,2015-2018,2020-2022\n" "Language-Team: Portuguese (Brazil) (http://www.transifex.com/lfleischer/aurweb/language/pt_BR/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Language: pt_BR\n" -"Plural-Forms: nplurals=2; plural=(n > 1);\n" +"Plural-Forms: nplurals=3; plural=(n == 0 || n == 1) ? 0 : n != 0 && n % 1000000 == 0 ? 1 : 2;\n" #: html/404.php msgid "Page Not Found" @@ -1585,6 +1585,7 @@ msgid "%d pending request" msgid_plural "%d pending requests" msgstr[0] "%d requisição pendente" msgstr[1] "%d requisições pendentes" +msgstr[2] "%d requisições pendentes" #: template/pkgbase_actions.php msgid "Adopt Package" @@ -1859,6 +1860,7 @@ msgid "%d package request found." msgid_plural "%d package requests found." msgstr[0] "%d requisição de pacote encontrada." msgstr[1] "%d requisições de pacotes encontradas." +msgstr[2] "%d requisições de pacotes encontradas." 
#: template/pkgreq_results.php template/pkg_search_results.php #, php-format @@ -1883,6 +1885,7 @@ msgid "~%d day left" msgid_plural "~%d days left" msgstr[0] "~%d dia restante" msgstr[1] "~%d dias restantes" +msgstr[2] "~%d dias restantes" #: template/pkgreq_results.php #, php-format @@ -1890,6 +1893,7 @@ msgid "~%d hour left" msgid_plural "~%d hours left" msgstr[0] "~%d hora restante" msgstr[1] "~%d horas restantes" +msgstr[2] "~%d horas restantes" #: template/pkgreq_results.php msgid "<1 hour left" @@ -2018,6 +2022,7 @@ msgid "%d package found." msgid_plural "%d packages found." msgstr[0] "%d pacote encontrado." msgstr[1] "%d pacotes encontrados." +msgstr[2] "%d pacotes encontrados." #: template/pkg_search_results.php msgid "Version" @@ -2333,10 +2338,10 @@ msgstr "Erro do Servidor AUR" #: templates/pkgbase/merge.html templates/packages/delete.html #: templates/packages/disown.html msgid "Related package request closure comments..." -msgstr "" +msgstr "Comentários relacionados ao fechamento de requisição de pacote..." #: templates/pkgbase/merge.html templates/packages/delete.html msgid "" "This action will close any pending package requests related to it. If " "%sComments%s are omitted, a closure comment will be autogenerated." -msgstr "" +msgstr "Esta ação fechará todas as requisições de pacote pendentes relacionadas a ela. Se %sComentários%s for omitido, um comentário de encerramento será gerado automaticamente." From e7bcf2fc9786afcb761e98ec5d6cde0b6efa9396 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:10:00 -0800 Subject: [PATCH 204/415] update-pt_PT translations --- po/pt_PT.po | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/po/pt_PT.po b/po/pt_PT.po index 7f6ea67a..5d2ff7de 100644 --- a/po/pt_PT.po +++ b/po/pt_PT.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # Christophe Silva , 2018 # Gaspar Santos , 2011 @@ -12,16 +12,16 @@ msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Christophe Silva , 2018\n" "Language-Team: Portuguese (Portugal) (http://www.transifex.com/lfleischer/aurweb/language/pt_PT/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Language: pt_PT\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" +"Plural-Forms: nplurals=3; plural=(n == 0 || n == 1) ? 0 : n != 0 && n % 1000000 == 0 ? 1 : 2;\n" #: html/404.php msgid "Page Not Found" @@ -1584,6 +1584,7 @@ msgid "%d pending request" msgid_plural "%d pending requests" msgstr[0] "%d pedido por atender" msgstr[1] "%d pedidos por atender" +msgstr[2] "%d pedidos por atender" #: template/pkgbase_actions.php msgid "Adopt Package" @@ -1858,6 +1859,7 @@ msgid "%d package request found." msgid_plural "%d package requests found." msgstr[0] "%d pedido de pacote encontrado." msgstr[1] "%d pedidos de pacotes encontrados." +msgstr[2] "%d pedidos de pacotes encontrados." #: template/pkgreq_results.php template/pkg_search_results.php #, php-format @@ -1882,6 +1884,7 @@ msgid "~%d day left" msgid_plural "~%d days left" msgstr[0] "" msgstr[1] "" +msgstr[2] "" #: template/pkgreq_results.php #, php-format @@ -1889,6 +1892,7 @@ msgid "~%d hour left" msgid_plural "~%d hours left" msgstr[0] "" msgstr[1] "" +msgstr[2] "" #: template/pkgreq_results.php msgid "<1 hour left" @@ -2017,6 +2021,7 @@ msgid "%d package found." msgid_plural "%d packages found." msgstr[0] "%d pacote encontrado." msgstr[1] "%d pacotes encontrados." +msgstr[2] "%d pacotes encontrados." 
#: template/pkg_search_results.php msgid "Version" From fa20a3b5d81cd7554da6b1cd1ca52ddb76681b43 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:10:03 -0800 Subject: [PATCH 205/415] update-ro translations --- po/ro.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/ro.po b/po/ro.po index 4409b698..ecee97fd 100644 --- a/po/ro.po +++ b/po/ro.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Arthur ÈšiÈ›eică , 2013-2015 # Lukas Fleischer , 2011 @@ -9,10 +9,10 @@ msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Arthur ÈšiÈ›eică , 2013-2015\n" "Language-Team: Romanian (http://www.transifex.com/lfleischer/aurweb/language/ro/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From ebae0d43045de2f38a2b9e09d7e847b044fc05f9 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:10:06 -0800 Subject: [PATCH 206/415] update-ru translations --- po/ru.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/ru.po b/po/ru.po index 44f000dd..4a8a18f7 100644 --- a/po/ru.po +++ b/po/ru.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # Alex , 2021 # Evgeniy Alekseev , 2014-2015 @@ -18,10 +18,10 @@ msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Kevin Morris , 2021\n" "Language-Team: Russian (http://www.transifex.com/lfleischer/aurweb/language/ru/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From 8ee843b7b1f00e18b42f43984bf57b8d35dad695 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:10:10 -0800 Subject: [PATCH 207/415] update-sk translations --- po/sk.po | 62 ++++++++++++++++++++++++++++---------------------------- 1 file changed, 31 insertions(+), 31 deletions(-) diff --git a/po/sk.po b/po/sk.po index 853fc198..ca124981 100644 --- a/po/sk.po +++ b/po/sk.po @@ -1,18 +1,18 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # archetyp , 2013-2016 -# Jose Riha , 2018 +# Jose Riha , 2018,2022 # Matej Ľach , 2011 msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Jose Riha , 2018,2022\n" "Language-Team: Slovak (http://www.transifex.com/lfleischer/aurweb/language/sk/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" @@ -77,7 +77,7 @@ msgstr "Nemáte potrebné oprávnenia, pre úpravu tohoto úÄtu. 
" #: html/account.php lib/acctfuncs.inc.php msgid "Invalid password." -msgstr "" +msgstr "Neplatné heslo." #: html/account.php msgid "Use this form to search existing accounts." @@ -167,7 +167,7 @@ msgstr "EditovaÅ¥ komentár" #: html/home.php template/header.php msgid "Dashboard" -msgstr "" +msgstr "Nástenka" #: html/home.php template/header.php msgid "Home" @@ -175,11 +175,11 @@ msgstr "Domov" #: html/home.php msgid "My Flagged Packages" -msgstr "" +msgstr "Moje oznaÄené balíÄky" #: html/home.php msgid "My Requests" -msgstr "" +msgstr "Moje požiadavky" #: html/home.php msgid "My Packages" @@ -187,15 +187,15 @@ msgstr "Moje balíÄky" #: html/home.php msgid "Search for packages I maintain" -msgstr "" +msgstr "HľadaÅ¥ balíÄky, ktoré spravujem" #: html/home.php msgid "Co-Maintained Packages" -msgstr "" +msgstr "SpoloÄne spravované balíÄky" #: html/home.php msgid "Search for packages I co-maintain" -msgstr "" +msgstr "HľadaÅ¥ balíÄky, v ktorých pôsobím ako spolupracovník" #: html/home.php #, php-format @@ -239,7 +239,7 @@ msgstr "Podpora" #: html/home.php msgid "Package Requests" -msgstr "Žiadosti ohľadom balíÄkov" +msgstr "Žiadosti týkajúce sa balíÄkov" #: html/home.php #, php-format @@ -325,7 +325,7 @@ msgid "" "our %sbug tracker%s. Use the tracker to report bugs in the AUR web interface" " %sonly%s. To report packaging bugs contact the package maintainer or leave " "a comment on the appropriate package page." -msgstr "Ak nájdete chybu vo webovom rozhradní AUR, poÅ¡lite prosím správu o chybe na náš %sbug tracker%s. Posielajte sem %slen%s chyby webového rozhrania AUR. Pre nahlásenie chýb balíÄkov kontaktujte správcu balíÄka alebo zanechate komentár na prísluÅ¡nej stránke balíÄka." +msgstr "Ak nájdete chybu vo webovom rozhraní AUR, poÅ¡lite prosím správu o chybe na náš %sbug tracker%s. Posielajte sem %slen%s chyby webového rozhrania AUR. Pre nahlásenie chýb balíÄkov kontaktujte správcu balíÄka alebo zanechate komentár na prísluÅ¡nej stránke balíÄka." 
#: html/home.php msgid "Package Search" @@ -374,7 +374,7 @@ msgstr "Zadajte prihlasovacie údaje" #: html/login.php msgid "User name or primary email address" -msgstr "" +msgstr "Meno používateľa alebo primárna e-mailová adresa" #: html/login.php template/account_delete.php template/account_edit_form.php msgid "Password" @@ -438,7 +438,7 @@ msgstr "Heslo bolo úspeÅ¡ne obnovené." #: html/passreset.php msgid "Confirm your user name or primary e-mail address:" -msgstr "" +msgstr "PotvrÄte vaÅ¡e meno používateľa alebo primárnu e-mailovú adresu:" #: html/passreset.php msgid "Enter your new password:" @@ -707,7 +707,7 @@ msgstr "" #: html/tos.php #, php-format msgid "revision %d" -msgstr "" +msgstr "revízia: %d" #: html/tos.php msgid "I accept the terms and conditions above." @@ -790,7 +790,7 @@ msgstr "E-mailová adresa nie je platná." #: lib/acctfuncs.inc.php msgid "The backup email address is invalid." -msgstr "" +msgstr "Záložná e-mailová adresa nie je platná." #: lib/acctfuncs.inc.php msgid "The home page is invalid, please specify the full HTTP(s) URL." @@ -1256,7 +1256,7 @@ msgstr "PGP otlaÄok kľúÄa" #: template/account_details.php template/account_search_results.php #: template/pkgreq_results.php msgid "Status" -msgstr "Status" +msgstr "Stav" #: template/account_details.php msgid "Inactive since" @@ -1520,7 +1520,7 @@ msgstr "Copyright %s 2004-%d aurweb Development Team." #: template/header.php msgid " My Account" -msgstr "Môj úÄet" +msgstr " Môj úÄet" #: template/pkgbase_actions.php msgid "Package Actions" @@ -1814,7 +1814,7 @@ msgstr "Typ žiadosti" #: template/pkgreq_form.php msgid "Deletion" -msgstr "Vymazanie" +msgstr "VymazaÅ¥" #: template/pkgreq_form.php msgid "Orphan" @@ -1855,10 +1855,10 @@ msgstr "" #, php-format msgid "%d package request found." msgid_plural "%d package requests found." -msgstr[0] "Bola nájdená %d požiadavka ohľadom balíÄkov." -msgstr[1] "Boli nájdené %d požiadavky ohľadom balíÄkov." 
-msgstr[2] "Bolo nájdených %d požiadaviek ohľadom balíÄkov." -msgstr[3] "Bolo nájdených %d požiadaviek ohľadom balíÄkov." +msgstr[0] "Bola nájdená %d požiadavka týkajúc sa balíÄka." +msgstr[1] "Boli nájdené %d požiadavky týkajúcich sa balíÄkov." +msgstr[2] "Bolo nájdených %d požiadaviek týkajúcich sa balíÄkov." +msgstr[3] "Bolo nájdených %d požiadaviek týkajúcich sa balíÄkov." #: template/pkgreq_results.php template/pkg_search_results.php #, php-format @@ -1986,7 +1986,7 @@ msgstr "VyhľadávaÅ¥ podľa" #: template/pkg_search_form.php template/stats/user_table.php msgid "Out of Date" -msgstr "Neaktuálny" +msgstr "Neaktuálne" #: template/pkg_search_form.php template/search_accounts_form.php msgid "Sort by" @@ -2216,7 +2216,7 @@ msgstr "" msgid "" "If you no longer wish to receive notifications about this package, please go" " to the package page [2] and select \"{label}\"." -msgstr "" +msgstr "Ak si už viac neželáte dostávaÅ¥ upozornenia na tento balíÄek, prejdite prosím na stránku balíÄku [2] a vyberte \"{label}\"." #: scripts/notify.py #, python-brace-format @@ -2280,7 +2280,7 @@ msgid "" "\n" "-- \n" "If you no longer wish receive notifications about the new package, please go to [3] and click \"{label}\"." -msgstr "" +msgstr "{user} [1] zlúÄil(a) {old} [2] do {new} [3].\n\n-- \nAk si už viac neželáte dostávaÅ¥ upozornenia na tento balíÄek, prejdite prosím na stránku balíÄku [2] a vyberte \"{label}\"." #: scripts/notify.py #, python-brace-format @@ -2288,7 +2288,7 @@ msgid "" "{user} [1] deleted {pkgbase} [2].\n" "\n" "You will no longer receive notifications about this package." -msgstr "" +msgstr "{user} [1] odstránil(a) {pkgbase} [2].\n\nUpozornenia na tento balíÄek už viac nebudete dostávaÅ¥." #: scripts/notify.py #, python-brace-format @@ -2339,10 +2339,10 @@ msgstr "" #: templates/pkgbase/merge.html templates/packages/delete.html #: templates/packages/disown.html msgid "Related package request closure comments..." 
-msgstr "" +msgstr "Súvisiace komentáre k žiadosti o uzatvorenie balíÄka..." #: templates/pkgbase/merge.html templates/packages/delete.html msgid "" "This action will close any pending package requests related to it. If " "%sComments%s are omitted, a closure comment will be autogenerated." -msgstr "" +msgstr "Táto operácia uzavrie vÅ¡etky súvisiace nevybavené žiadosti balíÄkov. Ak %sComments%s vynecháte, použije sa automaticky generovaný komentár." From 46c925bc82722c35c7a0d55c5135e4174c8ec94f Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:10:13 -0800 Subject: [PATCH 208/415] update-sr translations --- po/sr.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/sr.po b/po/sr.po index 426ce599..4054d7df 100644 --- a/po/sr.po +++ b/po/sr.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Lukas Fleischer , 2011 # Slobodan Terzić , 2011-2012,2015-2017 @@ -9,10 +9,10 @@ msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Slobodan Terzić , 2011-2012,2015-2017\n" "Language-Team: Serbian (http://www.transifex.com/lfleischer/aurweb/language/sr/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From 8592bada16bec50a167b5c81ede867d5c8bc7b43 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:10:17 -0800 Subject: [PATCH 209/415] update-sr_RS translations --- po/sr_RS.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/sr_RS.po b/po/sr_RS.po index b7560965..a924dc4c 100644 --- 
a/po/sr_RS.po +++ b/po/sr_RS.po @@ -1,16 +1,16 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Nikola Stojković , 2013 msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Nikola Stojković , 2013\n" "Language-Team: Serbian (Serbia) (http://www.transifex.com/lfleischer/aurweb/language/sr_RS/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From 5609ddf791192a1d4b2d9a37b4af6d68b78b2839 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:10:20 -0800 Subject: [PATCH 210/415] update-sv_SE translations --- po/sv_SE.po | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/po/sv_SE.po b/po/sv_SE.po index 4887fdde..6abb8452 100644 --- a/po/sv_SE.po +++ b/po/sv_SE.po @@ -1,7 +1,7 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Johannes Löthberg , 2015-2016 # Kevin Morris , 2022 From b36cbd526b7cd6203401f30e11a3f6715725b9b5 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:10:23 -0800 Subject: [PATCH 211/415] update-tr translations --- po/tr.po | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/po/tr.po b/po/tr.po index 559a0008..b36c04f4 100644 --- a/po/tr.po +++ b/po/tr.po @@ -1,11 +1,11 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # tarakbumba , 2011,2013-2015 # tarakbumba , 2012,2014 -# Demiray Muhterem , 2015,2020-2021 +# Demiray Muhterem , 2015,2020-2022 # Koray Biçer , 2020 # Lukas Fleischer , 2011 # Samed Beyribey , 2012 @@ -15,10 +15,10 @@ msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Demiray Muhterem , 2015,2020-2022\n" "Language-Team: Turkish (http://www.transifex.com/lfleischer/aurweb/language/tr/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" @@ -2316,29 +2316,29 @@ msgstr "%s için kabul edilecek sahipsize gereksinim yok." #: aurweb/asgi.py msgid "Internal Server Error" -msgstr "" +msgstr "İç Sunucu Hatası" #: templates/errors/500.html msgid "A fatal error has occurred." -msgstr "" +msgstr "Önemli bir hata oluÅŸtu." #: templates/errors/500.html msgid "" "Details have been logged and will be reviewed by the postmaster posthaste. " "We apologize for any inconvenience this may have caused." -msgstr "" +msgstr "Ayrıntılar günlüğe kaydedildi ve posta yöneticisi tarafından gözden geçirilecek. Bunun neden olabileceÄŸi rahatsızlıktan dolayı özür dileriz." #: aurweb/scripts/notify.py msgid "AUR Server Error" -msgstr "" +msgstr "AUR Sunucu Hatası" #: templates/pkgbase/merge.html templates/packages/delete.html #: templates/packages/disown.html msgid "Related package request closure comments..." -msgstr "" +msgstr "İlgili paket isteÄŸi kapatma yorumları..." #: templates/pkgbase/merge.html templates/packages/delete.html msgid "" "This action will close any pending package requests related to it. If " "%sComments%s are omitted, a closure comment will be autogenerated." 
-msgstr "" +msgstr "Bu eylem, kendisiyle ilgili bekleyen paket isteklerini kapatacaktır. %s Yorum %s atlanırsa, bir kapatma yorumu otomatik olarak oluÅŸturulur." From 4cff1e500bd3491947a90b4559d7eac40e1f24fc Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:10:27 -0800 Subject: [PATCH 212/415] update-uk translations --- po/uk.po | 110 +++++++++++++++++++++++++++---------------------------- 1 file changed, 55 insertions(+), 55 deletions(-) diff --git a/po/uk.po b/po/uk.po index 3bffe4f6..13f3ab90 100644 --- a/po/uk.po +++ b/po/uk.po @@ -1,21 +1,21 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # Lukas Fleischer , 2011 # Rax Garfield , 2012 # Rax Garfield , 2012 # Vladislav Glinsky , 2019 -# Yarema aka Knedlyk , 2011-2018 +# Yarema aka Knedlyk , 2011-2018,2022 # Данило КороÑтіль , 2011 msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Yarema aka Knedlyk , 2011-2018,2022\n" "Language-Team: Ukrainian (http://www.transifex.com/lfleischer/aurweb/language/uk/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" @@ -80,7 +80,7 @@ msgstr "У Ð²Ð°Ñ Ð½ÐµÐ´Ð¾Ñтатньо прав Ð´Ð»Ñ Ñ€ÐµÐ´Ð°Ð³ÑƒÐ²Ð°Ð½Ð½Ñ #: html/account.php lib/acctfuncs.inc.php msgid "Invalid password." -msgstr "" +msgstr "Ðеправильний пароль" #: html/account.php msgid "Use this form to search existing accounts." @@ -377,7 +377,7 @@ msgstr "Увійдіть, ввівши облікові дані." 
#: html/login.php msgid "User name or primary email address" -msgstr "" +msgstr "Ðазва кориÑтувача або адреÑа електронної пошти" #: html/login.php template/account_delete.php template/account_edit_form.php msgid "Password" @@ -441,7 +441,7 @@ msgstr "Ваш пароль уÑпішно Ñкинуто." #: html/passreset.php msgid "Confirm your user name or primary e-mail address:" -msgstr "" +msgstr "Підтвердити назву кориÑтувача або адреÑа електронної пошти:" #: html/passreset.php msgid "Enter your new password:" @@ -460,11 +460,11 @@ msgstr "Продовжити" msgid "" "If you have forgotten the user name and the primary e-mail address you used " "to register, please send a message to the %saur-general%s mailing list." -msgstr "" +msgstr "Якщо Ви забули назву кориÑтувача Ñ– адреÑу електронної пошти, викориÑтану при реєÑтрації, звернітьÑÑ Ð´Ð¾ ÑпиÑку розÑÐ¸Ð»Ð°Ð½Ð½Ñ %saur-general%s." #: html/passreset.php msgid "Enter your user name or your primary e-mail address:" -msgstr "" +msgstr "Введіть назву кориÑтувача або адреÑу електронної пошти:" #: html/pkgbase.php msgid "Package Bases" @@ -480,7 +480,7 @@ msgstr "Вибрані пакунки вÑе ще мають влаÑника, msgid "" "The selected packages have not been adopted, check the confirmation " "checkbox." -msgstr "" +msgstr "Обрані пакунки не прийнÑто, перевірте, чи поÑтавлено галочку в полі підтвердженнÑ." #: html/pkgbase.php lib/pkgreqfuncs.inc.php msgid "Cannot find package to merge votes and comments into." @@ -586,7 +586,7 @@ msgid "" " package version in the AUR does not match the most recent commit. Flagging " "this package should only be done if the sources moved or changes in the " "PKGBUILD are required because of recent upstream changes." -msgstr "" +msgstr "ЗдаєтьÑÑ, це пакет VCS. Будь лаÑка, %sне%s позначайте його Ñк заÑтарілий, Ñкщо верÑÑ–Ñ Ð¿Ð°ÐºÐµÑ‚Ð° в AUR не відповідає оÑтанньому коміту. Позначити цей пакунок Ñлід лише в тому випадку, Ñкщо джерела переміщено або потрібні зміни в PKGBUILD в зв'Ñзку з оÑтанніми змінами." 
#: html/pkgflag.php #, php-format @@ -785,7 +785,7 @@ msgstr "Може міÑтити тільки один період, підкре #: lib/acctfuncs.inc.php msgid "Please confirm your new password." -msgstr "" +msgstr "Підтвердіть новий пароль, будь лаÑка." #: lib/acctfuncs.inc.php msgid "The email address is invalid." @@ -793,7 +793,7 @@ msgstr "ÐдреÑа електронної пошти неправильна." #: lib/acctfuncs.inc.php msgid "The backup email address is invalid." -msgstr "" +msgstr "Ðеправильна адреÑа електронної пошти Ð´Ð»Ñ Ð²Ñ–Ð´Ð½Ð¾Ð²Ð»ÐµÐ½Ð½Ñ." #: lib/acctfuncs.inc.php msgid "The home page is invalid, please specify the full HTTP(s) URL." @@ -836,15 +836,15 @@ msgstr "Публічний ключ SSH, %s%s%s, вже викориÑтовує #: lib/acctfuncs.inc.php msgid "The CAPTCHA is missing." -msgstr "" +msgstr "Пропущено CAPTCHA." #: lib/acctfuncs.inc.php msgid "This CAPTCHA has expired. Please try again." -msgstr "" +msgstr "Термін дії цієї CAPTCHA закінчивÑÑ. Будь лаÑка, Ñпробуйте ще раз." #: lib/acctfuncs.inc.php msgid "The entered CAPTCHA answer is invalid." -msgstr "" +msgstr "Введена відповідь CAPTCHA недійÑна." #: lib/acctfuncs.inc.php #, php-format @@ -886,7 +886,7 @@ msgstr "Обліковий Ð·Ð°Ð¿Ð¸Ñ Ð²Ð¸Ð»ÑƒÑ‡ÐµÐ½Ð¾" #: aurweb/routers/accounts.py msgid "You do not have permission to suspend accounts." -msgstr "" +msgstr " \nВи не маєте дозволу на Ð¿Ñ€Ð¸Ð·ÑƒÐ¿Ð¸Ð½ÐµÐ½Ð½Ñ Ð¾Ð±Ð»Ñ–ÐºÐ¾Ð²Ð¸Ñ… запиÑів." #: lib/acctfuncs.inc.php #, php-format @@ -975,27 +975,27 @@ msgstr "Інформації про пакунок не знайдено." #: aurweb/routers/auth.py msgid "Bad Referer header." -msgstr "" +msgstr "Поганий заголовок Referer." #: aurweb/routers/packages.py msgid "You did not select any packages to be notified about." -msgstr "" +msgstr "Ви не вибрали жодних пакунків, про Ñкі потрібно Ñповіщати." #: aurweb/routers/packages.py msgid "The selected packages' notifications have been enabled." -msgstr "" +msgstr "Ð¡Ð¿Ð¾Ð²Ñ–Ñ‰ÐµÐ½Ð½Ñ Ð´Ð»Ñ Ð²Ð¸Ð±Ñ€Ð°Ð½Ð¸Ñ… пакунків увімкнено." 
#: aurweb/routers/packages.py msgid "You did not select any packages for notification removal." -msgstr "" +msgstr "Ви не вибрали жодних пакунків Ð´Ð»Ñ Ð²Ð¸Ð´Ð°Ð»ÐµÐ½Ð½Ñ Ñповіщень." #: aurweb/routers/packages.py msgid "A package you selected does not have notifications enabled." -msgstr "" +msgstr "У вибраному вами пакунку не ввімкнено ÑповіщеннÑ." #: aurweb/routers/packages.py msgid "The selected packages' notifications have been removed." -msgstr "" +msgstr "Ð¡Ð¿Ð¾Ð²Ñ–Ñ‰ÐµÐ½Ð½Ñ Ð´Ð»Ñ Ð²Ð¸Ð±Ñ€Ð°Ð½Ð¸Ñ… пакунків видалено." #: lib/pkgbasefuncs.inc.php msgid "You must be logged in before you can flag packages." @@ -1035,7 +1035,7 @@ msgstr "Ðе вибрано жодного пакунку Ð´Ð»Ñ Ð²Ð¸Ð»ÑƒÑ‡ÐµÐ½ #: aurweb/routers/packages.py msgid "One of the packages you selected does not exist." -msgstr "" +msgstr "Один із вибраних Вами пакунків не Ñ–Ñнує." #: lib/pkgbasefuncs.inc.php msgid "The selected packages have been deleted." @@ -1047,7 +1047,7 @@ msgstr "Ð”Ð»Ñ Ð¿ÐµÑ€ÐµÐ¹Ð½ÑÑ‚Ñ‚Ñ Ð¿Ð°ÐºÑƒÐ½ÐºÑ–Ð² Ñлід увійти." #: aurweb/routers/package.py msgid "You are not allowed to adopt one of the packages you selected." -msgstr "" +msgstr "У Ð’Ð°Ñ Ð½ÐµÐ¼Ð°Ñ” дозволу прийнÑти один з вибраних Вами пакунків." #: lib/pkgbasefuncs.inc.php msgid "You must be logged in before you can disown packages." @@ -1055,7 +1055,7 @@ msgstr "Ð”Ð»Ñ Ð·Ñ€ÐµÑ‡ÐµÐ½Ð½Ñ Ð¿Ð°ÐºÑƒÐ½ÐºÑ–Ð² Ñлід увійти." #: aurweb/routers/packages.py msgid "You are not allowed to disown one of the packages you selected." -msgstr "" +msgstr "У Ð’Ð°Ñ Ð½ÐµÐ¼Ð°Ñ” дозволу відмовитиÑÑ Ð²Ñ–Ð´ одного з вибраних Вами пакунків" #: lib/pkgbasefuncs.inc.php msgid "You did not select any packages to adopt." 
@@ -1297,7 +1297,7 @@ msgstr "Редагувати обліковий Ð·Ð°Ð¿Ð¸Ñ Ñ†ÑŒÐ¾Ð³Ð¾ ÐºÐ¾Ñ€Ð¸Ñ #: template/account_details.php msgid "List this user's comments" -msgstr "" +msgstr "Показати коментарі цього кориÑтувача" #: template/account_edit_form.php #, php-format @@ -1312,7 +1312,7 @@ msgstr "Клацніть %sтут%s, щоб дізнатиÑÑ Ð±Ñ–Ð»ÑŒÑˆÐµ пр #: template/account_edit_form.php #, php-format msgid "Click %shere%s to list the comments made by this account." -msgstr "" +msgstr "ÐатиÑніть %sтут%s, щоб показати коментарі, зроблені цим обліковим запиÑом." #: template/account_edit_form.php msgid "required" @@ -1355,30 +1355,30 @@ msgid "" "If you do not hide your email address, it is visible to all registered AUR " "users. If you hide your email address, it is visible to members of the Arch " "Linux staff only." -msgstr "" +msgstr "Якщо ви не приховаєте Ñвою адреÑу електронної пошти, тоді Ñ—Ñ— можуть бачити вÑÑ– зареєÑтровані кориÑтувачі AUR. Якщо Ви приховаєте Ñвою адреÑу електронної пошти, тоді Ñ—Ñ— зможуть бачити лише Ñпівробітники Arch Linux." #: template/account_edit_form.php msgid "Backup Email Address" -msgstr "" +msgstr "Резервна адреÑа електронної пошти" #: template/account_edit_form.php msgid "" "Optionally provide a secondary email address that can be used to restore " "your account in case you lose access to your primary email address." -msgstr "" +msgstr "За бажаннÑм вкажіть додаткову адреÑу електронної пошти, Ñку можна викориÑтовувати Ð´Ð»Ñ Ð²Ñ–Ð´Ð½Ð¾Ð²Ð»ÐµÐ½Ð½Ñ Ð¾Ð±Ð»Ñ–ÐºÐ¾Ð²Ð¾Ð³Ð¾ запиÑу на випадок втрати доÑтупу до Ñвоєї оÑновної електронної адреÑи." #: template/account_edit_form.php msgid "" "Password reset links are always sent to both your primary and your backup " "email address." -msgstr "" +msgstr "ПоÑÐ¸Ð»Ð°Ð½Ð½Ñ Ð´Ð»Ñ ÑÐºÐ¸Ð´Ð°Ð½Ð½Ñ Ð¿Ð°Ñ€Ð¾Ð»Ñ Ð·Ð°Ð²Ð¶Ð´Ð¸ надÑилаютьÑÑ Ñк на вашу оÑновну, так Ñ– на резервну адреÑу електронної пошти." 
#: template/account_edit_form.php #, php-format msgid "" "Your backup email address is always only visible to members of the Arch " "Linux staff, independent of the %s setting." -msgstr "" +msgstr "Вашу резервну електронну адреÑу завжди бачать лише Ñпівробітники Arch Linux, незалежно від налаштувань. %s ." #: template/account_edit_form.php msgid "Language" @@ -1392,7 +1392,7 @@ msgstr "ЧаÑова зона" msgid "" "If you want to change the password, enter a new password and confirm the new" " password by entering it again." -msgstr "" +msgstr "Якщо Ви бажаєте змінити пароль, введіть новий пароль Ñ– підтвердьте новий пароль, ввівши його ще раз." #: template/account_edit_form.php msgid "Re-type password" @@ -1426,21 +1426,21 @@ msgstr "Ð¡Ð¿Ð¾Ð²Ñ–Ñ‰ÐµÐ½Ð½Ñ Ð¿Ñ€Ð¾ зміну влаÑника" #: template/account_edit_form.php msgid "To confirm the profile changes, please enter your current password:" -msgstr "" +msgstr "Щоб підтвердити зміни профілю, введіть поточний пароль:" #: template/account_edit_form.php msgid "Your current password" -msgstr "" +msgstr "Ваш поточний пароль" #: template/account_edit_form.php msgid "" "To protect the AUR against automated account creation, we kindly ask you to " "provide the output of the following command:" -msgstr "" +msgstr "Щоб захиÑтити AUR від автоматичного ÑÑ‚Ð²Ð¾Ñ€ÐµÐ½Ð½Ñ Ð¾Ð±Ð»Ñ–ÐºÐ¾Ð²Ð¾Ð³Ð¾ запиÑу, ми проÑимо Ð’Ð°Ñ Ð½Ð°Ð´Ð°Ñ‚Ð¸ результат такої команди:" #: template/account_edit_form.php msgid "Answer" -msgstr "" +msgstr "Відповідь" #: template/account_edit_form.php template/pkgbase_details.php #: template/pkg_details.php @@ -1605,7 +1605,7 @@ msgstr "тільки Ð´Ð»Ñ Ñ‡Ð¸Ñ‚Ð°Ð½Ð½Ñ" #: template/pkgbase_details.php template/pkg_details.php msgid "click to copy" -msgstr "" +msgstr "натиÑніть, щоб Ñкопіювати" #: template/pkgbase_details.php template/pkg_details.php #: template/pkg_search_form.php @@ -1657,12 +1657,12 @@ msgstr "Додати коментар" msgid "" "Git commit identifiers referencing commits in the AUR package repository and" " URLs are 
converted to links automatically." -msgstr "" +msgstr "Відповідні відÑилачі комітів до ідентифікаторів комітів Git в Ñховищі пакунків AUR та URL-адреÑи автоматично перетворюютьÑÑ Ð½Ð° поÑиланнÑ." #: template/pkg_comment_form.php #, php-format msgid "%sMarkdown syntax%s is partially supported." -msgstr "" +msgstr "%sÐ¡Ð¸Ð½Ñ‚Ð°ÐºÑ Markdown%s підтримуєтьÑÑ Ñ‡Ð°Ñтково." #: template/pkg_comments.php msgid "Pinned Comments" @@ -1674,7 +1674,7 @@ msgstr "ОÑтанні коментарі" #: template/pkg_comments.php msgid "Comments for" -msgstr "" +msgstr "Коментарі длÑ" #: template/pkg_comments.php #, php-format @@ -1689,7 +1689,7 @@ msgstr "Ðнонімний коментар про %s" #: template/pkg_comments.php #, php-format msgid "Commented on package %s on %s" -msgstr "" +msgstr "Коментовано пакунок %s з датою %s" #: template/pkg_comments.php #, php-format @@ -2283,7 +2283,7 @@ msgid "" "\n" "-- \n" "If you no longer wish receive notifications about the new package, please go to [3] and click \"{label}\"." -msgstr "" +msgstr "{user} [1] з'єднав {old} [2] до {new} [3].\n\n-- \nЯкщо Ви не бажаєте більше отримувати ÑÐ¿Ð¾Ð²Ñ–Ñ‰ÐµÐ½Ð½Ñ Ð¿Ñ€Ð¾ новий пакунок, перейдіть на Ñторінку [3] Ñ– натиÑніть \"{label}\"." #: scripts/notify.py #, python-brace-format @@ -2307,45 +2307,45 @@ msgstr "Будь лаÑка, не забудьте подати Ñвій гол #: aurweb/routers/accounts.py msgid "Invalid account type provided." -msgstr "" +msgstr "Вказано недійÑний тип облікового запиÑу." #: aurweb/routers/accounts.py msgid "You do not have permission to change account types." -msgstr "" +msgstr "У Ð’Ð°Ñ Ð½ÐµÐ¼Ð°Ñ” дозволу змінювати типи облікових запиÑів." #: aurweb/routers/accounts.py msgid "You do not have permission to change this user's account type to %s." -msgstr "" +msgstr "Ви не маєте дозволу змінити тип облікового запиÑу цього кориÑтувача на %s." #: aurweb/packages/requests.py msgid "No due existing orphan requests to accept for %s." -msgstr "" +msgstr "Ðемає наÑвних запитів на прийнÑÑ‚Ñ‚Ñ Ð´Ð»Ñ %s." 
#: aurweb/asgi.py msgid "Internal Server Error" -msgstr "" +msgstr "Ð’Ð½ÑƒÑ‚Ñ€Ñ–ÑˆÐ½Ñ Ð¿Ð¾Ð¼Ð¸Ð»ÐºÐ° Ñервера" #: templates/errors/500.html msgid "A fatal error has occurred." -msgstr "" +msgstr "СталаÑÑ Ñ„Ð°Ñ‚Ð°Ð»ÑŒÐ½Ð° помилка." #: templates/errors/500.html msgid "" "Details have been logged and will be reviewed by the postmaster posthaste. " "We apologize for any inconvenience this may have caused." -msgstr "" +msgstr "Подробиці зареєÑтровані та будуть переглÑнуті поштмейÑтером posthaste. ПроÑимо Ð²Ð¸Ð±Ð°Ñ‡ÐµÐ½Ð½Ñ Ð·Ð° можливі незручноÑті." #: aurweb/scripts/notify.py msgid "AUR Server Error" -msgstr "" +msgstr "Помилка Ñервера AUR" #: templates/pkgbase/merge.html templates/packages/delete.html #: templates/packages/disown.html msgid "Related package request closure comments..." -msgstr "" +msgstr "Пов'Ñзані коментарі щодо Ð·Ð°ÐºÑ€Ð¸Ñ‚Ñ‚Ñ Ð·Ð°Ð¿Ð¸Ñ‚Ñƒ на пакунок..." #: templates/pkgbase/merge.html templates/packages/delete.html msgid "" "This action will close any pending package requests related to it. If " "%sComments%s are omitted, a closure comment will be autogenerated." -msgstr "" +msgstr "Ð¦Ñ Ð´Ñ–Ñ Ð·Ð°ÐºÑ€Ð¸Ñ” вÑÑ– запити на пакет, що очікують на розглÑд. Якщо %sКоментарі%s пропущено, тоді буде автоматично згенеровано коментар закриттÑ." From 2770952dfbaf2bf819d3670e885990f73da35078 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:10:30 -0800 Subject: [PATCH 213/415] update-vi translations --- po/vi.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/vi.po b/po/vi.po index 87f7faac..a71c9ed5 100644 --- a/po/vi.po +++ b/po/vi.po @@ -1,15 +1,15 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: FULL NAME \n" "Language-Team: Vietnamese (http://www.transifex.com/lfleischer/aurweb/language/vi/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From ef0e3b9f357a34577eeeb49bd32162ff12f8af62 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:10:33 -0800 Subject: [PATCH 214/415] update-zh translations --- po/zh.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/po/zh.po b/po/zh.po index c932df9c..77f31fe4 100644 --- a/po/zh.po +++ b/po/zh.po @@ -1,15 +1,15 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: FULL NAME \n" "Language-Team: Chinese (http://www.transifex.com/lfleischer/aurweb/language/zh/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" From 65d364fe9066e19f2a0c1dbad50642e9ed680096 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:10:36 -0800 Subject: [PATCH 215/415] update-zh_CN translations --- po/zh_CN.po | 93 +++++++++++++++++++++++++++-------------------------- 1 file changed, 47 insertions(+), 46 deletions(-) diff --git a/po/zh_CN.po b/po/zh_CN.po index 675d15a3..a61781fb 100644 --- a/po/zh_CN.po +++ b/po/zh_CN.po @@ -1,13 +1,14 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. 
-# +# # Translators: # Feng Chao , 2015-2016 # dongfengweixiao , 2015 # dongfengweixiao , 2015 # Felix Yan , 2014,2021 # Feng Chao , 2012,2021 +# lakejason0 , 2022 # Lukas Fleischer , 2011 # pingplug , 2017-2018 # Feng Chao , 2012 @@ -17,10 +18,10 @@ msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: lakejason0 , 2022\n" "Language-Team: Chinese (China) (http://www.transifex.com/lfleischer/aurweb/language/zh_CN/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" @@ -42,7 +43,7 @@ msgstr "æç¤º" #: html/404.php msgid "Git clone URLs are not meant to be opened in a browser." -msgstr "Git clone URLs 并䏿„味ç€èƒ½è¢«æµè§ˆå™¨æ‰“开。" +msgstr "Git clone URL å¹¶ä¸åº”该使用æµè§ˆå™¨æ‰“开。" #: html/404.php #, php-format @@ -65,11 +66,11 @@ msgstr "åˆ«æ…Œï¼æœ¬ç«™æ­£åœ¨ç»´æŠ¤ä¸­ï¼Œä¸ä¹…åŽå°†æ¢å¤ã€‚" #: html/account.php msgid "Account" -msgstr "叿ˆ·" +msgstr "账户" #: html/account.php template/header.php msgid "Accounts" -msgstr "叿ˆ·" +msgstr "账户" #: html/account.php html/addvote.php msgid "You are not allowed to access this area." @@ -81,7 +82,7 @@ msgstr "æ— æ³•èŽ·å–æŒ‡å®šç”¨æˆ·çš„ä¿¡æ¯ã€‚" #: html/account.php msgid "You do not have permission to edit this account." -msgstr "您没有æƒé™ç¼–è¾‘æ­¤å¸æˆ·ã€‚" +msgstr "您没有æƒé™ç¼–辑此账户。" #: html/account.php lib/acctfuncs.inc.php msgid "Invalid password." @@ -89,11 +90,11 @@ msgstr "å¯†ç æ— æ•ˆã€‚" #: html/account.php msgid "Use this form to search existing accounts." -msgstr "ä½¿ç”¨æ­¤è¡¨å•æŸ¥æ‰¾å­˜åœ¨çš„叿ˆ·ã€‚" +msgstr "ä½¿ç”¨æ­¤è¡¨å•æŸ¥æ‰¾å­˜åœ¨çš„账户。" #: html/account.php msgid "You must log in to view user information." 
-msgstr "您需è¦ç™»å½•åŽæ‰èƒ½å¯Ÿçœ‹ç”¨æˆ·ä¿¡æ¯ã€‚" +msgstr "您需è¦ç™»å½•åŽæ‰èƒ½æŸ¥çœ‹ç”¨æˆ·ä¿¡æ¯ã€‚" #: html/addvote.php template/tu_list.php msgid "Add Proposal" @@ -485,7 +486,7 @@ msgstr "选中的软件包未被弃置,请检查确认å¤é€‰æ¡†ã€‚" msgid "" "The selected packages have not been adopted, check the confirmation " "checkbox." -msgstr "" +msgstr "选中的软件包未被接管,请检查确认å¤é€‰æ¡†ã€‚" #: html/pkgbase.php lib/pkgreqfuncs.inc.php msgid "Cannot find package to merge votes and comments into." @@ -591,7 +592,7 @@ msgid "" " package version in the AUR does not match the most recent commit. Flagging " "this package should only be done if the sources moved or changes in the " "PKGBUILD are required because of recent upstream changes." -msgstr "" +msgstr "这似乎是 VCS 软件包。请%sä¸è¦%s因为 AUR 中的软件包版本与最新的 commit ä¸åŒ¹é…å°±å°†å…¶æ ‡è®°ä¸ºè¿‡æœŸã€‚ä»…å½“æ¥æºç§»åŠ¨æˆ–ç”±äºŽæœ€æ–°ä¸Šæ¸¸æ›´æ”¹éœ€è¦æ›´æ”¹ PKGBUILD æ—¶æ‰æ ‡è®°æ­¤è½¯ä»¶åŒ…。" #: html/pkgflag.php #, php-format @@ -701,7 +702,7 @@ msgstr "注册" #: html/register.php msgid "Use this form to create an account." -msgstr "使用此表å•创建å¸å·ã€‚" +msgstr "使用此表å•创建账户。" #: html/tos.php msgid "Terms of Service" @@ -854,12 +855,12 @@ msgstr "输入的验è¯ç æ— æ•ˆã€‚" #: lib/acctfuncs.inc.php #, php-format msgid "Error trying to create account, %s%s%s." -msgstr "å°è¯•åˆ›å»ºå¸æˆ· %s%s%s 失败。" +msgstr "å°è¯•创建账户 %s%s%s 失败。" #: lib/acctfuncs.inc.php #, php-format msgid "The account, %s%s%s, has been successfully created." -msgstr "叿ˆ· %s%s%s 创建æˆåŠŸã€‚" +msgstr "账户 %s%s%s 创建æˆåŠŸã€‚" #: lib/acctfuncs.inc.php msgid "A password reset key has been sent to your e-mail address." @@ -867,7 +868,7 @@ msgstr "密ç é‡ç½®å¯†é’¥å·²ç»å‘é€åˆ°æ‚¨çš„邮箱。" #: lib/acctfuncs.inc.php msgid "Click on the Login link above to use your account." -msgstr "点击上方的登录链接以使用您的å¸å·ã€‚" +msgstr "点击上方的登录链接以使用您的账户。" #: lib/acctfuncs.inc.php #, php-format @@ -877,7 +878,7 @@ msgstr "账户 %s%s%s 没有被修改。" #: lib/acctfuncs.inc.php #, php-format msgid "The account, %s%s%s, has been successfully modified." 
-msgstr "å¸å· %s%s%s 已被æˆåŠŸä¿®æ”¹ã€‚" +msgstr "账户 %s%s%s 已被æˆåŠŸä¿®æ”¹ã€‚" #: lib/acctfuncs.inc.php msgid "" @@ -887,11 +888,11 @@ msgstr "登录表å•ç›®å‰å¯¹æ‚¨æ‰€ä½¿ç”¨çš„ IP 地å€ç¦ç”¨ï¼ŒåŽŸå› å¯èƒ½æ˜¯ #: lib/acctfuncs.inc.php msgid "Account suspended" -msgstr "å¸å·è¢«åœç”¨" +msgstr "账户被åœç”¨" #: aurweb/routers/accounts.py msgid "You do not have permission to suspend accounts." -msgstr "" +msgstr "您没有æƒé™åœç”¨æ­¤è´¦æˆ·ã€‚" #: lib/acctfuncs.inc.php #, php-format @@ -980,27 +981,27 @@ msgstr "无法找到软件包的详细信æ¯ã€‚" #: aurweb/routers/auth.py msgid "Bad Referer header." -msgstr "" +msgstr "错误的 Referer 消æ¯å¤´ã€‚" #: aurweb/routers/packages.py msgid "You did not select any packages to be notified about." -msgstr "" +msgstr "æ‚¨æ²¡æœ‰é€‰æ‹©è¦æŽ¥å—通知的软件包。" #: aurweb/routers/packages.py msgid "The selected packages' notifications have been enabled." -msgstr "" +msgstr "选中的软件包的通知已被å¯ç”¨ã€‚" #: aurweb/routers/packages.py msgid "You did not select any packages for notification removal." -msgstr "" +msgstr "您没有选择è¦ç§»é™¤é€šçŸ¥çš„软件包。" #: aurweb/routers/packages.py msgid "A package you selected does not have notifications enabled." -msgstr "" +msgstr "所选中的软件包并没有å¯ç”¨é€šçŸ¥ã€‚" #: aurweb/routers/packages.py msgid "The selected packages' notifications have been removed." -msgstr "" +msgstr "选中的软件包的通知已被移除。" #: lib/pkgbasefuncs.inc.php msgid "You must be logged in before you can flag packages." @@ -1040,7 +1041,7 @@ msgstr "您没有选择è¦åˆ é™¤çš„软件包。" #: aurweb/routers/packages.py msgid "One of the packages you selected does not exist." -msgstr "" +msgstr "选中的其中一个软件包ä¸å­˜åœ¨ã€‚" #: lib/pkgbasefuncs.inc.php msgid "The selected packages have been deleted." @@ -1052,7 +1053,7 @@ msgstr "您需è¦ç™»å½•åŽæ‰èƒ½æŽ¥ç®¡è½¯ä»¶åŒ…。" #: aurweb/routers/package.py msgid "You are not allowed to adopt one of the packages you selected." -msgstr "" +msgstr "您ä¸è¢«å…许接管选中的其中一个软件包。" #: lib/pkgbasefuncs.inc.php msgid "You must be logged in before you can disown packages." 
@@ -1060,7 +1061,7 @@ msgstr "您需è¦ç™»å½•åŽæ‰èƒ½å¼ƒç½®è½¯ä»¶åŒ…。" #: aurweb/routers/packages.py msgid "You are not allowed to disown one of the packages you selected." -msgstr "" +msgstr "您ä¸è¢«å…许弃置选中的其中一个软件包。" #: lib/pkgbasefuncs.inc.php msgid "You did not select any packages to adopt." @@ -1197,7 +1198,7 @@ msgstr "请求关闭æˆåŠŸã€‚" #: template/account_delete.php #, php-format msgid "You can use this form to permanently delete the AUR account %s." -msgstr "您å¯ä»¥ä½¿ç”¨è¿™ä¸ªè¡¨å•永久删除 AUR å¸å· %s。" +msgstr "您å¯ä»¥ä½¿ç”¨è¿™ä¸ªè¡¨å•永久删除 AUR è´¦å· %s。" #: template/account_delete.php #, php-format @@ -1216,7 +1217,7 @@ msgstr "用户å" #: template/account_details.php template/account_edit_form.php #: template/search_accounts_form.php msgid "Account Type" -msgstr "叿ˆ·ç±»åˆ«" +msgstr "账户类别" #: template/account_details.php template/tu_details.php #: template/tu_last_votes_list.php template/tu_list.php @@ -1298,7 +1299,7 @@ msgstr "查看这个用户æäº¤çš„软件包" #: template/account_details.php msgid "Edit this user's account" -msgstr "编辑此用户的å¸å·" +msgstr "编辑此用户的账户" #: template/account_details.php msgid "List this user's comments" @@ -1307,7 +1308,7 @@ msgstr "显示此用户的评论" #: template/account_edit_form.php #, php-format msgid "Click %shere%s if you want to permanently delete this account." -msgstr "如果你想永久删除这个å¸å·ï¼Œè¯·ç‚¹å‡» %s这里%s。" +msgstr "如果你想永久删除这个账户,请点击 %s这里%s。" #: template/account_edit_form.php #, php-format @@ -1339,7 +1340,7 @@ msgstr "å—信用户" #: template/account_edit_form.php template/search_accounts_form.php msgid "Account Suspended" -msgstr "叿ˆ·è¢«æš‚åœ" +msgstr "账户被åœç”¨" #: template/account_edit_form.php msgid "Inactive" @@ -1370,7 +1371,7 @@ msgstr "备用邮件地å€" msgid "" "Optionally provide a secondary email address that can be used to restore " "your account in case you lose access to your primary email address." 
-msgstr "选择性的æä¾›çš„备用的邮件地å€ã€‚该邮件地å€å°†åœ¨ä½ çš„主è¦é‚®ä»¶åœ°å€ä¸å¯ç”¨æ—¶ç”¨äºŽæ¢å¤ä½ çš„å¸å·ã€‚" +msgstr "选择性的æä¾›çš„备用的邮件地å€ã€‚该邮件地å€å°†åœ¨ä½ çš„主è¦é‚®ä»¶åœ°å€ä¸å¯ç”¨æ—¶ç”¨äºŽæ¢å¤ä½ çš„账户。" #: template/account_edit_form.php msgid "" @@ -1466,7 +1467,7 @@ msgstr "æ²¡æœ‰ç»“æžœç¬¦åˆæ‚¨çš„æœç´¢æ¡ä»¶ã€‚" #: template/account_search_results.php msgid "Edit Account" -msgstr "ç¼–è¾‘å¸æˆ·" +msgstr "编辑账户" #: template/account_search_results.php msgid "Suspended" @@ -1528,7 +1529,7 @@ msgstr "ç‰ˆæƒæ‰€æœ‰ %s 2004-%d aurweb å¼€å‘组。" #: template/header.php msgid " My Account" -msgstr " æˆ‘çš„å¸æˆ·" +msgstr " 我的账户" #: template/pkgbase_actions.php msgid "Package Actions" @@ -2297,45 +2298,45 @@ msgstr "è¯·è®°å¾—ä¸ºææ¡ˆ {id} [1] 投票,投票时段将于48å°æ—¶å†…ç»“æŸ #: aurweb/routers/accounts.py msgid "Invalid account type provided." -msgstr "" +msgstr "æä¾›çš„账户类别无效。" #: aurweb/routers/accounts.py msgid "You do not have permission to change account types." -msgstr "" +msgstr "您没有æƒé™æ›´æ”¹è´¦æˆ·ç±»åˆ«ã€‚" #: aurweb/routers/accounts.py msgid "You do not have permission to change this user's account type to %s." -msgstr "" +msgstr "您没有æƒé™å°†æ­¤ç”¨æˆ·çš„账户类别更改为%s。" #: aurweb/packages/requests.py msgid "No due existing orphan requests to accept for %s." -msgstr "" +msgstr "没有为 %s 接å—的现有孤立请求。" #: aurweb/asgi.py msgid "Internal Server Error" -msgstr "" +msgstr "内部æœåŠ¡å™¨é”™è¯¯" #: templates/errors/500.html msgid "A fatal error has occurred." -msgstr "" +msgstr "å‘生了严é‡çš„错误。" #: templates/errors/500.html msgid "" "Details have been logged and will be reviewed by the postmaster posthaste. " "We apologize for any inconvenience this may have caused." -msgstr "" +msgstr "详细信æ¯å·²è¢«è®°å½•,并会交由 Postmaster 尽快调查。对您造æˆçš„ä¸ä¾¿ï¼Œæˆ‘们深感抱歉。" #: aurweb/scripts/notify.py msgid "AUR Server Error" -msgstr "" +msgstr "AUR æœåŠ¡å™¨é”™è¯¯" #: templates/pkgbase/merge.html templates/packages/delete.html #: templates/packages/disown.html msgid "Related package request closure comments..." 
-msgstr "" +msgstr "相关软件包请求关闭评论…" #: templates/pkgbase/merge.html templates/packages/delete.html msgid "" "This action will close any pending package requests related to it. If " "%sComments%s are omitted, a closure comment will be autogenerated." -msgstr "" +msgstr "æ­¤æ“作将关闭任何有关的未处ç†çš„软件包请求。若çœç•¥%s评论%s,将会自动生æˆå…³é—­è¯„论。" From 154bb239bfb047ca6a6bc0ab244835570f6d14f5 Mon Sep 17 00:00:00 2001 From: Kevin Morris Date: Tue, 10 Jan 2023 14:10:40 -0800 Subject: [PATCH 216/415] update-zh_TW translations --- po/zh_TW.po | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/po/zh_TW.po b/po/zh_TW.po index 1526b4a9..56014aac 100644 --- a/po/zh_TW.po +++ b/po/zh_TW.po @@ -1,18 +1,19 @@ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the AURWEB package. -# +# # Translators: # pan93412 , 2018 +# Cycatz , 2022 # 黃æŸè«º , 2014-2017 # 黃æŸè«º , 2020-2022 msgid "" msgstr "" "Project-Id-Version: aurweb\n" -"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n" +"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n" "POT-Creation-Date: 2020-01-31 09:29+0100\n" -"PO-Revision-Date: 2022-01-18 17:18+0000\n" -"Last-Translator: Kevin Morris \n" +"PO-Revision-Date: 2011-04-10 13:21+0000\n" +"Last-Translator: Cycatz , 2022\n" "Language-Team: Chinese (Taiwan) (http://www.transifex.com/lfleischer/aurweb/language/zh_TW/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" @@ -1990,7 +1991,7 @@ msgstr "æ¯é é¡¯ç¤º" #: template/pkg_search_form.php template/pkg_search_results.php msgid "Go" -msgstr "到" +msgstr "æœå°‹" #: template/pkg_search_form.php msgid "Orphans" @@ -2324,10 +2325,10 @@ msgstr "AUR 伺æœå™¨éŒ¯èª¤" #: templates/pkgbase/merge.html templates/packages/delete.html #: templates/packages/disown.html msgid "Related package request closure comments..." 
-msgstr "" +msgstr "相關軟體包請求關閉留言……" #: templates/pkgbase/merge.html templates/packages/delete.html msgid "" "This action will close any pending package requests related to it. If " "%sComments%s are omitted, a closure comment will be autogenerated." -msgstr "" +msgstr "此動作將會關閉任何關於此的擱置中軟體包請求。若çœç•¥%s留言%s,將會自動產生關閉留言。" From ff44eb02de7b45bf193b66a0695bca82dd8896b8 Mon Sep 17 00:00:00 2001 From: moson-mo Date: Wed, 11 Jan 2023 20:12:28 +0100 Subject: [PATCH 217/415] feat: add link to mailing list article on requests page Provides a convenient way to check for responses on the mailing list prior to Accepting/Rejecting requests. We compute the Message-ID hash that can be used to link back to the article in the mailing list archive. Signed-off-by: moson-mo --- aurweb/models/package_request.py | 18 +++++++++++++- conf/config.defaults | 1 + templates/requests.html | 5 +++- test/test_package_request.py | 40 +++++++++++++++++++++++++++++++- 4 files changed, 61 insertions(+), 3 deletions(-) diff --git a/aurweb/models/package_request.py b/aurweb/models/package_request.py index 31071df4..94ff064b 100644 --- a/aurweb/models/package_request.py +++ b/aurweb/models/package_request.py @@ -1,7 +1,10 @@ +import base64 +import hashlib + from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import backref, relationship -from aurweb import schema +from aurweb import config, schema from aurweb.models.declarative import Base from aurweb.models.package_base import PackageBase as _PackageBase from aurweb.models.request_type import RequestType as _RequestType @@ -103,3 +106,16 @@ class PackageRequest(Base): def status_display(self) -> str: """Return a display string for the Status column.""" return self.STATUS_DISPLAY[self.Status] + + def ml_message_id_hash(self) -> str: + """Return the X-Message-ID-Hash that is used in the mailing list archive.""" + # X-Message-ID-Hash is a base32 encoded SHA1 hash + msgid = f"pkg-request-{str(self.ID)}@aur.archlinux.org" + sha1 = 
hashlib.sha1(msgid.encode()).digest() + + return base64.b32encode(sha1).decode() + + def ml_message_url(self) -> str: + """Return the mailing list URL for the request.""" + url = config.get("options", "ml_thread_url") % (self.ml_message_id_hash()) + return url diff --git a/conf/config.defaults b/conf/config.defaults index 6cdffe65..06e73afe 100644 --- a/conf/config.defaults +++ b/conf/config.defaults @@ -25,6 +25,7 @@ max_rpc_results = 5000 max_search_results = 2500 max_depends = 1000 aur_request_ml = aur-requests@lists.archlinux.org +ml_thread_url = https://lists.archlinux.org/archives/list/aur-requests@lists.archlinux.org/thread/%s request_idle_time = 1209600 request_archive_time = 15552000 auto_orphan_age = 15552000 diff --git a/templates/requests.html b/templates/requests.html index 669b46b0..697fbedb 100644 --- a/templates/requests.html +++ b/templates/requests.html @@ -115,8 +115,11 @@ {% if result.User %} {{ result.User.Username }} - +   {% endif %} + + (PRQ#{{ result.ID }}) + {% set idle_time = config_getint("options", "request_idle_time") %} {% set time_delta = (utcnow - result.RequestTS) | int %} diff --git a/test/test_package_request.py b/test/test_package_request.py index a69a0617..2bbf56c2 100644 --- a/test/test_package_request.py +++ b/test/test_package_request.py @@ -1,7 +1,7 @@ import pytest from sqlalchemy.exc import IntegrityError -from aurweb import db, time +from aurweb import config, db, time from aurweb.models.account_type import USER_ID from aurweb.models.package_base import PackageBase from aurweb.models.package_request import ( @@ -190,3 +190,41 @@ def test_package_request_status_display(user: User, pkgbase: PackageBase): pkgreq.Status = 124 with pytest.raises(KeyError): pkgreq.status_display() + + +def test_package_request_ml_message_id_hash(user: User, pkgbase: PackageBase): + with db.begin(): + pkgreq = db.create( + PackageRequest, + ID=1, + ReqTypeID=MERGE_ID, + User=user, + PackageBase=pkgbase, + PackageBaseName=pkgbase.Name, + 
Comments=str(), + ClosureComment=str(), + Status=PENDING_ID, + ) + + # A hash composed with ID=1 should result in BNNNRWOFDRSQP4LVPT77FF2GUFR45KW5 + assert pkgreq.ml_message_id_hash() == "BNNNRWOFDRSQP4LVPT77FF2GUFR45KW5" + + +def test_package_request_ml_message_url(user: User, pkgbase: PackageBase): + with db.begin(): + pkgreq = db.create( + PackageRequest, + ID=1, + ReqTypeID=MERGE_ID, + User=user, + PackageBase=pkgbase, + PackageBaseName=pkgbase.Name, + Comments=str(), + ClosureComment=str(), + Status=PENDING_ID, + ) + + assert ( + config.get("options", "ml_thread_url") % (pkgreq.ml_message_id_hash()) + == pkgreq.ml_message_url() + ) From 2150f8bc191e92a0b4e99b438388add88963d827 Mon Sep 17 00:00:00 2001 From: moson-mo Date: Fri, 13 Jan 2023 10:14:53 +0100 Subject: [PATCH 218/415] fix(docker): nginx health check nginx health check always results in "unhealthy": There is no such option "--no-verify" for curl. We can use "-k" or "--insecure" for disabling SSL checks. Signed-off-by: moson-mo --- docker/health/nginx.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/health/nginx.sh b/docker/health/nginx.sh index c530103d..df76bc2b 100755 --- a/docker/health/nginx.sh +++ b/docker/health/nginx.sh @@ -1,2 +1,2 @@ #!/bin/bash -exec curl --no-verify -q https://localhost:8444 +exec curl -k -q https://localhost:8444 From f6c4891415766b1030fa20f2d69af78a4482cc95 Mon Sep 17 00:00:00 2001 From: moson-mo Date: Sat, 14 Jan 2023 13:12:33 +0100 Subject: [PATCH 219/415] feat: add Support section to Dashboard Adds the "Support" section (displayed on "Home") to the "Dashboard" page as well. 
Signed-off-by: moson-mo --- templates/dashboard.html | 3 ++ templates/home.html | 66 +-------------------------------- templates/partials/support.html | 65 ++++++++++++++++++++++++++++++++ test/test_homepage.py | 40 +++++++++++++------- 4 files changed, 96 insertions(+), 78 deletions(-) create mode 100644 templates/partials/support.html diff --git a/templates/dashboard.html b/templates/dashboard.html index 48f42dc6..e88fde4a 100644 --- a/templates/dashboard.html +++ b/templates/dashboard.html @@ -62,6 +62,9 @@ {% endwith %} {% endif %} +
    + {% include 'partials/support.html' %} +
    diff --git a/templates/home.html b/templates/home.html index 3a7bc76d..e8296239 100644 --- a/templates/home.html +++ b/templates/home.html @@ -24,71 +24,7 @@

    {% trans %}Learn more...{% endtrans %}

    -

    {% trans %}Support{% endtrans %}

    -

    {% trans %}Package Requests{% endtrans %}

    -
    -

    - {{ "There are three types of requests that can be filed in the %sPackage Actions%s box on the package details page:" - | tr - | format("", "") - | safe - }} -

    -
      -
    • {% trans %}Orphan Request{% endtrans %}: {% trans %}Request a package to be disowned, e.g. when the maintainer is inactive and the package has been flagged out-of-date for a long time.{% endtrans %}
    • -
    • {% trans %}Deletion Request{% endtrans %}: {%trans %}Request a package to be removed from the Arch User Repository. Please do not use this if a package is broken and can be fixed easily. Instead, contact the package maintainer and file orphan request if necessary.{% endtrans %}
    • -
    • {% trans %}Merge Request{% endtrans %}: {% trans %}Request a package to be merged into another one. Can be used when a package needs to be renamed or replaced by a split package.{% endtrans %}
    • -
    -

    - {{ "If you want to discuss a request, you can use the %saur-requests%s mailing list. However, please do not use that list to file requests." - | tr - | format('', "") - | safe - }} -

    -
    -

    {% trans %}Submitting Packages{% endtrans %}

    -
    -

    - {{ "Git over SSH is now used to submit packages to the AUR. See the %sSubmitting packages%s section of the Arch User Repository ArchWiki page for more details." - | tr - | format('', "") - | safe - }} -

    - {% if ssh_fingerprints %} -

    - {% trans %}The following SSH fingerprints are used for the AUR:{% endtrans %} -

    -

      - {% for keytype in ssh_fingerprints %} -
    • {{ keytype }}: {{ ssh_fingerprints[keytype] }} - {% endfor %} -
    - {% endif %} -
    -

    {% trans %}Discussion{% endtrans %}

    -
    -

    - {{ "General discussion regarding the Arch User Repository (AUR) and Trusted User structure takes place on %saur-general%s. For discussion relating to the development of the AUR web interface, use the %saur-dev%s mailing list." - | tr - | format('', "", - '', "") - | safe - }} -

    -

    -

    {% trans %}Bug Reporting{% endtrans %}

    -
    -

    - {{ "If you find a bug in the AUR web interface, please fill out a bug report on our %sbug tracker%s. Use the tracker to report bugs in the AUR web interface %sonly%s. To report packaging bugs contact the package maintainer or leave a comment on the appropriate package page." - | tr - | format('', "", - "", "") - | safe - }} -

    -
    + {% include 'partials/support.html' %}
    diff --git a/templates/partials/support.html b/templates/partials/support.html new file mode 100644 index 00000000..a2890cc5 --- /dev/null +++ b/templates/partials/support.html @@ -0,0 +1,65 @@ +

    {% trans %}Support{% endtrans %}

    +

    {% trans %}Package Requests{% endtrans %}

    +
    +

    + {{ "There are three types of requests that can be filed in the %sPackage Actions%s box on the package details page:" + | tr + | format("", "") + | safe + }} +

    +
      +
    • {% trans %}Orphan Request{% endtrans %}: {% trans %}Request a package to be disowned, e.g. when the maintainer is inactive and the package has been flagged out-of-date for a long time.{% endtrans %}
    • +
    • {% trans %}Deletion Request{% endtrans %}: {%trans %}Request a package to be removed from the Arch User Repository. Please do not use this if a package is broken and can be fixed easily. Instead, contact the package maintainer and file orphan request if necessary.{% endtrans %}
    • +
    • {% trans %}Merge Request{% endtrans %}: {% trans %}Request a package to be merged into another one. Can be used when a package needs to be renamed or replaced by a split package.{% endtrans %}
    • +
    +

    +{{ "If you want to discuss a request, you can use the %saur-requests%s mailing list. However, please do not use that list to file requests." + | tr + | format('', "") + | safe + }} +

    +
    +

    {% trans %}Submitting Packages{% endtrans %}

    +
    +

    + {{ "Git over SSH is now used to submit packages to the AUR. See the %sSubmitting packages%s section of the Arch User Repository ArchWiki page for more details." + | tr + | format('', "") + | safe + }} +

    +{% if ssh_fingerprints %} +

    + {% trans %}The following SSH fingerprints are used for the AUR:{% endtrans %} +

    +

      + {% for keytype in ssh_fingerprints %} +
    • {{ keytype }}: {{ ssh_fingerprints[keytype] }} + {% endfor %} +
    +{% endif %} +
    +

    {% trans %}Discussion{% endtrans %}

    +
    +

    + {{ "General discussion regarding the Arch User Repository (AUR) and Trusted User structure takes place on %saur-general%s. For discussion relating to the development of the AUR web interface, use the %saur-dev%s mailing list." + | tr + | format('', "", + '', "") + | safe + }} +

    +

    +

    {% trans %}Bug Reporting{% endtrans %}

    +
    +

    + {{ "If you find a bug in the AUR web interface, please fill out a bug report on our %sbug tracker%s. Use the tracker to report bugs in the AUR web interface %sonly%s. To report packaging bugs contact the package maintainer or leave a comment on the appropriate package page." + | tr + | format('', "", + "", "") + | safe + }} +

    +
    diff --git a/test/test_homepage.py b/test/test_homepage.py index a573bdd6..08c52c09 100644 --- a/test/test_homepage.py +++ b/test/test_homepage.py @@ -125,33 +125,47 @@ def test_homepage(): @patch("aurweb.util.get_ssh_fingerprints") -def test_homepage_ssh_fingerprints(get_ssh_fingerprints_mock): +def test_homepage_ssh_fingerprints(get_ssh_fingerprints_mock, user): fingerprints = {"Ed25519": "SHA256:RFzBCUItH9LZS0cKB5UE6ceAYhBD5C8GeOBip8Z11+4"} get_ssh_fingerprints_mock.return_value = fingerprints + # without authentication (Home) with client as request: response = request.get("/") - for key, value in fingerprints.items(): - assert key in response.content.decode() - assert value in response.content.decode() - assert ( - "The following SSH fingerprints are used for the AUR" - in response.content.decode() - ) + # with authentication (Dashboard) + with client as auth_request: + auth_request.cookies = {"AURSID": user.login(Request(), "testPassword")} + auth_response = auth_request.get("/") + + for resp in [response, auth_response]: + for key, value in fingerprints.items(): + assert key in resp.content.decode() + assert value in resp.content.decode() + assert ( + "The following SSH fingerprints are used for the AUR" + in resp.content.decode() + ) @patch("aurweb.util.get_ssh_fingerprints") -def test_homepage_no_ssh_fingerprints(get_ssh_fingerprints_mock): +def test_homepage_no_ssh_fingerprints(get_ssh_fingerprints_mock, user): get_ssh_fingerprints_mock.return_value = {} + # without authentication (Home) with client as request: response = request.get("/") - assert ( - "The following SSH fingerprints are used for the AUR" - not in response.content.decode() - ) + # with authentication (Dashboard) + with client as auth_request: + auth_request.cookies = {"AURSID": user.login(Request(), "testPassword")} + auth_response = auth_request.get("/") + + for resp in [response, auth_response]: + assert ( + "The following SSH fingerprints are used for the AUR" + not in 
resp.content.decode() + ) def test_homepage_stats(redis, packages): From 4d0a982c519cb087b4855922f65d73dbece45d33 Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Sat, 14 Jan 2023 11:22:03 +0200 Subject: [PATCH 220/415] fix: assert offset and per_page are positive Signed-off-by: Leonidas Spyropoulos --- aurweb/routers/requests.py | 2 +- aurweb/util.py | 6 +++--- test/test_util.py | 15 +++++++++++++++ 3 files changed, 19 insertions(+), 4 deletions(-) diff --git a/aurweb/routers/requests.py b/aurweb/routers/requests.py index 6880abd9..713f88d2 100644 --- a/aurweb/routers/requests.py +++ b/aurweb/routers/requests.py @@ -48,7 +48,7 @@ async def requests( if not dict(request.query_params).keys() & FILTER_PARAMS: filter_pending = True - O, PP = util.sanitize_params(O, PP) + O, PP = util.sanitize_params(str(O), str(PP)) context["O"] = O context["PP"] = PP context["filter_pending"] = filter_pending diff --git a/aurweb/util.py b/aurweb/util.py index 7b997609..abf48938 100644 --- a/aurweb/util.py +++ b/aurweb/util.py @@ -96,14 +96,14 @@ def apply_all(iterable: Iterable, fn: Callable): return iterable -def sanitize_params(offset: str, per_page: str) -> Tuple[int, int]: +def sanitize_params(offset_str: str, per_page_str: str) -> Tuple[int, int]: try: - offset = int(offset) + offset = defaults.O if int(offset_str) < 0 else int(offset_str) except ValueError: offset = defaults.O try: - per_page = int(per_page) + per_page = defaults.PP if int(per_page_str) < 0 else int(per_page_str) except ValueError: per_page = defaults.PP diff --git a/test/test_util.py b/test/test_util.py index fd7d8655..fefa659a 100644 --- a/test/test_util.py +++ b/test/test_util.py @@ -121,6 +121,21 @@ fRSo6OFcejKc= assert_multiple_keys(pks) +@pytest.mark.parametrize( + "offset_str, per_page_str, expected", + [ + ("5", "100", (5, 100)), + ("", "100", (0, 100)), + ("5", "", (5, 50)), + ("", "", (0, 50)), + ("-1", "100", (0, 100)), + ("5", "-100", (5, 50)), + ], +) +def test_sanitize_params(offset_str: 
str, per_page_str: str, expected: tuple[int, int]): + assert util.sanitize_params(offset_str, per_page_str) == expected + + def assert_multiple_keys(pks): keys = util.parse_ssh_keys(pks) assert len(keys) == 2 From 0e44687ab11da81c611a2668b1249405d32cdb7f Mon Sep 17 00:00:00 2001 From: Leonidas Spyropoulos Date: Thu, 12 Jan 2023 11:47:00 +0200 Subject: [PATCH 221/415] fix: only try to show dependencies if object exists Signed-off-by: Leonidas Spyropoulos --- templates/partials/packages/package_metadata.html | 2 ++ 1 file changed, 2 insertions(+) diff --git a/templates/partials/packages/package_metadata.html b/templates/partials/packages/package_metadata.html index 50d38b48..ebbfe3f9 100644 --- a/templates/partials/packages/package_metadata.html +++ b/templates/partials/packages/package_metadata.html @@ -48,6 +48,7 @@

    {{ "Required by" | tr }} ({{ reqs_count }})