From 93bf8ea260aed547e8ea8148aa7a7feab55bc6b1 Mon Sep 17 00:00:00 2001
From: Ee Durbin
Date: Tue, 1 Aug 2023 15:53:42 -0400
Subject: [PATCH] Revert "Upgrade from psycopg2 to psycopg3 (#10742)" (#14252)

This reverts commit 92e21dadf09351867aa6fc50d5b1b6dbe9710045.
---
 dev/environment                               |   2 +-
 docs/dev/development/getting-started.rst      |   2 +-
 requirements/lint.in                          |   1 +
 requirements/lint.txt                         |   4 +
 requirements/main.in                          |   2 +-
 requirements/main.txt                         |  21 ++-
 requirements/tests.in                         |   2 +-
 requirements/tests.txt                        |  20 +--
 tests/conftest.py                             |   4 +-
 tests/unit/cli/test_db.py                     |  35 +++++
 tests/unit/test_db.py                         |   5 +-
 tests/unit/utils/test_wsgi.py                 |  14 +-
 warehouse/accounts/views.py                   |   2 +-
 warehouse/admin/bans.py                       |   7 +-
 warehouse/banners/views.py                    |   2 +-
 warehouse/cli/db/__init__.py                  |  23 ++++
 warehouse/cli/db/branches.py                  |   7 +-
 warehouse/cli/db/current.py                   |   7 +-
 warehouse/cli/db/downgrade.py                 |   7 +-
 warehouse/cli/db/heads.py                     |   7 +-
 warehouse/cli/db/history.py                   |   7 +-
 warehouse/cli/db/merge.py                     |   7 +-
 warehouse/cli/db/revision.py                  |   7 +-
 warehouse/cli/db/show.py                      |   7 +-
 warehouse/cli/db/stamp.py                     |   7 +-
 warehouse/cli/db/upgrade.py                   |   7 +-
 warehouse/ip_addresses/models.py              |   4 +-
 warehouse/migrations/env.py                   |  32 +++--
 .../1b97443dea8a_create_missing_fk_indexes.py | 130 +++++++++---------
 ...00_index_canonical_version_for_releases.py |  19 ++-
 ...f_migrate_existing_data_for_release_is_.py |   2 +-
 ...a5_add_missing_indexes_for_foreign_keys.py |  21 ++-
 ...18cb98ac_add_cached_bool_on_files_table.py |  17 ++-
 ...42f435bb39_add_archived_column_to_files.py |  17 ++-
 warehouse/utils/wsgi.py                       |   9 +-
 35 files changed, 275 insertions(+), 192 deletions(-)

diff --git a/dev/environment b/dev/environment
index 978408e0143d..a047fa572236 100644
--- a/dev/environment
+++ b/dev/environment
@@ -9,7 +9,7 @@ AWS_ACCESS_KEY_ID=foo
 AWS_SECRET_ACCESS_KEY=foo
 
 BROKER_URL=sqs://localstack:4566/?region=us-east-1&queue_name_prefix=warehouse-dev
 
-DATABASE_URL=postgresql+psycopg://postgres@db/warehouse
+DATABASE_URL=postgresql://postgres@db/warehouse
 
 ELASTICSEARCH_URL=http://elasticsearch:9200/development
diff --git a/docs/dev/development/getting-started.rst b/docs/dev/development/getting-started.rst
index 3355d0b04069..5476a63f4039 100644
--- a/docs/dev/development/getting-started.rst
+++ b/docs/dev/development/getting-started.rst
@@ -427,7 +427,7 @@ compilation errors due to your system not including libraries or binaries
 required by some of Warehouse's dependencies.
 
 An example of such dependency is
-`psycopg <https://pypi.org/project/psycopg/>`_
+`psycopg2 <https://pypi.org/project/psycopg2/>`_
 which requires PostgreSQL binaries and will fail if not present.
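[Illustration, not part of the patch: with this revert, the plain
"postgresql://" URL restored in dev/environment resolves to psycopg2,
SQLAlchemy's default PostgreSQL driver, whereas the reverted setup needed the
explicit "postgresql+psycopg://" prefix to select psycopg3. A minimal sketch
to check that the driver imports and connects, assuming the development
database from dev/environment above is reachable:

    from sqlalchemy import create_engine

    # "postgresql://" implies psycopg2; psycopg3 required "postgresql+psycopg://".
    engine = create_engine("postgresql://postgres@db/warehouse")
    with engine.connect() as connection:
        # Fails at import or connect time if psycopg2 (and the libpq it builds
        # against) is missing, as the docs hunk above notes.
        print(connection.exec_driver_sql("SELECT version()").scalar())
]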
 
 If there's a specific use case you think requires development outside
diff --git a/requirements/lint.in b/requirements/lint.in
index e1665dc8de4a..1dbd6846f5d4 100644
--- a/requirements/lint.in
+++ b/requirements/lint.in
@@ -16,6 +16,7 @@ types-first
 types-html5lib
 types-itsdangerous
 types-passlib
+types-psycopg2
 types-python-slugify
 types-pytz
 types-redis
diff --git a/requirements/lint.txt b/requirements/lint.txt
index eb366502d276..c986fb99aff1 100644
--- a/requirements/lint.txt
+++ b/requirements/lint.txt
@@ -310,6 +310,10 @@ types-passlib==1.7.7.12 \
     --hash=sha256:6abbf2400a8f1cba48639753e3a034af507a765489bb070974d7f68d9ceef883 \
     --hash=sha256:7a4df64b53c2746f804aa29fb361974e5894e0df30ff18cf60b9518696ffc9d3
     # via -r requirements/lint.in
+types-psycopg2==2.9.21.11 \
+    --hash=sha256:7a323d7744bc8a882fb5a6f63448e903fc70d3dc0d6da9ec1f9c6c4dc10a7102 \
+    --hash=sha256:d5077eacf90e61db8c0b8eea2fdc9d4a97d7aaa16865fb4bd7034a7571520b4d
+    # via -r requirements/lint.in
 types-pyopenssl==23.2.0.2 \
     --hash=sha256:19536aa3debfbe25a918cf0d898e9f5fbbe6f3594a429da7914bf331deb1b342 \
     --hash=sha256:6a010dac9ecd42b582d7dd2cc3e9e40486b79b3b64bb2fffba1474ff96af906d
diff --git a/requirements/main.in b/requirements/main.in
index a8dc75182cad..f09cbcb75cda 100644
--- a/requirements/main.in
+++ b/requirements/main.in
@@ -39,7 +39,7 @@ paginate_sqlalchemy
 passlib>=1.6.4
 pip-api
 premailer
-psycopg[c]
+psycopg2
 pycurl
 pydantic
 pyqrcode
diff --git a/requirements/main.txt b/requirements/main.txt
index 813c736fbb87..09cb0d3c6862 100644
--- a/requirements/main.txt
+++ b/requirements/main.txt
@@ -1209,13 +1209,21 @@ protobuf==4.23.4 \
     # googleapis-common-protos
     # grpcio-status
     # proto-plus
-psycopg[c]==3.1.9 \
-    --hash=sha256:ab400f207a8c120bafdd8077916d8f6c0106e809401378708485b016508c30c9 \
-    --hash=sha256:fbbac339274d8733ee70ba9822297af3e8871790a26e967b5ea53e30a4b74dcc
+psycopg2==2.9.6 \
+    --hash=sha256:11aca705ec888e4f4cea97289a0bf0f22a067a32614f6ef64fcf7b8bfbc53744 \
+    --hash=sha256:1861a53a6a0fd248e42ea37c957d36950da00266378746588eab4f4b5649e95f \
+    --hash=sha256:2362ee4d07ac85ff0ad93e22c693d0f37ff63e28f0615a16b6635a645f4b9214 \
+    --hash=sha256:36c941a767341d11549c0fbdbb2bf5be2eda4caf87f65dfcd7d146828bd27f39 \
+    --hash=sha256:53f4ad0a3988f983e9b49a5d9765d663bbe84f508ed655affdb810af9d0972ad \
+    --hash=sha256:869776630c04f335d4124f120b7fb377fe44b0a7645ab3c34b4ba42516951889 \
+    --hash=sha256:a8ad4a47f42aa6aec8d061fdae21eaed8d864d4bb0f0cade5ad32ca16fcd6258 \
+    --hash=sha256:b81fcb9ecfc584f661b71c889edeae70bae30d3ef74fa0ca388ecda50b1222b7 \
+    --hash=sha256:d24ead3716a7d093b90b27b3d73459fe8cd90fd7065cf43b3c40966221d8c394 \
+    --hash=sha256:ded2faa2e6dfb430af7713d87ab4abbfc764d8d7fb73eafe96a24155f906ebf5 \
+    --hash=sha256:f15158418fd826831b28585e2ab48ed8df2d0d98f502a2b4fe619e7d5ca29011 \
+    --hash=sha256:f75001a1cbbe523e00b0ef896a5a1ada2da93ccd752b7636db5a99bc57c44494 \
+    --hash=sha256:f7a7a5ee78ba7dc74265ba69e010ae89dae635eea0e97b055fb641a01a31d2b1
     # via -r requirements/main.in
-psycopg-c==3.1.9 \
-    --hash=sha256:d160b45b0ee1eb05d78a81538c2bc6868bacb5f421b7190ed65d4681e4552455
-    # via psycopg
 pyasn1==0.5.0 \
     --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \
     --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde
@@ -1522,7 +1530,6 @@ typing-extensions==4.7.1 \
     --hash=sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36 \
     --hash=sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2
     # via
     #   alembic
     #   limits
-    #   psycopg
     #   pydantic
     #   sqlalchemy
 tzdata==2023.3 \
diff --git a/requirements/tests.in b/requirements/tests.in
index db34d73fa93d..03322e95afd7 100644
--- a/requirements/tests.in
+++ b/requirements/tests.in
@@ -3,7 +3,7 @@ factory_boy
 freezegun
 pretend
 pytest>=3.0.0
-pytest-postgresql>=3.1.3,<6.0.0
+pytest-postgresql>=3.1.3,<4.0.0
 pytest-socket
 pytz
 responses>=0.5.1
diff --git a/requirements/tests.txt b/requirements/tests.txt
index b8a1952f81e4..4a2c6fe6244d 100644
--- a/requirements/tests.txt
+++ b/requirements/tests.txt
@@ -207,10 +207,6 @@ psutil==5.9.5 \
     --hash=sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30 \
     --hash=sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48
     # via mirakuru
-psycopg==3.1.9 \
-    --hash=sha256:ab400f207a8c120bafdd8077916d8f6c0106e809401378708485b016508c30c9 \
-    --hash=sha256:fbbac339274d8733ee70ba9822297af3e8871790a26e967b5ea53e30a4b74dcc
-    # via pytest-postgresql
 pytest==7.4.0 \
     --hash=sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32 \
     --hash=sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a
     # via
     #   -r requirements/tests.in
     #   pytest-postgresql
     #   pytest-socket
-pytest-postgresql==5.0.0 \
-    --hash=sha256:22edcbafab8995ee85b8d948ddfaad4f70c2c7462303d7477ecd2f77fc9d15bd \
-    --hash=sha256:6e8f0773b57c9b8975b6392c241b7b81b7018f32079a533f368f2fbda732ecd3
+pytest-postgresql==3.1.3 \
+    --hash=sha256:05b87a192741511f5171e0300689a531a2a48b4483c69ae2b5f565d3e429b1d5 \
+    --hash=sha256:3649bcac5a0cd0d2cc1470a1087739990d402e2e910d53265ac486321a833898
     # via -r requirements/tests.in
 pytest-socket==0.6.0 \
     --hash=sha256:363c1d67228315d4fc7912f1aabfd570de29d0e3db6217d61db5728adacd7138 \
@@ -298,10 +294,6 @@ types-pyyaml==6.0.12.11 \
     --hash=sha256:7d340b19ca28cddfdba438ee638cd4084bde213e501a3978738543e27094775b \
     --hash=sha256:a461508f3096d1d5810ec5ab95d7eeecb651f3a15b71959999988942063bf01d
     # via responses
-typing-extensions==4.7.1 \
-    --hash=sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36 \
-    --hash=sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2
-    # via psycopg
 urllib3==1.26.16 \
     --hash=sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f \
     --hash=sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14
@@ -320,9 +312,3 @@ webtest==3.0.0 \
     --hash=sha256:2a001a9efa40d2a7e5d9cd8d1527c75f41814eb6afce2c3d207402547b1e5ead \
     --hash=sha256:54bd969725838d9861a9fa27f8d971f79d275d94ae255f5c501f53bb6d9929eb
     # via -r requirements/tests.in
-
-# The following packages are considered to be unsafe in a requirements file:
-setuptools==68.0.0 \
-    --hash=sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f \
-    --hash=sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235
-    # via pytest-postgresql
diff --git a/tests/conftest.py b/tests/conftest.py
index 81476bb73d90..d0e439950834 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -28,7 +28,7 @@ import webtest as _webtest
 
 from jinja2 import Environment, FileSystemLoader
-from psycopg.errors import InvalidCatalogName
+from psycopg2.errors import InvalidCatalogName
 from pyramid.i18n import TranslationString
 from pyramid.static import ManifestCacheBuster
 from pyramid_jinja2 import IJinja2Environment
@@ -242,7 +242,7 @@ def database(request):
     def drop_database():
         janitor.drop()
 
-    return f"postgresql+psycopg://{pg_user}@{pg_host}:{pg_port}/{pg_db}"
+    return f"postgresql://{pg_user}@{pg_host}:{pg_port}/{pg_db}"
 
 
 class MockManifestCacheBuster(ManifestCacheBuster):
diff --git a/tests/unit/cli/test_db.py b/tests/unit/cli/test_db.py
index 5f27db71dc89..5e6aa1be10eb 100644
--- a/tests/unit/cli/test_db.py
+++ b/tests/unit/cli/test_db.py
@@ -33,6 +33,21 @@ from warehouse.cli.db.upgrade import upgrade
 
 
+def _compare_alembic_locks(calls: list[pretend.call]) -> bool:
+    sql = []
+    for t in calls:
+        assert len(t.args) == 1
+        assert len(t.kwargs) == 0
+
+        tc = t.args[0]
+        assert isinstance(tc, sqlalchemy.sql.expression.TextClause)
+        sql.append(tc.text)
+    return sql == [
+        "SELECT pg_advisory_lock(hashtext('alembic'))",
+        "SELECT pg_advisory_unlock(hashtext('alembic'))",
+    ]
+
+
 def test_branches_command(monkeypatch, cli, pyramid_config):
     alembic_branches = pretend.call_recorder(lambda config: None)
     monkeypatch.setattr(alembic.command, "branches", alembic_branches)
@@ -50,6 +65,8 @@ def test_branches_command(monkeypatch, cli, pyramid_config):
 
     result = cli.invoke(branches, obj=pyramid_config)
     assert result.exit_code == 0
+    assert alembic_config.attributes == {"connection": connection}
+    assert _compare_alembic_locks(connection.execute.calls)
     assert alembic_branches.calls == [pretend.call(alembic_config)]
@@ -70,6 +87,8 @@ def test_current_command(monkeypatch, cli, pyramid_config):
 
     result = cli.invoke(current, obj=pyramid_config)
     assert result.exit_code == 0
+    assert alembic_config.attributes == {"connection": connection}
+    assert _compare_alembic_locks(connection.execute.calls)
     assert alembic_current.calls == [pretend.call(alembic_config)]
@@ -90,6 +109,8 @@ def test_downgrade_command(monkeypatch, cli, pyramid_config):
 
     result = cli.invoke(downgrade, ["--", "-1"], obj=pyramid_config)
     assert result.exit_code == 0
+    assert alembic_config.attributes == {"connection": connection}
+    assert _compare_alembic_locks(connection.execute.calls)
     assert alembic_downgrade.calls == [pretend.call(alembic_config, "-1")]
@@ -118,6 +139,8 @@ def test_heads_command(monkeypatch, cli, pyramid_config, args, ekwargs):
 
     result = cli.invoke(heads, args, obj=pyramid_config)
     assert result.exit_code == 0
+    assert alembic_config.attributes == {"connection": connection}
+    assert _compare_alembic_locks(connection.execute.calls)
     assert alembic_heads.calls == [pretend.call(alembic_config, **ekwargs)]
@@ -138,6 +161,8 @@ def test_history_command(monkeypatch, cli, pyramid_config):
 
     result = cli.invoke(history, ["foo:bar"], obj=pyramid_config)
     assert result.exit_code == 0
+    assert alembic_config.attributes == {"connection": connection}
+    assert _compare_alembic_locks(connection.execute.calls)
     assert alembic_history.calls == [pretend.call(alembic_config, "foo:bar")]
@@ -177,6 +202,8 @@ def test_merge_command(monkeypatch, cli, pyramid_config, args, eargs, ekwargs):
 
     result = cli.invoke(merge, args, obj=pyramid_config)
     assert result.exit_code == 0
+    assert alembic_config.attributes == {"connection": connection}
+    assert _compare_alembic_locks(connection.execute.calls)
     assert alembic_merge.calls == [pretend.call(alembic_config, *eargs, **ekwargs)]
@@ -233,6 +260,8 @@ def test_revision_command(monkeypatch, cli, pyramid_config, args, ekwargs):
 
     result = cli.invoke(revision, args, obj=pyramid_config)
     assert result.exit_code == 0
+    assert alembic_config.attributes == {"connection": connection}
+    assert _compare_alembic_locks(connection.execute.calls)
     assert alembic_revision.calls == [pretend.call(alembic_config, **ekwargs)]
@@ -253,6 +282,8 @@ def test_show_command(monkeypatch, cli, pyramid_config):
 
     result = cli.invoke(show, ["foo"], obj=pyramid_config)
     assert result.exit_code == 0
+    assert alembic_config.attributes == {"connection": connection}
+    assert _compare_alembic_locks(connection.execute.calls)
     assert alembic_show.calls == [pretend.call(alembic_config, "foo")]
@@ -273,6 +304,8 @@ def test_stamp_command(monkeypatch, cli, pyramid_config):
 
     result = cli.invoke(stamp, ["foo"], obj=pyramid_config)
     assert result.exit_code == 0
+    assert alembic_config.attributes == {"connection": connection}
+    assert _compare_alembic_locks(connection.execute.calls)
     assert alembic_stamp.calls == [pretend.call(alembic_config, "foo")]
@@ -293,6 +326,8 @@ def test_upgrade_command(monkeypatch, cli, pyramid_config):
 
     result = cli.invoke(upgrade, ["foo"], obj=pyramid_config)
     assert result.exit_code == 0
+    assert alembic_config.attributes == {"connection": connection}
+    assert _compare_alembic_locks(connection.execute.calls)
     assert alembic_upgrade.calls == [pretend.call(alembic_config, "foo")]
diff --git a/tests/unit/test_db.py b/tests/unit/test_db.py
index 453c8d1c4938..24fc3ac28350 100644
--- a/tests/unit/test_db.py
+++ b/tests/unit/test_db.py
@@ -14,7 +14,7 @@ import alembic.config
 
 import pretend
-import psycopg
+import psycopg2.extensions
 import pytest
 import sqlalchemy
 import venusian
@@ -114,7 +114,7 @@ def config_cls():
 
 def test_raises_db_available_error(pyramid_services, metrics):
     def raiser():
-        raise OperationalError("foo", {}, psycopg.OperationalError())
+        raise OperationalError("foo", {}, psycopg2.OperationalError())
 
     engine = pretend.stub(connect=raiser)
     request = pretend.stub(
@@ -199,6 +199,7 @@ def test_create_session_read_only_mode(
 
     connection = pretend.stub(
         connection=pretend.stub(
+            get_transaction_status=lambda: pretend.stub(),
             set_session=lambda **kw: None,
             rollback=lambda: None,
         ),
diff --git a/tests/unit/utils/test_wsgi.py b/tests/unit/utils/test_wsgi.py
index d8d0b72c003c..943e1a309209 100644
--- a/tests/unit/utils/test_wsgi.py
+++ b/tests/unit/utils/test_wsgi.py
@@ -13,8 +13,6 @@
 import pretend
 import pytest
 
-from sqlalchemy import type_coerce
-from sqlalchemy.dialects.postgresql import INET
 from sqlalchemy.exc import NoResultFound
 
 from warehouse.ip_addresses.models import IpAddress
@@ -198,9 +196,7 @@ def test_ip_address_exists(db_request):
 
 def test_ip_address_created(db_request):
     with pytest.raises(NoResultFound):
-        db_request.db.query(IpAddress).filter_by(
-            ip_address=type_coerce("192.0.2.69", INET)
-        ).one()
+        db_request.db.query(IpAddress).filter_by(ip_address="192.0.2.69").one()
 
     db_request.environ["GEOIP_CITY"] = "Anytown, ST"
     db_request.remote_addr = "192.0.2.69"
@@ -208,12 +204,8 @@ def test_ip_address_created(db_request):
 
     wsgi._ip_address(db_request)
 
-    ip_address = (
-        db_request.db.query(IpAddress)
-        .filter_by(ip_address=type_coerce("192.0.2.69", INET))
-        .one()
-    )
-    assert str(ip_address.ip_address) == "192.0.2.69"
+    ip_address = db_request.db.query(IpAddress).filter_by(ip_address="192.0.2.69").one()
+    assert ip_address.ip_address == "192.0.2.69"
     assert ip_address.hashed_ip_address == "deadbeef"
     assert ip_address.geoip_info == {"city": "Anytown, ST"}
diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py
index e5667285890b..62fc7152c5d4 100644
--- a/warehouse/accounts/views.py
+++ b/warehouse/accounts/views.py
@@ -834,7 +834,7 @@ def _error(message):
     try:
         email = (
             request.db.query(Email)
-            .filter(Email.id == int(data["email.id"]), Email.user == request.user)
+            .filter(Email.id == data["email.id"], Email.user == request.user)
             .one()
         )
     except NoResultFound:
diff --git a/warehouse/admin/bans.py b/warehouse/admin/bans.py
index a475e209d62f..f4ae406a041a 100644
--- a/warehouse/admin/bans.py
+++ b/warehouse/admin/bans.py
@@ -10,9 +10,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from sqlalchemy import type_coerce
-from sqlalchemy.dialects.postgresql import INET
-
 from warehouse.accounts.interfaces import IUserService
 from warehouse.events.models import IpAddress
@@ -21,10 +18,10 @@ class Bans:
     def __init__(self, request):
         self.request = request
 
-    def by_ip(self, ip_address: str) -> bool:
+    def by_ip(self, ip_address):
         banned = (
             self.request.db.query(IpAddress)
-            .filter_by(ip_address=type_coerce(ip_address, INET), is_banned=True)
+            .filter_by(ip_address=ip_address, is_banned=True)
             .one_or_none()
         )
         if banned is not None:
diff --git a/warehouse/banners/views.py b/warehouse/banners/views.py
index a24130b4cb83..e3efe93c6794 100644
--- a/warehouse/banners/views.py
+++ b/warehouse/banners/views.py
@@ -29,7 +29,7 @@ def list_banner_messages(request):
     if banner_id:
         query = request.db.query(Banner).filter(Banner.id == banner_id)
     else:
-        today = datetime.date.today()
+        today = str(datetime.date.today())
         query = request.db.query(Banner).filter(
             (Banner.active == True) & (Banner.end >= today)  # noqa
         )
diff --git a/warehouse/cli/db/__init__.py b/warehouse/cli/db/__init__.py
index e883a2fecde1..08368cddc36b 100644
--- a/warehouse/cli/db/__init__.py
+++ b/warehouse/cli/db/__init__.py
@@ -10,9 +10,32 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import contextlib
+
+from sqlalchemy import text
+
 from warehouse.cli import warehouse
 
 
+@contextlib.contextmanager
+def alembic_lock(engine, alembic_config):
+    with engine.begin() as connection:
+        # Attempt to acquire the alembic lock; this will wait until the lock
+        # has been acquired, allowing multiple commands to wait for each other.
+        connection.execute(text("SELECT pg_advisory_lock(hashtext('alembic'))"))
+
+        try:
+            # Tell Alembic to use our current connection instead of creating
+            # its own.
+            alembic_config.attributes["connection"] = connection
+
+            # Yield control back up to let the command itself run.
+            yield alembic_config
+        finally:
+            # Finally, we need to release the lock we've acquired.
+            connection.execute(text("SELECT pg_advisory_unlock(hashtext('alembic'))"))
+
+
 @warehouse.group()  # pragma: no branch
 def db():
     """
diff --git a/warehouse/cli/db/branches.py b/warehouse/cli/db/branches.py
index cf6896697164..79e972692112 100644
--- a/warehouse/cli/db/branches.py
+++ b/warehouse/cli/db/branches.py
@@ -13,7 +13,7 @@
 import alembic.command
 import click
 
-from warehouse.cli.db import db
+from warehouse.cli.db import alembic_lock, db
 
 
 @db.command()
@@ -22,4 +22,7 @@ def branches(config, **kwargs):
     """
     Show current branch points.
     """
-    alembic.command.branches(config.alembic_config(), **kwargs)
+    with alembic_lock(
+        config.registry["sqlalchemy.engine"], config.alembic_config()
+    ) as alembic_config:
+        alembic.command.branches(alembic_config, **kwargs)
diff --git a/warehouse/cli/db/current.py b/warehouse/cli/db/current.py
index a6f7425f7011..416ada2f63ad 100644
--- a/warehouse/cli/db/current.py
+++ b/warehouse/cli/db/current.py
@@ -13,7 +13,7 @@
 import alembic.command
 import click
 
-from warehouse.cli.db import db
+from warehouse.cli.db import alembic_lock, db
 
 
 @db.command()
@@ -22,4 +22,7 @@ def current(config, **kwargs):
     """
     Display the current revision for a database.
""" - alembic.command.current(config.alembic_config(), **kwargs) + with alembic_lock( + config.registry["sqlalchemy.engine"], config.alembic_config() + ) as alembic_config: + alembic.command.current(alembic_config, **kwargs) diff --git a/warehouse/cli/db/downgrade.py b/warehouse/cli/db/downgrade.py index f03c584c14d1..3bba1c8da10c 100644 --- a/warehouse/cli/db/downgrade.py +++ b/warehouse/cli/db/downgrade.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import db +from warehouse.cli.db import alembic_lock, db @db.command() @@ -23,4 +23,7 @@ def downgrade(config, revision, **kwargs): """ Revert to a previous version. """ - alembic.command.downgrade(config.alembic_config(), revision, **kwargs) + with alembic_lock( + config.registry["sqlalchemy.engine"], config.alembic_config() + ) as alembic_config: + alembic.command.downgrade(alembic_config, revision, **kwargs) diff --git a/warehouse/cli/db/heads.py b/warehouse/cli/db/heads.py index 333a9acb2254..1e94a1ab5f7a 100644 --- a/warehouse/cli/db/heads.py +++ b/warehouse/cli/db/heads.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import db +from warehouse.cli.db import alembic_lock, db @db.command() @@ -28,4 +28,7 @@ def heads(config, **kwargs): """ Show current available heads. """ - alembic.command.heads(config.alembic_config(), **kwargs) + with alembic_lock( + config.registry["sqlalchemy.engine"], config.alembic_config() + ) as alembic_config: + alembic.command.heads(alembic_config, **kwargs) diff --git a/warehouse/cli/db/history.py b/warehouse/cli/db/history.py index dd7bf48fae5c..dba1bfcc804e 100644 --- a/warehouse/cli/db/history.py +++ b/warehouse/cli/db/history.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import db +from warehouse.cli.db import alembic_lock, db @db.command() @@ -23,4 +23,7 @@ def history(config, revision_range, **kwargs): """ List changeset scripts in chronological order. """ - alembic.command.history(config.alembic_config(), revision_range, **kwargs) + with alembic_lock( + config.registry["sqlalchemy.engine"], config.alembic_config() + ) as alembic_config: + alembic.command.history(alembic_config, revision_range, **kwargs) diff --git a/warehouse/cli/db/merge.py b/warehouse/cli/db/merge.py index e1c52aa930d5..089ff548392b 100644 --- a/warehouse/cli/db/merge.py +++ b/warehouse/cli/db/merge.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import db +from warehouse.cli.db import alembic_lock, db @db.command() @@ -34,4 +34,7 @@ def merge(config, revisions, **kwargs): Takes one or more revisions or "heads" for all heads and merges them into a single revision. """ - alembic.command.merge(config.alembic_config(), revisions, **kwargs) + with alembic_lock( + config.registry["sqlalchemy.engine"], config.alembic_config() + ) as alembic_config: + alembic.command.merge(alembic_config, revisions, **kwargs) diff --git a/warehouse/cli/db/revision.py b/warehouse/cli/db/revision.py index 07cdd6f0b9dd..87355ee9a024 100644 --- a/warehouse/cli/db/revision.py +++ b/warehouse/cli/db/revision.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import db +from warehouse.cli.db import alembic_lock, db @db.command() @@ -49,4 +49,7 @@ def revision(config, **kwargs): """ Create a new revision file. 
""" - alembic.command.revision(config.alembic_config(), **kwargs) + with alembic_lock( + config.registry["sqlalchemy.engine"], config.alembic_config() + ) as alembic_config: + alembic.command.revision(alembic_config, **kwargs) diff --git a/warehouse/cli/db/show.py b/warehouse/cli/db/show.py index 579da8e18c88..e663361a01f3 100644 --- a/warehouse/cli/db/show.py +++ b/warehouse/cli/db/show.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import db +from warehouse.cli.db import alembic_lock, db @db.command() @@ -23,4 +23,7 @@ def show(config, revision, **kwargs): """ Show the revision(s) denoted by the given symbol. """ - alembic.command.show(config.alembic_config(), revision, **kwargs) + with alembic_lock( + config.registry["sqlalchemy.engine"], config.alembic_config() + ) as alembic_config: + alembic.command.show(alembic_config, revision, **kwargs) diff --git a/warehouse/cli/db/stamp.py b/warehouse/cli/db/stamp.py index dfdde5649901..5a973bd47a70 100644 --- a/warehouse/cli/db/stamp.py +++ b/warehouse/cli/db/stamp.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import db +from warehouse.cli.db import alembic_lock, db @db.command() @@ -23,4 +23,7 @@ def stamp(config, revision, **kwargs): """ Stamp the revision table with the given revision. """ - alembic.command.stamp(config.alembic_config(), revision, **kwargs) + with alembic_lock( + config.registry["sqlalchemy.engine"], config.alembic_config() + ) as alembic_config: + alembic.command.stamp(alembic_config, revision, **kwargs) diff --git a/warehouse/cli/db/upgrade.py b/warehouse/cli/db/upgrade.py index 00d879b47a26..fba82a565348 100644 --- a/warehouse/cli/db/upgrade.py +++ b/warehouse/cli/db/upgrade.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import db +from warehouse.cli.db import alembic_lock, db @db.command() @@ -23,4 +23,7 @@ def upgrade(config, revision, **kwargs): """ Upgrade database. """ - alembic.command.upgrade(config.alembic_config(), revision, **kwargs) + with alembic_lock( + config.registry["sqlalchemy.engine"], config.alembic_config() + ) as alembic_config: + alembic.command.upgrade(alembic_config, revision, **kwargs) diff --git a/warehouse/ip_addresses/models.py b/warehouse/ip_addresses/models.py index 88dc552aa247..a1bf5f65e435 100644 --- a/warehouse/ip_addresses/models.py +++ b/warehouse/ip_addresses/models.py @@ -45,8 +45,8 @@ class IpAddress(db.Model): {"comment": "Tracks IP Addresses that have modified PyPI state"}, ) - def __repr__(self) -> str: - return str(self.ip_address) + def __repr__(self): + return self.ip_address def __lt__(self, other): return self.id < other.id diff --git a/warehouse/migrations/env.py b/warehouse/migrations/env.py index 6ba8b8dd981a..4cc737aab941 100644 --- a/warehouse/migrations/env.py +++ b/warehouse/migrations/env.py @@ -42,25 +42,37 @@ def run_migrations_online(): In this scenario we need to create an Engine and associate a connection with the context. 
""" - options = context.config.get_section(context.config.config_ini_section) - url = options.pop("url") - connectable = create_engine(url, poolclass=pool.NullPool) + connectable = context.config.attributes.get("connection", None) - with connectable.connect() as connection: - connection.execute(text("SET statement_timeout = 5000")) - connection.execute(text("SET lock_timeout = 4000")) + if connectable is None: + options = context.config.get_section(context.config.config_ini_section) + url = options.pop("url") + connectable = create_engine(url, poolclass=pool.NullPool) + with connectable.connect() as connection: + connection.execute(text("SET statement_timeout = 5000")) + connection.execute(text("SET lock_timeout = 4000")) + + context.configure( + connection=connection, + target_metadata=db.metadata, + compare_server_default=True, + transaction_per_migration=True, + ) + with context.begin_transaction(): + context.run_migrations() + else: context.configure( - connection=connection, + connection=connectable, target_metadata=db.metadata, compare_server_default=True, transaction_per_migration=True, ) + context.execute(text("SET statement_timeout = 5000")) + context.execute(text("SET lock_timeout = 4000")) + with context.begin_transaction(): - connection.execute(text("SELECT pg_advisory_lock(hashtext('alembic'))")) context.run_migrations() - context.get_bind().commit() - connection.execute(text("SELECT pg_advisory_unlock(hashtext('alembic'))")) if context.is_offline_mode(): diff --git a/warehouse/migrations/versions/1b97443dea8a_create_missing_fk_indexes.py b/warehouse/migrations/versions/1b97443dea8a_create_missing_fk_indexes.py index 48714f640049..914d5f14fa95 100644 --- a/warehouse/migrations/versions/1b97443dea8a_create_missing_fk_indexes.py +++ b/warehouse/migrations/versions/1b97443dea8a_create_missing_fk_indexes.py @@ -26,71 +26,71 @@ def upgrade(): # CREATE INDEX CONCURRENTLY cannot happen inside a transaction. We'll close # our transaction here and issue the statement. 
-    op.get_bind().commit()
-    with op.get_context().autocommit_block():
-        op.create_index(
-            op.f("ix_macaroons_user_id"),
-            "macaroons",
-            ["user_id"],
-            unique=False,
-            postgresql_concurrently=True,
-        )
-        op.create_index(
-            op.f("ix_project_events_project_id"),
-            "project_events",
-            ["project_id"],
-            unique=False,
-            postgresql_concurrently=True,
-        )
-        op.create_index(
-            op.f("ix_release_vulnerabilities_release_id"),
-            "release_vulnerabilities",
-            ["release_id"],
-            unique=False,
-            postgresql_concurrently=True,
-        )
-        op.create_index(
-            op.f("ix_releases_description_id"),
-            "releases",
-            ["description_id"],
-            unique=False,
-            postgresql_concurrently=True,
-        )
-        op.create_index(
-            op.f("ix_role_invitations_project_id"),
-            "role_invitations",
-            ["project_id"],
-            unique=False,
-            postgresql_concurrently=True,
-        )
-        op.create_index(
-            op.f("ix_role_invitations_user_id"),
-            "role_invitations",
-            ["user_id"],
-            unique=False,
-            postgresql_concurrently=True,
-        )
-        op.create_index(
-            op.f("ix_user_events_user_id"),
-            "user_events",
-            ["user_id"],
-            unique=False,
-            postgresql_concurrently=True,
-        )
-        op.create_index(
-            op.f("ix_user_recovery_codes_user_id"),
-            "user_recovery_codes",
-            ["user_id"],
-            unique=False,
-            postgresql_concurrently=True,
-        )
-        op.create_index(
-            op.f("ix_user_security_keys_user_id"),
-            "user_security_keys",
-            ["user_id"],
-            unique=False,
-            postgresql_concurrently=True,
-        )
+    op.execute("COMMIT")
+
+    op.create_index(
+        op.f("ix_macaroons_user_id"),
+        "macaroons",
+        ["user_id"],
+        unique=False,
+        postgresql_concurrently=True,
+    )
+    op.create_index(
+        op.f("ix_project_events_project_id"),
+        "project_events",
+        ["project_id"],
+        unique=False,
+        postgresql_concurrently=True,
+    )
+    op.create_index(
+        op.f("ix_release_vulnerabilities_release_id"),
+        "release_vulnerabilities",
+        ["release_id"],
+        unique=False,
+        postgresql_concurrently=True,
+    )
+    op.create_index(
+        op.f("ix_releases_description_id"),
+        "releases",
+        ["description_id"],
+        unique=False,
+        postgresql_concurrently=True,
+    )
+    op.create_index(
+        op.f("ix_role_invitations_project_id"),
+        "role_invitations",
+        ["project_id"],
+        unique=False,
+        postgresql_concurrently=True,
+    )
+    op.create_index(
+        op.f("ix_role_invitations_user_id"),
+        "role_invitations",
+        ["user_id"],
+        unique=False,
+        postgresql_concurrently=True,
+    )
+    op.create_index(
+        op.f("ix_user_events_user_id"),
+        "user_events",
+        ["user_id"],
+        unique=False,
+        postgresql_concurrently=True,
+    )
+    op.create_index(
+        op.f("ix_user_recovery_codes_user_id"),
+        "user_recovery_codes",
+        ["user_id"],
+        unique=False,
+        postgresql_concurrently=True,
+    )
+    op.create_index(
+        op.f("ix_user_security_keys_user_id"),
+        "user_security_keys",
+        ["user_id"],
+        unique=False,
+        postgresql_concurrently=True,
+    )
 
 
 def downgrade():
diff --git a/warehouse/migrations/versions/2db9b00c8d00_index_canonical_version_for_releases.py b/warehouse/migrations/versions/2db9b00c8d00_index_canonical_version_for_releases.py
index 406b72a8656e..1c62621c676d 100644
--- a/warehouse/migrations/versions/2db9b00c8d00_index_canonical_version_for_releases.py
+++ b/warehouse/migrations/versions/2db9b00c8d00_index_canonical_version_for_releases.py
@@ -26,16 +26,15 @@ def upgrade():
     # CREATE INDEX CONCURRENTLY cannot happen inside a transaction. We'll close
     # our transaction here and issue the statement.
-    op.get_bind().commit()
-
-    with op.get_context().autocommit_block():
-        op.create_index(
-            "release_canonical_version_idx",
-            "releases",
-            ["canonical_version"],
-            unique=False,
-            postgresql_concurrently=True,
-        )
+    op.execute("COMMIT")
+
+    op.create_index(
+        "release_canonical_version_idx",
+        "releases",
+        ["canonical_version"],
+        unique=False,
+        postgresql_concurrently=True,
+    )
 
 
 def downgrade():
diff --git a/warehouse/migrations/versions/4490777c984f_migrate_existing_data_for_release_is_.py b/warehouse/migrations/versions/4490777c984f_migrate_existing_data_for_release_is_.py
index 180eb3973903..115b0b8df68d 100644
--- a/warehouse/migrations/versions/4490777c984f_migrate_existing_data_for_release_is_.py
+++ b/warehouse/migrations/versions/4490777c984f_migrate_existing_data_for_release_is_.py
@@ -55,7 +55,7 @@ def upgrade():
         """
         )
     )
-    op.get_bind().commit()
+    conn.execute(sa.text("COMMIT"))
 
     op.alter_column(
         "releases",
diff --git a/warehouse/migrations/versions/68a00c174ba5_add_missing_indexes_for_foreign_keys.py b/warehouse/migrations/versions/68a00c174ba5_add_missing_indexes_for_foreign_keys.py
index ef4aad82a017..89f7c58d84e8 100644
--- a/warehouse/migrations/versions/68a00c174ba5_add_missing_indexes_for_foreign_keys.py
+++ b/warehouse/migrations/versions/68a00c174ba5_add_missing_indexes_for_foreign_keys.py
@@ -33,17 +33,16 @@ def upgrade():
     op.create_index(
         op.f("ix_ses_events_email_id"), "ses_events", ["email_id"], unique=False
     )
-    # CREATE INDEX CONCURRENTLY cannot happen inside a transaction. We'll run this
-    # outside of the transaction for the migration.
-    op.get_bind().commit()
-    with op.get_context().autocommit_block():
-        op.create_index(
-            "journals_submitted_by_idx",
-            "journals",
-            ["submitted_by"],
-            unique=False,
-            postgresql_concurrently=True,
-        )
+    # CREATE INDEX CONCURRENTLY cannot happen inside a transaction. We'll close
+    # our transaction here and issue the statement.
+    op.execute("COMMIT")
+    op.create_index(
+        "journals_submitted_by_idx",
+        "journals",
+        ["submitted_by"],
+        unique=False,
+        postgresql_concurrently=True,
+    )
 
 
 def downgrade():
diff --git a/warehouse/migrations/versions/c5f718cb98ac_add_cached_bool_on_files_table.py b/warehouse/migrations/versions/c5f718cb98ac_add_cached_bool_on_files_table.py
index 8a6587c1a37a..1f5daad4fb08 100644
--- a/warehouse/migrations/versions/c5f718cb98ac_add_cached_bool_on_files_table.py
+++ b/warehouse/migrations/versions/c5f718cb98ac_add_cached_bool_on_files_table.py
@@ -38,15 +38,14 @@ def upgrade():
     )
     # CREATE INDEX CONCURRENTLY cannot happen inside a transaction. We'll close
     # our transaction here and issue the statement.
-    op.get_bind().commit()
-    with op.get_context().autocommit_block():
-        op.create_index(
-            "release_files_cached_idx",
-            "release_files",
-            ["cached"],
-            unique=False,
-            postgresql_concurrently=True,
-        )
+    op.execute("COMMIT")
+    op.create_index(
+        "release_files_cached_idx",
+        "release_files",
+        ["cached"],
+        unique=False,
+        postgresql_concurrently=True,
+    )
 
 
 def downgrade():
diff --git a/warehouse/migrations/versions/d142f435bb39_add_archived_column_to_files.py b/warehouse/migrations/versions/d142f435bb39_add_archived_column_to_files.py
index e5cf64ee57f7..d413f50ead64 100644
--- a/warehouse/migrations/versions/d142f435bb39_add_archived_column_to_files.py
+++ b/warehouse/migrations/versions/d142f435bb39_add_archived_column_to_files.py
@@ -41,15 +41,14 @@ def upgrade():
     # CREATE INDEX CONCURRENTLY cannot happen inside a transaction. We'll close
     # our transaction here and issue the statement.
-    op.get_bind().commit()
-    with op.get_context().autocommit_block():
-        op.create_index(
-            "release_files_archived_idx",
-            "release_files",
-            ["archived"],
-            unique=False,
-            postgresql_concurrently=True,
-        )
+    op.execute("COMMIT")
+    op.create_index(
+        "release_files_archived_idx",
+        "release_files",
+        ["archived"],
+        unique=False,
+        postgresql_concurrently=True,
+    )
 
 
 def downgrade():
diff --git a/warehouse/utils/wsgi.py b/warehouse/utils/wsgi.py
index f4cced25e27f..95350703969f 100644
--- a/warehouse/utils/wsgi.py
+++ b/warehouse/utils/wsgi.py
@@ -16,8 +16,6 @@
 
 from typing import TYPE_CHECKING
 
-from sqlalchemy import type_coerce
-from sqlalchemy.dialects.postgresql import INET
 from sqlalchemy.exc import NoResultFound
 
 from warehouse.ip_addresses.models import IpAddress
@@ -140,11 +138,12 @@ def _remote_addr_hashed(request: Request) -> str:
 
 def _ip_address(request):
     """Return the IpAddress object for the remote address from the environment."""
-    remote_inet = type_coerce(request.remote_addr, INET)
     try:
-        ip_address = request.db.query(IpAddress).filter_by(ip_address=remote_inet).one()
+        ip_address = (
+            request.db.query(IpAddress).filter_by(ip_address=request.remote_addr).one()
+        )
     except NoResultFound:
-        ip_address = IpAddress(ip_address=remote_inet)
+        ip_address = IpAddress(ip_address=request.remote_addr)
         request.db.add(ip_address)
 
     ip_address.hashed_ip_address = request.remote_addr_hashed
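[Illustration, not part of the patch: each restored db command wraps its
alembic.command call in alembic_lock, which takes a PostgreSQL advisory lock
before handing Alembic a shared connection. A minimal sketch of driving one
command programmatically, assuming a config object wired the way warehouse's
CLI wires it:

    import alembic.command

    from warehouse.cli.db import alembic_lock

    def run_upgrade(config):
        # pg_advisory_lock(hashtext('alembic')) serializes concurrent
        # migration runs; the lock is released in alembic_lock's finally block.
        with alembic_lock(
            config.registry["sqlalchemy.engine"], config.alembic_config()
        ) as alembic_config:
            alembic.command.upgrade(alembic_config, "head")
]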