diff --git a/dev/environment b/dev/environment
index a047fa572236..978408e0143d 100644
--- a/dev/environment
+++ b/dev/environment
@@ -9,7 +9,7 @@
 AWS_ACCESS_KEY_ID=foo
 AWS_SECRET_ACCESS_KEY=foo
 BROKER_URL=sqs://localstack:4566/?region=us-east-1&queue_name_prefix=warehouse-dev
-DATABASE_URL=postgresql://postgres@db/warehouse
+DATABASE_URL=postgresql+psycopg://postgres@db/warehouse
 ELASTICSEARCH_URL=http://elasticsearch:9200/development
diff --git a/docs/dev/development/getting-started.rst b/docs/dev/development/getting-started.rst
index 5476a63f4039..3355d0b04069 100644
--- a/docs/dev/development/getting-started.rst
+++ b/docs/dev/development/getting-started.rst
@@ -427,7 +427,7 @@
 compilation errors due to your system not including libraries or binaries
 required by some of Warehouse's dependencies. An example of such dependency is
-`psycopg2 `_
+`psycopg `_
 which requires PostgreSQL binaries and will fail if not present.
 
 If there's a specific use case you think requires development outside
diff --git a/requirements/lint.in b/requirements/lint.in
index 1dbd6846f5d4..e1665dc8de4a 100644
--- a/requirements/lint.in
+++ b/requirements/lint.in
@@ -16,7 +16,6 @@ types-first
 types-html5lib
 types-itsdangerous
 types-passlib
-types-psycopg2
 types-python-slugify
 types-pytz
 types-redis
diff --git a/requirements/lint.txt b/requirements/lint.txt
index c986fb99aff1..eb366502d276 100644
--- a/requirements/lint.txt
+++ b/requirements/lint.txt
@@ -310,10 +310,6 @@ types-passlib==1.7.7.12 \
     --hash=sha256:6abbf2400a8f1cba48639753e3a034af507a765489bb070974d7f68d9ceef883 \
     --hash=sha256:7a4df64b53c2746f804aa29fb361974e5894e0df30ff18cf60b9518696ffc9d3
     # via -r requirements/lint.in
-types-psycopg2==2.9.21.11 \
-    --hash=sha256:7a323d7744bc8a882fb5a6f63448e903fc70d3dc0d6da9ec1f9c6c4dc10a7102 \
-    --hash=sha256:d5077eacf90e61db8c0b8eea2fdc9d4a97d7aaa16865fb4bd7034a7571520b4d
-    # via -r requirements/lint.in
 types-pyopenssl==23.2.0.2 \
     --hash=sha256:19536aa3debfbe25a918cf0d898e9f5fbbe6f3594a429da7914bf331deb1b342 \
     --hash=sha256:6a010dac9ecd42b582d7dd2cc3e9e40486b79b3b64bb2fffba1474ff96af906d
diff --git a/requirements/main.in b/requirements/main.in
index f09cbcb75cda..a8dc75182cad 100644
--- a/requirements/main.in
+++ b/requirements/main.in
@@ -39,7 +39,7 @@ paginate_sqlalchemy
 passlib>=1.6.4
 pip-api
 premailer
-psycopg2
+psycopg[c]
 pycurl
 pydantic
 pyqrcode
diff --git a/requirements/main.txt b/requirements/main.txt
index 09cb0d3c6862..813c736fbb87 100644
--- a/requirements/main.txt
+++ b/requirements/main.txt
@@ -1209,21 +1209,13 @@ protobuf==4.23.4 \
     #   googleapis-common-protos
     #   grpcio-status
     #   proto-plus
-psycopg2==2.9.6 \
-    --hash=sha256:11aca705ec888e4f4cea97289a0bf0f22a067a32614f6ef64fcf7b8bfbc53744 \
-    --hash=sha256:1861a53a6a0fd248e42ea37c957d36950da00266378746588eab4f4b5649e95f \
-    --hash=sha256:2362ee4d07ac85ff0ad93e22c693d0f37ff63e28f0615a16b6635a645f4b9214 \
-    --hash=sha256:36c941a767341d11549c0fbdbb2bf5be2eda4caf87f65dfcd7d146828bd27f39 \
-    --hash=sha256:53f4ad0a3988f983e9b49a5d9765d663bbe84f508ed655affdb810af9d0972ad \
-    --hash=sha256:869776630c04f335d4124f120b7fb377fe44b0a7645ab3c34b4ba42516951889 \
-    --hash=sha256:a8ad4a47f42aa6aec8d061fdae21eaed8d864d4bb0f0cade5ad32ca16fcd6258 \
-    --hash=sha256:b81fcb9ecfc584f661b71c889edeae70bae30d3ef74fa0ca388ecda50b1222b7 \
-    --hash=sha256:d24ead3716a7d093b90b27b3d73459fe8cd90fd7065cf43b3c40966221d8c394 \
-    --hash=sha256:ded2faa2e6dfb430af7713d87ab4abbfc764d8d7fb73eafe96a24155f906ebf5 \
-    --hash=sha256:f15158418fd826831b28585e2ab48ed8df2d0d98f502a2b4fe619e7d5ca29011 \
-    --hash=sha256:f75001a1cbbe523e00b0ef896a5a1ada2da93ccd752b7636db5a99bc57c44494 \
-    --hash=sha256:f7a7a5ee78ba7dc74265ba69e010ae89dae635eea0e97b055fb641a01a31d2b1
+psycopg[c]==3.1.9 \
+    --hash=sha256:ab400f207a8c120bafdd8077916d8f6c0106e809401378708485b016508c30c9 \
+    --hash=sha256:fbbac339274d8733ee70ba9822297af3e8871790a26e967b5ea53e30a4b74dcc
     # via -r requirements/main.in
+psycopg-c==3.1.9 \
+    --hash=sha256:d160b45b0ee1eb05d78a81538c2bc6868bacb5f421b7190ed65d4681e4552455
+    # via psycopg
 pyasn1==0.5.0 \
     --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \
     --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde
@@ -1530,6 +1522,7 @@ typing-extensions==4.7.1 \
     # via
     #   alembic
     #   limits
+    #   psycopg
     #   pydantic
     #   sqlalchemy
 tzdata==2023.3 \
diff --git a/requirements/tests.in b/requirements/tests.in
index 03322e95afd7..db34d73fa93d 100644
--- a/requirements/tests.in
+++ b/requirements/tests.in
@@ -3,7 +3,7 @@ factory_boy
 freezegun
 pretend
 pytest>=3.0.0
-pytest-postgresql>=3.1.3,<4.0.0
+pytest-postgresql>=3.1.3,<6.0.0
 pytest-socket
 pytz
 responses>=0.5.1
diff --git a/requirements/tests.txt b/requirements/tests.txt
index 4a2c6fe6244d..b8a1952f81e4 100644
--- a/requirements/tests.txt
+++ b/requirements/tests.txt
@@ -207,6 +207,10 @@ psutil==5.9.5 \
     --hash=sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30 \
     --hash=sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48
     # via mirakuru
+psycopg==3.1.9 \
+    --hash=sha256:ab400f207a8c120bafdd8077916d8f6c0106e809401378708485b016508c30c9 \
+    --hash=sha256:fbbac339274d8733ee70ba9822297af3e8871790a26e967b5ea53e30a4b74dcc
+    # via pytest-postgresql
 pytest==7.4.0 \
     --hash=sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32 \
     --hash=sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a
@@ -214,9 +218,9 @@ pytest==7.4.0 \
     #   -r requirements/tests.in
     #   pytest-postgresql
     #   pytest-socket
-pytest-postgresql==3.1.3 \
-    --hash=sha256:05b87a192741511f5171e0300689a531a2a48b4483c69ae2b5f565d3e429b1d5 \
-    --hash=sha256:3649bcac5a0cd0d2cc1470a1087739990d402e2e910d53265ac486321a833898
+pytest-postgresql==5.0.0 \
+    --hash=sha256:22edcbafab8995ee85b8d948ddfaad4f70c2c7462303d7477ecd2f77fc9d15bd \
+    --hash=sha256:6e8f0773b57c9b8975b6392c241b7b81b7018f32079a533f368f2fbda732ecd3
     # via -r requirements/tests.in
 pytest-socket==0.6.0 \
     --hash=sha256:363c1d67228315d4fc7912f1aabfd570de29d0e3db6217d61db5728adacd7138 \
@@ -294,6 +298,10 @@ types-pyyaml==6.0.12.11 \
     --hash=sha256:7d340b19ca28cddfdba438ee638cd4084bde213e501a3978738543e27094775b \
     --hash=sha256:a461508f3096d1d5810ec5ab95d7eeecb651f3a15b71959999988942063bf01d
     # via responses
+typing-extensions==4.7.1 \
+    --hash=sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36 \
+    --hash=sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2
+    # via psycopg
 urllib3==1.26.16 \
     --hash=sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f \
     --hash=sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14
@@ -312,3 +320,9 @@ webtest==3.0.0 \
     --hash=sha256:2a001a9efa40d2a7e5d9cd8d1527c75f41814eb6afce2c3d207402547b1e5ead \
     --hash=sha256:54bd969725838d9861a9fa27f8d971f79d275d94ae255f5c501f53bb6d9929eb
     # via -r requirements/tests.in
+
+# The following packages are considered to be unsafe in a requirements file:
+setuptools==68.0.0 \
+    --hash=sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f \
+    --hash=sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235
+    # via pytest-postgresql
diff --git a/tests/conftest.py b/tests/conftest.py
index d0e439950834..81476bb73d90 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -28,7 +28,7 @@
 import webtest as _webtest
 
 from jinja2 import Environment, FileSystemLoader
-from psycopg2.errors import InvalidCatalogName
+from psycopg.errors import InvalidCatalogName
 from pyramid.i18n import TranslationString
 from pyramid.static import ManifestCacheBuster
 from pyramid_jinja2 import IJinja2Environment
@@ -242,7 +242,7 @@ def database(request):
     def drop_database():
         janitor.drop()
 
-    return f"postgresql://{pg_user}@{pg_host}:{pg_port}/{pg_db}"
+    return f"postgresql+psycopg://{pg_user}@{pg_host}:{pg_port}/{pg_db}"
 
 
 class MockManifestCacheBuster(ManifestCacheBuster):
diff --git a/tests/unit/cli/test_db.py b/tests/unit/cli/test_db.py
index 5e6aa1be10eb..5f27db71dc89 100644
--- a/tests/unit/cli/test_db.py
+++ b/tests/unit/cli/test_db.py
@@ -33,21 +33,6 @@
 from warehouse.cli.db.upgrade import upgrade
 
 
-def _compare_alembic_locks(calls: list[pretend.call]) -> bool:
-    sql = []
-    for t in calls:
-        assert len(t.args) == 1
-        assert len(t.kwargs) == 0
-
-        tc = t.args[0]
-        assert isinstance(tc, sqlalchemy.sql.expression.TextClause)
-        sql.append(tc.text)
-    return sql == [
-        "SELECT pg_advisory_lock(hashtext('alembic'))",
-        "SELECT pg_advisory_unlock(hashtext('alembic'))",
-    ]
-
-
 def test_branches_command(monkeypatch, cli, pyramid_config):
     alembic_branches = pretend.call_recorder(lambda config: None)
     monkeypatch.setattr(alembic.command, "branches", alembic_branches)
@@ -65,8 +50,6 @@ def test_branches_command(monkeypatch, cli, pyramid_config):
     result = cli.invoke(branches, obj=pyramid_config)
 
     assert result.exit_code == 0
-    assert alembic_config.attributes == {"connection": connection}
-    assert _compare_alembic_locks(connection.execute.calls)
     assert alembic_branches.calls == [pretend.call(alembic_config)]
@@ -87,8 +70,6 @@ def test_current_command(monkeypatch, cli, pyramid_config):
     result = cli.invoke(current, obj=pyramid_config)
 
     assert result.exit_code == 0
-    assert alembic_config.attributes == {"connection": connection}
-    assert _compare_alembic_locks(connection.execute.calls)
     assert alembic_current.calls == [pretend.call(alembic_config)]
@@ -109,8 +90,6 @@ def test_downgrade_command(monkeypatch, cli, pyramid_config):
     result = cli.invoke(downgrade, ["--", "-1"], obj=pyramid_config)
 
     assert result.exit_code == 0
-    assert alembic_config.attributes == {"connection": connection}
-    assert _compare_alembic_locks(connection.execute.calls)
     assert alembic_downgrade.calls == [pretend.call(alembic_config, "-1")]
@@ -139,8 +118,6 @@ def test_heads_command(monkeypatch, cli, pyramid_config, args, ekwargs):
     result = cli.invoke(heads, args, obj=pyramid_config)
 
     assert result.exit_code == 0
-    assert alembic_config.attributes == {"connection": connection}
-    assert _compare_alembic_locks(connection.execute.calls)
     assert alembic_heads.calls == [pretend.call(alembic_config, **ekwargs)]
@@ -161,8 +138,6 @@ def test_history_command(monkeypatch, cli, pyramid_config):
     result = cli.invoke(history, ["foo:bar"], obj=pyramid_config)
 
     assert result.exit_code == 0
-    assert alembic_config.attributes == {"connection": connection}
-    assert _compare_alembic_locks(connection.execute.calls)
     assert alembic_history.calls == [pretend.call(alembic_config, "foo:bar")]
@@ -202,8 +177,6 @@ def test_merge_command(monkeypatch, cli, pyramid_config, args, eargs, ekwargs):
     result = cli.invoke(merge, args, obj=pyramid_config)
 
     assert result.exit_code == 0
-    assert alembic_config.attributes == {"connection": connection}
-    assert _compare_alembic_locks(connection.execute.calls)
     assert alembic_merge.calls == [pretend.call(alembic_config, *eargs, **ekwargs)]
@@ -260,8 +233,6 @@ def test_revision_command(monkeypatch, cli, pyramid_config, args, ekwargs):
     result = cli.invoke(revision, args, obj=pyramid_config)
 
     assert result.exit_code == 0
-    assert alembic_config.attributes == {"connection": connection}
-    assert _compare_alembic_locks(connection.execute.calls)
     assert alembic_revision.calls == [pretend.call(alembic_config, **ekwargs)]
@@ -282,8 +253,6 @@ def test_show_command(monkeypatch, cli, pyramid_config):
     result = cli.invoke(show, ["foo"], obj=pyramid_config)
 
     assert result.exit_code == 0
-    assert alembic_config.attributes == {"connection": connection}
-    assert _compare_alembic_locks(connection.execute.calls)
     assert alembic_show.calls == [pretend.call(alembic_config, "foo")]
@@ -304,8 +273,6 @@ def test_stamp_command(monkeypatch, cli, pyramid_config):
     result = cli.invoke(stamp, ["foo"], obj=pyramid_config)
 
     assert result.exit_code == 0
-    assert alembic_config.attributes == {"connection": connection}
-    assert _compare_alembic_locks(connection.execute.calls)
     assert alembic_stamp.calls == [pretend.call(alembic_config, "foo")]
@@ -326,8 +293,6 @@ def test_upgrade_command(monkeypatch, cli, pyramid_config):
     result = cli.invoke(upgrade, ["foo"], obj=pyramid_config)
 
     assert result.exit_code == 0
-    assert alembic_config.attributes == {"connection": connection}
-    assert _compare_alembic_locks(connection.execute.calls)
     assert alembic_upgrade.calls == [pretend.call(alembic_config, "foo")]
diff --git a/tests/unit/test_db.py b/tests/unit/test_db.py
index 24fc3ac28350..453c8d1c4938 100644
--- a/tests/unit/test_db.py
+++ b/tests/unit/test_db.py
@@ -14,7 +14,7 @@
 import alembic.config
 import pretend
-import psycopg2.extensions
+import psycopg
 import pytest
 import sqlalchemy
 import venusian
@@ -114,7 +114,7 @@ def config_cls():
 def test_raises_db_available_error(pyramid_services, metrics):
     def raiser():
-        raise OperationalError("foo", {}, psycopg2.OperationalError())
+        raise OperationalError("foo", {}, psycopg.OperationalError())
 
     engine = pretend.stub(connect=raiser)
     request = pretend.stub(
@@ -199,7 +199,6 @@ def test_create_session_read_only_mode(
     connection = pretend.stub(
         connection=pretend.stub(
-            get_transaction_status=lambda: pretend.stub(),
             set_session=lambda **kw: None,
             rollback=lambda: None,
         ),
diff --git a/tests/unit/utils/test_wsgi.py b/tests/unit/utils/test_wsgi.py
index 943e1a309209..d8d0b72c003c 100644
--- a/tests/unit/utils/test_wsgi.py
+++ b/tests/unit/utils/test_wsgi.py
@@ -13,6 +13,8 @@
 import pretend
 import pytest
 
+from sqlalchemy import type_coerce
+from sqlalchemy.dialects.postgresql import INET
 from sqlalchemy.exc import NoResultFound
 
 from warehouse.ip_addresses.models import IpAddress
@@ -196,7 +198,9 @@ def test_ip_address_exists(db_request):
 
 def test_ip_address_created(db_request):
     with pytest.raises(NoResultFound):
-        db_request.db.query(IpAddress).filter_by(ip_address="192.0.2.69").one()
+        db_request.db.query(IpAddress).filter_by(
+            ip_address=type_coerce("192.0.2.69", INET)
+        ).one()
 
     db_request.environ["GEOIP_CITY"] = "Anytown, ST"
     db_request.remote_addr = "192.0.2.69"
@@ -204,8 +208,12 @@ def test_ip_address_created(db_request):
 
     wsgi._ip_address(db_request)
 
-    ip_address = db_request.db.query(IpAddress).filter_by(ip_address="192.0.2.69").one()
-    assert ip_address.ip_address == "192.0.2.69"
+    ip_address = (
+        db_request.db.query(IpAddress)
+        .filter_by(ip_address=type_coerce("192.0.2.69", INET))
+        .one()
+    )
+    assert str(ip_address.ip_address) == "192.0.2.69"
     assert ip_address.hashed_ip_address == "deadbeef"
     assert ip_address.geoip_info == {"city": "Anytown, ST"}
diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py
index 62fc7152c5d4..e5667285890b 100644
--- a/warehouse/accounts/views.py
+++ b/warehouse/accounts/views.py
@@ -834,7 +834,7 @@ def _error(message):
     try:
         email = (
             request.db.query(Email)
-            .filter(Email.id == data["email.id"], Email.user == request.user)
+            .filter(Email.id == int(data["email.id"]), Email.user == request.user)
             .one()
         )
     except NoResultFound:
diff --git a/warehouse/admin/bans.py b/warehouse/admin/bans.py
index f4ae406a041a..a475e209d62f 100644
--- a/warehouse/admin/bans.py
+++ b/warehouse/admin/bans.py
@@ -10,6 +10,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from sqlalchemy import type_coerce
+from sqlalchemy.dialects.postgresql import INET
+
 from warehouse.accounts.interfaces import IUserService
 from warehouse.events.models import IpAddress
@@ -18,10 +21,10 @@ class Bans:
     def __init__(self, request):
         self.request = request
 
-    def by_ip(self, ip_address):
+    def by_ip(self, ip_address: str) -> bool:
         banned = (
             self.request.db.query(IpAddress)
-            .filter_by(ip_address=ip_address, is_banned=True)
+            .filter_by(ip_address=type_coerce(ip_address, INET), is_banned=True)
             .one_or_none()
         )
         if banned is not None:
diff --git a/warehouse/banners/views.py b/warehouse/banners/views.py
index e3efe93c6794..a24130b4cb83 100644
--- a/warehouse/banners/views.py
+++ b/warehouse/banners/views.py
@@ -29,7 +29,7 @@ def list_banner_messages(request):
     if banner_id:
         query = request.db.query(Banner).filter(Banner.id == banner_id)
     else:
-        today = str(datetime.date.today())
+        today = datetime.date.today()
         query = request.db.query(Banner).filter(
             (Banner.active == True) & (Banner.end >= today)  # noqa
         )
diff --git a/warehouse/cli/db/__init__.py b/warehouse/cli/db/__init__.py
index 08368cddc36b..e883a2fecde1 100644
--- a/warehouse/cli/db/__init__.py
+++ b/warehouse/cli/db/__init__.py
@@ -10,32 +10,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import contextlib
-
-from sqlalchemy import text
-
 from warehouse.cli import warehouse
 
 
-@contextlib.contextmanager
-def alembic_lock(engine, alembic_config):
-    with engine.begin() as connection:
-        # Attempt to acquire the alembic lock, this will wait until the lock
-        # has been acquired allowing multiple commands to wait for each other.
-        connection.execute(text("SELECT pg_advisory_lock(hashtext('alembic'))"))
-
-        try:
-            # Tell Alembic use our current connection instead of creating it's
-            # own.
-            alembic_config.attributes["connection"] = connection
-
-            # Yield control back up to let the command itself run.
-            yield alembic_config
-        finally:
-            # Finally we need to release the lock we've acquired.
-            connection.execute(text("SELECT pg_advisory_unlock(hashtext('alembic'))"))
-
-
 @warehouse.group()  # pragma: no branch
 def db():
     """
diff --git a/warehouse/cli/db/branches.py b/warehouse/cli/db/branches.py
index 79e972692112..cf6896697164 100644
--- a/warehouse/cli/db/branches.py
+++ b/warehouse/cli/db/branches.py
@@ -13,7 +13,7 @@
 import alembic.command
 import click
 
-from warehouse.cli.db import alembic_lock, db
+from warehouse.cli.db import db
 
 
 @db.command()
@@ -22,7 +22,4 @@ def branches(config, **kwargs):
     """
     Show current branch points.
     """
-    with alembic_lock(
-        config.registry["sqlalchemy.engine"], config.alembic_config()
-    ) as alembic_config:
-        alembic.command.branches(alembic_config, **kwargs)
+    alembic.command.branches(config.alembic_config(), **kwargs)
diff --git a/warehouse/cli/db/current.py b/warehouse/cli/db/current.py
index 416ada2f63ad..a6f7425f7011 100644
--- a/warehouse/cli/db/current.py
+++ b/warehouse/cli/db/current.py
@@ -13,7 +13,7 @@
 import alembic.command
 import click
 
-from warehouse.cli.db import alembic_lock, db
+from warehouse.cli.db import db
 
 
 @db.command()
@@ -22,7 +22,4 @@ def current(config, **kwargs):
     """
     Display the current revision for a database.
     """
-    with alembic_lock(
-        config.registry["sqlalchemy.engine"], config.alembic_config()
-    ) as alembic_config:
-        alembic.command.current(alembic_config, **kwargs)
+    alembic.command.current(config.alembic_config(), **kwargs)
diff --git a/warehouse/cli/db/downgrade.py b/warehouse/cli/db/downgrade.py
index 3bba1c8da10c..f03c584c14d1 100644
--- a/warehouse/cli/db/downgrade.py
+++ b/warehouse/cli/db/downgrade.py
@@ -13,7 +13,7 @@
 import alembic.command
 import click
 
-from warehouse.cli.db import alembic_lock, db
+from warehouse.cli.db import db
 
 
 @db.command()
@@ -23,7 +23,4 @@ def downgrade(config, revision, **kwargs):
     """
     Revert to a previous version.
     """
-    with alembic_lock(
-        config.registry["sqlalchemy.engine"], config.alembic_config()
-    ) as alembic_config:
-        alembic.command.downgrade(alembic_config, revision, **kwargs)
+    alembic.command.downgrade(config.alembic_config(), revision, **kwargs)
diff --git a/warehouse/cli/db/heads.py b/warehouse/cli/db/heads.py
index 1e94a1ab5f7a..333a9acb2254 100644
--- a/warehouse/cli/db/heads.py
+++ b/warehouse/cli/db/heads.py
@@ -13,7 +13,7 @@
 import alembic.command
 import click
 
-from warehouse.cli.db import alembic_lock, db
+from warehouse.cli.db import db
 
 
 @db.command()
@@ -28,7 +28,4 @@ def heads(config, **kwargs):
     """
     Show current available heads.
     """
-    with alembic_lock(
-        config.registry["sqlalchemy.engine"], config.alembic_config()
-    ) as alembic_config:
-        alembic.command.heads(alembic_config, **kwargs)
+    alembic.command.heads(config.alembic_config(), **kwargs)
diff --git a/warehouse/cli/db/history.py b/warehouse/cli/db/history.py
index dba1bfcc804e..dd7bf48fae5c 100644
--- a/warehouse/cli/db/history.py
+++ b/warehouse/cli/db/history.py
@@ -13,7 +13,7 @@
 import alembic.command
 import click
 
-from warehouse.cli.db import alembic_lock, db
+from warehouse.cli.db import db
 
 
 @db.command()
@@ -23,7 +23,4 @@ def history(config, revision_range, **kwargs):
     """
     List changeset scripts in chronological order.
""" - with alembic_lock( - config.registry["sqlalchemy.engine"], config.alembic_config() - ) as alembic_config: - alembic.command.history(alembic_config, revision_range, **kwargs) + alembic.command.history(config.alembic_config(), revision_range, **kwargs) diff --git a/warehouse/cli/db/merge.py b/warehouse/cli/db/merge.py index 089ff548392b..e1c52aa930d5 100644 --- a/warehouse/cli/db/merge.py +++ b/warehouse/cli/db/merge.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import alembic_lock, db +from warehouse.cli.db import db @db.command() @@ -34,7 +34,4 @@ def merge(config, revisions, **kwargs): Takes one or more revisions or "heads" for all heads and merges them into a single revision. """ - with alembic_lock( - config.registry["sqlalchemy.engine"], config.alembic_config() - ) as alembic_config: - alembic.command.merge(alembic_config, revisions, **kwargs) + alembic.command.merge(config.alembic_config(), revisions, **kwargs) diff --git a/warehouse/cli/db/revision.py b/warehouse/cli/db/revision.py index 87355ee9a024..07cdd6f0b9dd 100644 --- a/warehouse/cli/db/revision.py +++ b/warehouse/cli/db/revision.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import alembic_lock, db +from warehouse.cli.db import db @db.command() @@ -49,7 +49,4 @@ def revision(config, **kwargs): """ Create a new revision file. """ - with alembic_lock( - config.registry["sqlalchemy.engine"], config.alembic_config() - ) as alembic_config: - alembic.command.revision(alembic_config, **kwargs) + alembic.command.revision(config.alembic_config(), **kwargs) diff --git a/warehouse/cli/db/show.py b/warehouse/cli/db/show.py index e663361a01f3..579da8e18c88 100644 --- a/warehouse/cli/db/show.py +++ b/warehouse/cli/db/show.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import alembic_lock, db +from warehouse.cli.db import db @db.command() @@ -23,7 +23,4 @@ def show(config, revision, **kwargs): """ Show the revision(s) denoted by the given symbol. """ - with alembic_lock( - config.registry["sqlalchemy.engine"], config.alembic_config() - ) as alembic_config: - alembic.command.show(alembic_config, revision, **kwargs) + alembic.command.show(config.alembic_config(), revision, **kwargs) diff --git a/warehouse/cli/db/stamp.py b/warehouse/cli/db/stamp.py index 5a973bd47a70..dfdde5649901 100644 --- a/warehouse/cli/db/stamp.py +++ b/warehouse/cli/db/stamp.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import alembic_lock, db +from warehouse.cli.db import db @db.command() @@ -23,7 +23,4 @@ def stamp(config, revision, **kwargs): """ Stamp the revision table with the given revision. """ - with alembic_lock( - config.registry["sqlalchemy.engine"], config.alembic_config() - ) as alembic_config: - alembic.command.stamp(alembic_config, revision, **kwargs) + alembic.command.stamp(config.alembic_config(), revision, **kwargs) diff --git a/warehouse/cli/db/upgrade.py b/warehouse/cli/db/upgrade.py index fba82a565348..00d879b47a26 100644 --- a/warehouse/cli/db/upgrade.py +++ b/warehouse/cli/db/upgrade.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import alembic_lock, db +from warehouse.cli.db import db @db.command() @@ -23,7 +23,4 @@ def upgrade(config, revision, **kwargs): """ Upgrade database. 
""" - with alembic_lock( - config.registry["sqlalchemy.engine"], config.alembic_config() - ) as alembic_config: - alembic.command.upgrade(alembic_config, revision, **kwargs) + alembic.command.upgrade(config.alembic_config(), revision, **kwargs) diff --git a/warehouse/ip_addresses/models.py b/warehouse/ip_addresses/models.py index a1bf5f65e435..88dc552aa247 100644 --- a/warehouse/ip_addresses/models.py +++ b/warehouse/ip_addresses/models.py @@ -45,8 +45,8 @@ class IpAddress(db.Model): {"comment": "Tracks IP Addresses that have modified PyPI state"}, ) - def __repr__(self): - return self.ip_address + def __repr__(self) -> str: + return str(self.ip_address) def __lt__(self, other): return self.id < other.id diff --git a/warehouse/migrations/env.py b/warehouse/migrations/env.py index 4cc737aab941..6ba8b8dd981a 100644 --- a/warehouse/migrations/env.py +++ b/warehouse/migrations/env.py @@ -42,37 +42,25 @@ def run_migrations_online(): In this scenario we need to create an Engine and associate a connection with the context. """ - connectable = context.config.attributes.get("connection", None) + options = context.config.get_section(context.config.config_ini_section) + url = options.pop("url") + connectable = create_engine(url, poolclass=pool.NullPool) - if connectable is None: - options = context.config.get_section(context.config.config_ini_section) - url = options.pop("url") - connectable = create_engine(url, poolclass=pool.NullPool) + with connectable.connect() as connection: + connection.execute(text("SET statement_timeout = 5000")) + connection.execute(text("SET lock_timeout = 4000")) - with connectable.connect() as connection: - connection.execute(text("SET statement_timeout = 5000")) - connection.execute(text("SET lock_timeout = 4000")) - - context.configure( - connection=connection, - target_metadata=db.metadata, - compare_server_default=True, - transaction_per_migration=True, - ) - with context.begin_transaction(): - context.run_migrations() - else: context.configure( - connection=connectable, + connection=connection, target_metadata=db.metadata, compare_server_default=True, transaction_per_migration=True, ) - context.execute(text("SET statement_timeout = 5000")) - context.execute(text("SET lock_timeout = 4000")) - with context.begin_transaction(): + connection.execute(text("SELECT pg_advisory_lock(hashtext('alembic'))")) context.run_migrations() + context.get_bind().commit() + connection.execute(text("SELECT pg_advisory_unlock(hashtext('alembic'))")) if context.is_offline_mode(): diff --git a/warehouse/migrations/versions/1b97443dea8a_create_missing_fk_indexes.py b/warehouse/migrations/versions/1b97443dea8a_create_missing_fk_indexes.py index 914d5f14fa95..48714f640049 100644 --- a/warehouse/migrations/versions/1b97443dea8a_create_missing_fk_indexes.py +++ b/warehouse/migrations/versions/1b97443dea8a_create_missing_fk_indexes.py @@ -26,71 +26,71 @@ def upgrade(): # CREATE INDEX CONCURRENTLY cannot happen inside a transaction. We'll close # our transaction here and issue the statement. 
- op.execute("COMMIT") - - op.create_index( - op.f("ix_macaroons_user_id"), - "macaroons", - ["user_id"], - unique=False, - postgresql_concurrently=True, - ) - op.create_index( - op.f("ix_project_events_project_id"), - "project_events", - ["project_id"], - unique=False, - postgresql_concurrently=True, - ) - op.create_index( - op.f("ix_release_vulnerabilities_release_id"), - "release_vulnerabilities", - ["release_id"], - unique=False, - postgresql_concurrently=True, - ) - op.create_index( - op.f("ix_releases_description_id"), - "releases", - ["description_id"], - unique=False, - postgresql_concurrently=True, - ) - op.create_index( - op.f("ix_role_invitations_project_id"), - "role_invitations", - ["project_id"], - unique=False, - postgresql_concurrently=True, - ) - op.create_index( - op.f("ix_role_invitations_user_id"), - "role_invitations", - ["user_id"], - unique=False, - postgresql_concurrently=True, - ) - op.create_index( - op.f("ix_user_events_user_id"), - "user_events", - ["user_id"], - unique=False, - postgresql_concurrently=True, - ) - op.create_index( - op.f("ix_user_recovery_codes_user_id"), - "user_recovery_codes", - ["user_id"], - unique=False, - postgresql_concurrently=True, - ) - op.create_index( - op.f("ix_user_security_keys_user_id"), - "user_security_keys", - ["user_id"], - unique=False, - postgresql_concurrently=True, - ) + op.get_bind().commit() + with op.get_context().autocommit_block(): + op.create_index( + op.f("ix_macaroons_user_id"), + "macaroons", + ["user_id"], + unique=False, + postgresql_concurrently=True, + ) + op.create_index( + op.f("ix_project_events_project_id"), + "project_events", + ["project_id"], + unique=False, + postgresql_concurrently=True, + ) + op.create_index( + op.f("ix_release_vulnerabilities_release_id"), + "release_vulnerabilities", + ["release_id"], + unique=False, + postgresql_concurrently=True, + ) + op.create_index( + op.f("ix_releases_description_id"), + "releases", + ["description_id"], + unique=False, + postgresql_concurrently=True, + ) + op.create_index( + op.f("ix_role_invitations_project_id"), + "role_invitations", + ["project_id"], + unique=False, + postgresql_concurrently=True, + ) + op.create_index( + op.f("ix_role_invitations_user_id"), + "role_invitations", + ["user_id"], + unique=False, + postgresql_concurrently=True, + ) + op.create_index( + op.f("ix_user_events_user_id"), + "user_events", + ["user_id"], + unique=False, + postgresql_concurrently=True, + ) + op.create_index( + op.f("ix_user_recovery_codes_user_id"), + "user_recovery_codes", + ["user_id"], + unique=False, + postgresql_concurrently=True, + ) + op.create_index( + op.f("ix_user_security_keys_user_id"), + "user_security_keys", + ["user_id"], + unique=False, + postgresql_concurrently=True, + ) def downgrade(): diff --git a/warehouse/migrations/versions/2db9b00c8d00_index_canonical_version_for_releases.py b/warehouse/migrations/versions/2db9b00c8d00_index_canonical_version_for_releases.py index 1c62621c676d..406b72a8656e 100644 --- a/warehouse/migrations/versions/2db9b00c8d00_index_canonical_version_for_releases.py +++ b/warehouse/migrations/versions/2db9b00c8d00_index_canonical_version_for_releases.py @@ -26,15 +26,16 @@ def upgrade(): # CREATE INDEX CONCURRENTLY cannot happen inside a transaction. We'll close # our transaction here and issue the statement. 
- op.execute("COMMIT") - - op.create_index( - "release_canonical_version_idx", - "releases", - ["canonical_version"], - unique=False, - postgresql_concurrently=True, - ) + op.get_bind().commit() + + with op.get_context().autocommit_block(): + op.create_index( + "release_canonical_version_idx", + "releases", + ["canonical_version"], + unique=False, + postgresql_concurrently=True, + ) def downgrade(): diff --git a/warehouse/migrations/versions/4490777c984f_migrate_existing_data_for_release_is_.py b/warehouse/migrations/versions/4490777c984f_migrate_existing_data_for_release_is_.py index 115b0b8df68d..180eb3973903 100644 --- a/warehouse/migrations/versions/4490777c984f_migrate_existing_data_for_release_is_.py +++ b/warehouse/migrations/versions/4490777c984f_migrate_existing_data_for_release_is_.py @@ -55,7 +55,7 @@ def upgrade(): """ ) ) - conn.execute(sa.text("COMMIT")) + op.get_bind().commit() op.alter_column( "releases", diff --git a/warehouse/migrations/versions/68a00c174ba5_add_missing_indexes_for_foreign_keys.py b/warehouse/migrations/versions/68a00c174ba5_add_missing_indexes_for_foreign_keys.py index 89f7c58d84e8..ef4aad82a017 100644 --- a/warehouse/migrations/versions/68a00c174ba5_add_missing_indexes_for_foreign_keys.py +++ b/warehouse/migrations/versions/68a00c174ba5_add_missing_indexes_for_foreign_keys.py @@ -33,16 +33,17 @@ def upgrade(): op.create_index( op.f("ix_ses_events_email_id"), "ses_events", ["email_id"], unique=False ) - # CREATE INDEX CONCURRENTLY cannot happen inside a transaction. We'll close - # our transaction here and issue the statement. - op.execute("COMMIT") - op.create_index( - "journals_submitted_by_idx", - "journals", - ["submitted_by"], - unique=False, - postgresql_concurrently=True, - ) + # CREATE INDEX CONCURRENTLY cannot happen inside a transaction. We'll run this + # outside of the transaction for the migration. + op.get_bind().commit() + with op.get_context().autocommit_block(): + op.create_index( + "journals_submitted_by_idx", + "journals", + ["submitted_by"], + unique=False, + postgresql_concurrently=True, + ) def downgrade(): diff --git a/warehouse/migrations/versions/c5f718cb98ac_add_cached_bool_on_files_table.py b/warehouse/migrations/versions/c5f718cb98ac_add_cached_bool_on_files_table.py index 1f5daad4fb08..8a6587c1a37a 100644 --- a/warehouse/migrations/versions/c5f718cb98ac_add_cached_bool_on_files_table.py +++ b/warehouse/migrations/versions/c5f718cb98ac_add_cached_bool_on_files_table.py @@ -38,14 +38,15 @@ def upgrade(): ) # CREATE INDEX CONCURRENTLY cannot happen inside a transaction. We'll close # our transaction here and issue the statement. - op.execute("COMMIT") - op.create_index( - "release_files_cached_idx", - "release_files", - ["cached"], - unique=False, - postgresql_concurrently=True, - ) + op.get_bind().commit() + with op.get_context().autocommit_block(): + op.create_index( + "release_files_cached_idx", + "release_files", + ["cached"], + unique=False, + postgresql_concurrently=True, + ) def downgrade(): diff --git a/warehouse/migrations/versions/d142f435bb39_add_archived_column_to_files.py b/warehouse/migrations/versions/d142f435bb39_add_archived_column_to_files.py index d413f50ead64..e5cf64ee57f7 100644 --- a/warehouse/migrations/versions/d142f435bb39_add_archived_column_to_files.py +++ b/warehouse/migrations/versions/d142f435bb39_add_archived_column_to_files.py @@ -41,14 +41,15 @@ def upgrade(): # CREATE INDEX CONCURRENTLY cannot happen inside a transaction. We'll close # our transaction here and issue the statement. 
- op.execute("COMMIT") - op.create_index( - "release_files_archived_idx", - "release_files", - ["archived"], - unique=False, - postgresql_concurrently=True, - ) + op.get_bind().commit() + with op.get_context().autocommit_block(): + op.create_index( + "release_files_archived_idx", + "release_files", + ["archived"], + unique=False, + postgresql_concurrently=True, + ) def downgrade(): diff --git a/warehouse/utils/wsgi.py b/warehouse/utils/wsgi.py index 95350703969f..f4cced25e27f 100644 --- a/warehouse/utils/wsgi.py +++ b/warehouse/utils/wsgi.py @@ -16,6 +16,8 @@ from typing import TYPE_CHECKING +from sqlalchemy import type_coerce +from sqlalchemy.dialects.postgresql import INET from sqlalchemy.exc import NoResultFound from warehouse.ip_addresses.models import IpAddress @@ -138,12 +140,11 @@ def _remote_addr_hashed(request: Request) -> str: def _ip_address(request): """Return the IpAddress object for the remote address from the environment.""" + remote_inet = type_coerce(request.remote_addr, INET) try: - ip_address = ( - request.db.query(IpAddress).filter_by(ip_address=request.remote_addr).one() - ) + ip_address = request.db.query(IpAddress).filter_by(ip_address=remote_inet).one() except NoResultFound: - ip_address = IpAddress(ip_address=request.remote_addr) + ip_address = IpAddress(ip_address=remote_inet) request.db.add(ip_address) ip_address.hashed_ip_address = request.remote_addr_hashed