From 99d55c96cca1d9576d72ceee9178b3d8d8e5ca2e Mon Sep 17 00:00:00 2001 From: Ee Durbin Date: Thu, 27 Jun 2024 11:26:33 -0400 Subject: [PATCH 1/7] use opensearch container locally, to match production index --- dev/compose/opensearch/Dockerfile | 7 +++++++ dev/environment | 2 +- docker-compose.yml | 17 +++++++++++++---- 3 files changed, 21 insertions(+), 5 deletions(-) create mode 100644 dev/compose/opensearch/Dockerfile diff --git a/dev/compose/opensearch/Dockerfile b/dev/compose/opensearch/Dockerfile new file mode 100644 index 000000000000..6bd0a27b2f42 --- /dev/null +++ b/dev/compose/opensearch/Dockerfile @@ -0,0 +1,7 @@ +FROM opensearchproject/opensearch:2.12.0 + +RUN opensearch-plugin remove opensearch-skills --purge +RUN opensearch-plugin remove opensearch-ml --purge +RUN opensearch-plugin remove opensearch-neural-search --purge +RUN opensearch-plugin remove opensearch-performance-analyzer --purge +RUN opensearch-plugin remove opensearch-security-analytics --purge diff --git a/dev/environment b/dev/environment index 1d7695c25208..cbfd78736d93 100644 --- a/dev/environment +++ b/dev/environment @@ -11,7 +11,7 @@ BROKER_URL=sqs://localstack:4566/?region=us-east-1&queue_name_prefix=warehouse-d DATABASE_URL=postgresql+psycopg://postgres@db/warehouse -ELASTICSEARCH_URL=http://elasticsearch:9200/development +ELASTICSEARCH_URL=http://opensearch:9200/development REDIS_URL=redis://redis:6379/0 diff --git a/docker-compose.yml b/docker-compose.yml index e96214a9eb5b..b4ff9f8807d1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -44,11 +44,20 @@ services: ports: - "4566:4566" - elasticsearch: - image: elasticsearch:7.10.1 + opensearch: + build: + context: ./dev/compose/opensearch + init: true + healthcheck: + test: ["CMD-SHELL", "curl -u admin:gqYeDIzbEwTTYmB7 --silent --fail http://localhost:9200/_cluster/health || exit 1"] + interval: 1s + start_period: 10s environment: - - xpack.security.enabled=false - discovery.type=single-node + - OPENSEARCH_INITIAL_ADMIN_PASSWORD=gqYeDIzbEwTTYmB7 + - DISABLE_INSTALL_DEMO_CONFIG=true + - DISABLE_SECURITY_PLUGIN=true + - DISABLE_PERFORMANCE_ANALYZER_AGENT_CLI=true ulimits: nofile: soft: 65536 @@ -109,7 +118,7 @@ services: depends_on: db: condition: service_healthy - elasticsearch: + opensearch: condition: service_started redis: condition: service_started From 04f7aec15ed8571acd2d42ed865a11161366faf0 Mon Sep 17 00:00:00 2001 From: Ee Durbin Date: Thu, 27 Jun 2024 12:09:53 -0400 Subject: [PATCH 2/7] migrate to opensearchpy --- requirements/main.in | 5 ++-- requirements/main.txt | 26 ++++++++--------- tests/conftest.py | 2 +- tests/unit/search/test_init.py | 46 +++++++++++++++--------------- tests/unit/search/test_tasks.py | 50 ++++++++++++++++----------------- tests/unit/test_search.py | 34 +++++++++++----------- tests/unit/test_views.py | 48 +++++++++++++++---------------- warehouse/config.py | 2 +- warehouse/packaging/search.py | 4 +-- warehouse/search/__init__.py | 29 +++++++++---------- warehouse/search/queries.py | 6 ++-- warehouse/search/tasks.py | 35 +++++++++++------------ warehouse/search/utils.py | 2 +- warehouse/views.py | 8 +++--- 14 files changed, 146 insertions(+), 151 deletions(-) diff --git a/requirements/main.in b/requirements/main.in index e720c007d061..8d2a4ac8820c 100644 --- a/requirements/main.in +++ b/requirements/main.in @@ -13,8 +13,6 @@ click cryptography datadog>=0.19.0 disposable-email-domains -elasticsearch>=7.0.0,<7.11.0 -elasticsearch_dsl>=7.0.0,<8.0.0 first forcediphttpsadapter github-reserved-names>=1.0.0 @@ -31,6 
+29,7 @@ linehaul lxml msgpack natsort +opensearch-py orjson packaging>=23.2 packaging_legacy @@ -69,7 +68,7 @@ structlog transaction trove-classifiers ua-parser -urllib3<2 # See https://github.com/pypi/warehouse/issues/14671 +urllib3 webauthn>=1.0.0,<3.0.0 whitenoise WTForms[email]>=2.0.0 diff --git a/requirements/main.txt b/requirements/main.txt index db384b048df1..c3dfedabe806 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -184,7 +184,7 @@ certifi==2024.6.2 \ --hash=sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56 # via # -r requirements/main.in - # elasticsearch + # opensearch-py # requests # sentry-sdk cffi==1.16.0 \ @@ -485,20 +485,13 @@ docutils==0.20.1 \ --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b # via readme-renderer -elasticsearch==7.10.1 \ - --hash=sha256:4ebd34fd223b31c99d9f3b6b6236d3ac18b3046191a37231e8235b06ae7db955 \ - --hash=sha256:a725dd923d349ca0652cf95d6ce23d952e2153740cf4ab6daf4a2d804feeed48 - # via - # -r requirements/main.in - # elasticsearch-dsl -elasticsearch-dsl==7.4.1 \ - --hash=sha256:07ee9c87dc28cc3cae2daa19401e1e18a172174ad9e5ca67938f752e3902a1d5 \ - --hash=sha256:97f79239a252be7c4cce554c29e64695d7ef6a4828372316a5e5ff815e7a7498 - # via -r requirements/main.in email-validator==2.2.0 \ --hash=sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631 \ --hash=sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7 # via wtforms +events==0.5 \ + --hash=sha256:a7286af378ba3e46640ac9825156c93bdba7502174dd696090fdfcd4d80a1abd + # via opensearch-py first==2.0.2 \ --hash=sha256:8d8e46e115ea8ac652c76123c0865e3ff18372aef6f03c22809ceefcea9dec86 \ --hash=sha256:ff285b08c55f8c97ce4ea7012743af2495c9f1291785f163722bd36f6af6d3bf @@ -1285,6 +1278,10 @@ openapi-spec-validator==0.7.1 \ --hash=sha256:3c81825043f24ccbcd2f4b149b11e8231abce5ba84f37065e14ec947d8f4e959 \ --hash=sha256:8577b85a8268685da6f8aa30990b83b7960d4d1117e901d451b5d572605e5ec7 # via openapi-core +opensearch-py==2.6.0 \ + --hash=sha256:0b7c27e8ed84c03c99558406927b6161f186a72502ca6d0325413d8e5523ba96 \ + --hash=sha256:b6e78b685dd4e9c016d7a4299cf1de69e299c88322e3f81c716e6e23fe5683c1 + # via -r requirements/main.in orjson==3.10.5 \ --hash=sha256:03b565c3b93f5d6e001db48b747d31ea3819b89abf041ee10ac6988886d18e01 \ --hash=sha256:099e81a5975237fda3100f918839af95f42f981447ba8f47adb7b6a3cdb078fa \ @@ -1649,8 +1646,8 @@ python-dateutil==2.9.0.post0 \ # botocore # celery # celery-redbeat - # elasticsearch-dsl # google-cloud-bigquery + # opensearch-py python-slugify==8.0.4 \ --hash=sha256:276540b79961052b66b7d116620b36518847f52d5fd9e3a70164fc8c50faa6b8 \ --hash=sha256:59202371d1d05b54a9e7720c5e038f928f45daaffe41dd10822f3907b937c856 @@ -1745,6 +1742,7 @@ requests==2.32.3 \ # google-cloud-bigquery # google-cloud-storage # jsonschema-path + # opensearch-py # premailer # requests-aws4auth # stripe @@ -1880,9 +1878,9 @@ six==1.16.0 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 # via # automat - # elasticsearch-dsl # html5lib # isodate + # opensearch-py # pymacaroons # python-dateutil # requests-aws4auth @@ -2006,8 +2004,8 @@ urllib3==1.26.19 \ # -r requirements/main.in # botocore # celery - # elasticsearch # kombu + # opensearch-py # requests # sentry-sdk venusian==3.1.0 \ diff --git a/tests/conftest.py b/tests/conftest.py index 2f5fa57c445b..61cdb1a53a31 100644 --- a/tests/conftest.py +++ 
b/tests/conftest.py
@@ -312,7 +312,7 @@ def app_config(database):
         "database.url": database,
         "docs.url": "http://docs.example.com/",
         "ratelimit.url": "memory://",
-        "elasticsearch.url": "https://localhost/warehouse",
+        "opensearch.url": "https://localhost/warehouse",
         "files.backend": "warehouse.packaging.services.LocalFileStorage",
         "archive_files.backend": "warehouse.packaging.services.LocalArchiveFileStorage",
         "simple.backend": "warehouse.packaging.services.LocalSimpleStorage",
diff --git a/tests/unit/search/test_init.py b/tests/unit/search/test_init.py
index ec6803746ad8..9a9657611bbd 100644
--- a/tests/unit/search/test_init.py
+++ b/tests/unit/search/test_init.py
@@ -10,7 +10,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import elasticsearch
+import opensearchpy
 import pretend

 from warehouse import search
@@ -69,7 +69,7 @@ def test_execute_unindex_success(app_config):
     assert "warehouse.search.project_deletes" not in session.info


-def test_es(monkeypatch):
+def test_os(monkeypatch):
     search_obj = pretend.stub()
     index_obj = pretend.stub(
         document=pretend.call_recorder(lambda d: None),
@@ -84,15 +84,15 @@ def test_es(monkeypatch):
     client = pretend.stub()
     request = pretend.stub(
         registry={
-            "elasticsearch.client": client,
-            "elasticsearch.index": "warehouse",
+            "opensearch.client": client,
+            "opensearch.index": "warehouse",
             "search.doc_types": doc_types,
         }
     )

-    es = search.es(request)
+    os = search.os(request)

-    assert es is search_obj
+    assert os is search_obj
     assert index_cls.calls == [pretend.call("warehouse", using=client)]
     assert index_obj.document.calls == [pretend.call(d) for d in doc_types]
     assert index_obj.settings.calls == [
@@ -104,20 +104,20 @@ def test_includeme(monkeypatch):
     aws4auth_stub = pretend.stub()
     aws4auth = pretend.call_recorder(lambda *a, **kw: aws4auth_stub)
-    es_client = pretend.stub()
-    es_client_init = pretend.call_recorder(lambda *a, **kw: es_client)
+    os_client = pretend.stub()
+    os_client_init = pretend.call_recorder(lambda *a, **kw: os_client)

     monkeypatch.setattr(search.requests_aws4auth, "AWS4Auth", aws4auth)
-    monkeypatch.setattr(search.elasticsearch, "Elasticsearch", es_client_init)
+    monkeypatch.setattr(search.opensearchpy, "OpenSearch", os_client_init)

     registry = {}
-    es_url = "https://some.url/some-index?aws_auth=1&region=us-east-2"
+    os_url = "https://some.url/some-index?aws_auth=1&region=us-east-2"
     config = pretend.stub(
         registry=pretend.stub(
             settings={
                 "aws.key_id": "AAAAAAAAAAAA",
                 "aws.secret_key": "deadbeefdeadbeefdeadbeef",
-                "elasticsearch.url": es_url,
+                "opensearch.url": os_url,
             },
             __setitem__=registry.__setitem__,
         ),
@@ -130,20 +130,20 @@ def test_includeme(monkeypatch):
     assert aws4auth.calls == [
         pretend.call("AAAAAAAAAAAA", "deadbeefdeadbeefdeadbeef", "us-east-2", "es")
     ]
-    assert len(es_client_init.calls) == 1
-    assert es_client_init.calls[0].kwargs["hosts"] == ["https://some.url"]
-    assert es_client_init.calls[0].kwargs["timeout"] == 2
-    assert es_client_init.calls[0].kwargs["retry_on_timeout"] is False
+    assert len(os_client_init.calls) == 1
+    assert os_client_init.calls[0].kwargs["hosts"] == ["https://some.url"]
+    assert os_client_init.calls[0].kwargs["timeout"] == 2
+    assert os_client_init.calls[0].kwargs["retry_on_timeout"] is False
     assert (
-        es_client_init.calls[0].kwargs["connection_class"]
-        == elasticsearch.connection.http_requests.RequestsHttpConnection
+        os_client_init.calls[0].kwargs["connection_class"]
+        ==
opensearchpy.connection.http_requests.RequestsHttpConnection
     )
-    assert es_client_init.calls[0].kwargs["http_auth"] == aws4auth_stub
+    assert os_client_init.calls[0].kwargs["http_auth"] == aws4auth_stub

-    assert registry["elasticsearch.client"] == es_client
-    assert registry["elasticsearch.index"] == "some-index"
-    assert registry["elasticsearch.shards"] == 1
-    assert registry["elasticsearch.replicas"] == 0
+    assert registry["opensearch.client"] == os_client
+    assert registry["opensearch.index"] == "some-index"
+    assert registry["opensearch.shards"] == 1
+    assert registry["opensearch.replicas"] == 0
     assert config.add_request_method.calls == [
-        pretend.call(search.es, name="es", reify=True)
+        pretend.call(search.os, name="os", reify=True)
     ]
diff --git a/tests/unit/search/test_tasks.py b/tests/unit/search/test_tasks.py
index 0307862161e5..14bad3d011f2 100644
--- a/tests/unit/search/test_tasks.py
+++ b/tests/unit/search/test_tasks.py
@@ -13,7 +13,7 @@
 import os

 import celery.exceptions
-import elasticsearch
+import opensearchpy
 import packaging.version
 import pretend
 import pytest
@@ -227,14 +227,14 @@ def project_docs(db):
         task = pretend.stub()
         es_client = FakeESClient()

-        db_request.registry.update({"elasticsearch.index": "warehouse"})
+        db_request.registry.update({"opensearch.index": "warehouse"})
         db_request.registry.settings = {
-            "elasticsearch.url": "http://some.url",
+            "opensearch.url": "http://some.url",
             "celery.scheduler_url": "redis://redis:6379/0",
         }
         monkeypatch.setattr(
-            warehouse.search.tasks.elasticsearch,
-            "Elasticsearch",
+            warehouse.search.tasks.opensearchpy,
+            "OpenSearch",
             lambda *a, **kw: es_client,
         )
@@ -288,15 +288,15 @@ def project_docs(db):
         es_client = FakeESClient()

         db_request.registry.update(
-            {"elasticsearch.index": "warehouse", "elasticsearch.shards": 42}
+            {"opensearch.index": "warehouse", "opensearch.shards": 42}
         )
         db_request.registry.settings = {
-            "elasticsearch.url": "http://some.url",
+            "opensearch.url": "http://some.url",
             "celery.scheduler_url": "redis://redis:6379/0",
         }
         monkeypatch.setattr(
-            warehouse.search.tasks.elasticsearch,
-            "Elasticsearch",
+            warehouse.search.tasks.opensearchpy,
+            "OpenSearch",
             lambda *a, **kw: es_client,
         )
         monkeypatch.setattr(warehouse.search.tasks, "SearchLock", NotLock)
@@ -349,18 +349,18 @@ def project_docs(db):

         db_request.registry.update(
             {
-                "elasticsearch.index": "warehouse",
-                "elasticsearch.shards": 42,
+                "opensearch.index": "warehouse",
+                "opensearch.shards": 42,
                 "sqlalchemy.engine": db_engine,
             }
         )
         db_request.registry.settings = {
-            "elasticsearch.url": "http://some.url",
+            "opensearch.url": "http://some.url",
             "celery.scheduler_url": "redis://redis:6379/0",
         }
         monkeypatch.setattr(
-            warehouse.search.tasks.elasticsearch,
-            "Elasticsearch",
+            warehouse.search.tasks.opensearchpy,
+            "OpenSearch",
             lambda *a, **kw: es_client,
         )
         monkeypatch.setattr(warehouse.search.tasks, "SearchLock", NotLock)
@@ -412,19 +412,19 @@ def project_docs(db):
         es_client_init = pretend.call_recorder(lambda *a, **kw: es_client)

         db_request.registry.update(
-            {"elasticsearch.index": "warehouse", "elasticsearch.shards": 42}
+            {"opensearch.index": "warehouse", "opensearch.shards": 42}
         )
         db_request.registry.settings = {
             "aws.key_id": "AAAAAAAAAAAAAAAAAA",
             "aws.secret_key": "deadbeefdeadbeefdeadbeef",
-            "elasticsearch.url": "https://some.url?aws_auth=1&region=us-east-2",
+            "opensearch.url": "https://some.url?aws_auth=1&region=us-east-2",
             "celery.scheduler_url": "redis://redis:6379/0",
         }
         monkeypatch.setattr(
             warehouse.search.tasks.requests_aws4auth, "AWS4Auth",
aws4auth ) monkeypatch.setattr( - warehouse.search.tasks.elasticsearch, "Elasticsearch", es_client_init + warehouse.search.tasks.opensearchpy, "OpenSearch", es_client_init ) monkeypatch.setattr(warehouse.search.tasks, "SearchLock", NotLock) @@ -441,7 +441,7 @@ def project_docs(db): assert es_client_init.calls[0].kwargs["retry_on_timeout"] is True assert ( es_client_init.calls[0].kwargs["connection_class"] - == elasticsearch.connection.http_requests.RequestsHttpConnection + == opensearchpy.connection.http_requests.RequestsHttpConnection ) assert es_client_init.calls[0].kwargs["http_auth"] == aws4auth_stub assert aws4auth.calls == [ @@ -491,7 +491,7 @@ def project_docs(db, project_name=None): es_client = FakeESClient() db_request.registry.update( - {"elasticsearch.client": es_client, "elasticsearch.index": "warehouse"} + {"opensearch.client": es_client, "opensearch.index": "warehouse"} ) class TestError(Exception): @@ -523,7 +523,7 @@ class TestError(Exception): monkeypatch.setattr(warehouse.search.tasks, "SearchLock", NotLock) db_request.registry.update( - {"elasticsearch.client": es_client, "elasticsearch.index": "warehouse"} + {"opensearch.client": es_client, "opensearch.index": "warehouse"} ) with pytest.raises(TestError): @@ -536,12 +536,12 @@ def test_unindex_accepts_defeat(self, db_request, monkeypatch): es_client = FakeESClient() es_client.delete = pretend.call_recorder( - pretend.raiser(elasticsearch.exceptions.NotFoundError) + pretend.raiser(opensearchpy.exceptions.NotFoundError) ) monkeypatch.setattr(warehouse.search.tasks, "SearchLock", NotLock) db_request.registry.update( - {"elasticsearch.client": es_client, "elasticsearch.index": "warehouse"} + {"opensearch.client": es_client, "opensearch.index": "warehouse"} ) unindex_project(task, db_request, "foo") @@ -596,9 +596,9 @@ def project_docs(db, project_name=None): db_request.registry.update( { - "elasticsearch.client": es_client, - "elasticsearch.index": "warehouse", - "elasticsearch.shards": 42, + "opensearch.client": es_client, + "opensearch.index": "warehouse", + "opensearch.shards": 42, "sqlalchemy.engine": db_engine, } ) diff --git a/tests/unit/test_search.py b/tests/unit/test_search.py index 2aacefbfb982..034cb736d05b 100644 --- a/tests/unit/test_search.py +++ b/tests/unit/test_search.py @@ -13,7 +13,7 @@ import pytest -from elasticsearch_dsl import Search +from opensearchpy import Search from warehouse.search import queries @@ -35,9 +35,9 @@ class TestQueries: def test_no_terms(self): - es = Search() + os = Search() - query = queries.get_es_query(es, "", "", []) + query = queries.get_os_query(os, "", "", []) assert query.to_dict() == {"query": {"match_all": {}}} @@ -50,9 +50,9 @@ def test_no_terms(self): ], ) def test_quoted_query(self, terms, expected_prefix, expected_type): - es = Search() + os = Search() - query = queries.get_es_query(es, terms, "", []) + query = queries.get_os_query(os, terms, "", []) assert query.to_dict() == { "query": { @@ -81,10 +81,10 @@ def test_quoted_query(self, terms, expected_prefix, expected_type): } def test_single_not_quoted_character(self): - es = Search() + os = Search() terms = "a" - query = queries.get_es_query(es, terms, "", []) + query = queries.get_os_query(os, terms, "", []) assert query.to_dict() == { "query": { @@ -104,10 +104,10 @@ def test_single_not_quoted_character(self): } def test_mixed_quoted_query(self): - es = Search() + os = Search() terms = '"foo bar" baz' - query = queries.get_es_query(es, terms, "", []) + query = queries.get_os_query(os, terms, "", []) assert 
query.to_dict() == { "query": { @@ -144,10 +144,10 @@ def test_mixed_quoted_query(self): @pytest.mark.parametrize("order,field", [("created", "created")]) def test_sort_order(self, order, field): - es = Search() + os = Search() terms = "foo bar" - query = queries.get_es_query(es, terms, order, []) + query = queries.get_os_query(os, terms, order, []) assert query.to_dict() == { "query": { @@ -182,11 +182,11 @@ def test_sort_order(self, order, field): } def test_with_classifiers_with_terms(self): - es = Search() + os = Search() terms = "foo bar" classifiers = ["foo :: bar", "fiz :: buz"] - query = queries.get_es_query(es, terms, "", classifiers) + query = queries.get_os_query(os, terms, "", classifiers) assert query.to_dict() == { "query": { @@ -237,11 +237,11 @@ def test_with_classifiers_with_terms(self): } def test_with_classifiers_with_no_terms(self): - es = Search() + os = Search() terms = "" classifiers = ["foo :: bar", "fiz :: buz"] - query = queries.get_es_query(es, terms, "", classifiers) + query = queries.get_os_query(os, terms, "", classifiers) assert query.to_dict() == { "query": { @@ -262,11 +262,11 @@ def test_with_classifiers_with_no_terms(self): } def test_with_classifier_with_no_terms_and_order(self): - es = Search() + os = Search() terms = "" classifiers = ["foo :: bar"] - query = queries.get_es_query(es, terms, "-created", classifiers) + query = queries.get_os_query(os, terms, "-created", classifiers) assert query.to_dict() == { "query": { diff --git a/tests/unit/test_views.py b/tests/unit/test_views.py index c6f9d26a7d98..126363bbe919 100644 --- a/tests/unit/test_views.py +++ b/tests/unit/test_views.py @@ -12,7 +12,7 @@ import datetime -import elasticsearch +import opensearchpy import pretend import pytest import sqlalchemy @@ -465,10 +465,10 @@ def test_with_a_query(self, monkeypatch, db_request, metrics, page): params["page"] = page db_request.params = params - db_request.es = pretend.stub() - es_query = pretend.stub() - get_es_query = pretend.call_recorder(lambda *a, **kw: es_query) - monkeypatch.setattr(views, "get_es_query", get_es_query) + db_request.os = pretend.stub() + os_query = pretend.stub() + get_os_query = pretend.call_recorder(lambda *a, **kw: os_query) + monkeypatch.setattr(views, "get_os_query", get_os_query) page_obj = pretend.stub(page_count=(page or 1) + 10, item_count=1000) page_cls = pretend.call_recorder(lambda *a, **kw: page_obj) @@ -485,11 +485,11 @@ def test_with_a_query(self, monkeypatch, db_request, metrics, page): "applied_filters": [], "available_filters": [], } - assert get_es_query.calls == [ - pretend.call(db_request.es, params.get("q"), "", []) + assert get_os_query.calls == [ + pretend.call(db_request.os, params.get("q"), "", []) ] assert page_cls.calls == [ - pretend.call(es_query, url_maker=url_maker, page=page or 1) + pretend.call(os_query, url_maker=url_maker, page=page or 1) ] assert url_maker_factory.calls == [pretend.call(db_request)] assert metrics.histogram.calls == [ @@ -503,10 +503,10 @@ def test_with_classifiers(self, monkeypatch, db_request, metrics, page): params["page"] = page db_request.params = params - es_query = pretend.stub() - db_request.es = pretend.stub() - get_es_query = pretend.call_recorder(lambda *a, **kw: es_query) - monkeypatch.setattr(views, "get_es_query", get_es_query) + os_query = pretend.stub() + db_request.os = pretend.stub() + get_os_query = pretend.call_recorder(lambda *a, **kw: os_query) + monkeypatch.setattr(views, "get_os_query", get_os_query) classifier1 = ClassifierFactory.create(classifier="foo 
:: bar") classifier2 = ClassifierFactory.create(classifier="foo :: baz") @@ -543,11 +543,11 @@ def test_with_classifiers(self, monkeypatch, db_request, metrics, page): } assert ("fiz", [classifier3.classifier]) not in search_view["available_filters"] assert page_cls.calls == [ - pretend.call(es_query, url_maker=url_maker, page=page or 1) + pretend.call(os_query, url_maker=url_maker, page=page or 1) ] assert url_maker_factory.calls == [pretend.call(db_request)] - assert get_es_query.calls == [ - pretend.call(db_request.es, params.get("q"), "", params.getall("c")) + assert get_os_query.calls == [ + pretend.call(db_request.os, params.get("q"), "", params.getall("c")) ] assert metrics.histogram.calls == [ pretend.call("warehouse.views.search.results", 1000) @@ -557,8 +557,8 @@ def test_returns_404_with_pagenum_too_high(self, monkeypatch, db_request, metric params = MultiDict({"page": 15}) db_request.params = params - es_query = pretend.stub() - db_request.es = pretend.stub(query=lambda *a, **kw: es_query) + os_query = pretend.stub() + db_request.os = pretend.stub(query=lambda *a, **kw: os_query) page_obj = pretend.stub(page_count=10, item_count=1000) page_cls = pretend.call_recorder(lambda *a, **kw: page_obj) @@ -572,7 +572,7 @@ def test_returns_404_with_pagenum_too_high(self, monkeypatch, db_request, metric search(db_request) assert page_cls.calls == [ - pretend.call(es_query, url_maker=url_maker, page=15 or 1) + pretend.call(os_query, url_maker=url_maker, page=15 or 1) ] assert url_maker_factory.calls == [pretend.call(db_request)] assert metrics.histogram.calls == [] @@ -581,8 +581,8 @@ def test_raises_400_with_pagenum_type_str(self, monkeypatch, db_request, metrics params = MultiDict({"page": "abc"}) db_request.params = params - es_query = pretend.stub() - db_request.es = pretend.stub(query=lambda *a, **kw: es_query) + os_query = pretend.stub() + db_request.os = pretend.stub(query=lambda *a, **kw: os_query) page_obj = pretend.stub(page_count=10, item_count=1000) page_cls = pretend.call_recorder(lambda *a, **kw: page_obj) @@ -609,15 +609,15 @@ def test_return_413_when_query_too_long(self, db_request, metrics): pretend.call("warehouse.views.search.error", tags=["error:query_too_long"]) ] - def test_returns_503_when_es_unavailable(self, monkeypatch, db_request, metrics): + def test_returns_503_when_os_unavailable(self, monkeypatch, db_request, metrics): params = MultiDict({"page": 15}) db_request.params = params - es_query = pretend.stub() - db_request.es = pretend.stub(query=lambda *a, **kw: es_query) + os_query = pretend.stub() + db_request.os = pretend.stub(query=lambda *a, **kw: os_query) def raiser(*args, **kwargs): - raise elasticsearch.ConnectionError() + raise opensearchpy.ConnectionError() monkeypatch.setattr(views, "ElasticsearchPage", raiser) diff --git a/warehouse/config.py b/warehouse/config.py index a312ed1dd6d0..f4d8e270164c 100644 --- a/warehouse/config.py +++ b/warehouse/config.py @@ -270,7 +270,7 @@ def configure(settings=None): maybe_set(settings, "celery.scheduler_url", "REDIS_URL") maybe_set(settings, "oidc.jwk_cache_url", "REDIS_URL") maybe_set(settings, "database.url", "DATABASE_URL") - maybe_set(settings, "elasticsearch.url", "ELASTICSEARCH_URL") + maybe_set(settings, "opensearch.url", "ELASTICSEARCH_URL") maybe_set(settings, "sentry.dsn", "SENTRY_DSN") maybe_set(settings, "sentry.transport", "SENTRY_TRANSPORT") maybe_set(settings, "sessions.url", "REDIS_URL") diff --git a/warehouse/packaging/search.py b/warehouse/packaging/search.py index 072a490c73fc..8eb4ea649b34 
100644 --- a/warehouse/packaging/search.py +++ b/warehouse/packaging/search.py @@ -10,7 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from elasticsearch_dsl import Date, Document, Keyword, Text, analyzer +from opensearchpy import Date, Document, Keyword, Text, analyzer from warehouse.search.utils import doc_type @@ -69,5 +69,5 @@ def from_db(cls, release): class Index: # make sure this class can match any index so it will always be used to - # deserialize data coming from elasticsearch. + # deserialize data coming from opensearch. name = "*" diff --git a/warehouse/search/__init__.py b/warehouse/search/__init__.py index ec6c92746d5a..53ad94394db1 100644 --- a/warehouse/search/__init__.py +++ b/warehouse/search/__init__.py @@ -13,11 +13,10 @@ import urllib.parse import certifi -import elasticsearch +import opensearchpy import requests_aws4auth from celery.schedules import crontab -from elasticsearch_dsl import serializer from urllib3.util import parse_url from warehouse import db @@ -65,22 +64,22 @@ def execute_project_reindex(config, session): config.task(unindex_project).delay(project.normalized_name) -def es(request): - client = request.registry["elasticsearch.client"] +def os(request): + client = request.registry["opensearch.client"] doc_types = request.registry.get("search.doc_types", set()) - index_name = request.registry["elasticsearch.index"] + index_name = request.registry["opensearch.index"] index = get_index( index_name, doc_types, using=client, - shards=request.registry.get("elasticsearch.shards", 1), - replicas=request.registry.get("elasticsearch.replicas", 0), + shards=request.registry.get("opensearch.shards", 1), + replicas=request.registry.get("opensearch.replicas", 0), ) return index.search() def includeme(config): - p = parse_url(config.registry.settings["elasticsearch.url"]) + p = parse_url(config.registry.settings["opensearch.url"]) qs = urllib.parse.parse_qs(p.query) kwargs = { "hosts": [urllib.parse.urlunparse((p.scheme, p.netloc) + ("",) * 4)], @@ -88,24 +87,24 @@ def includeme(config): "ca_certs": certifi.where(), "timeout": 2, "retry_on_timeout": False, - "serializer": serializer.serializer, + "serializer": opensearchpy.serializer.serializer, "max_retries": 1, } aws_auth = bool(qs.get("aws_auth", False)) if aws_auth: aws_region = qs.get("region", ["us-east-1"])[0] - kwargs["connection_class"] = elasticsearch.RequestsHttpConnection + kwargs["connection_class"] = opensearchpy.RequestsHttpConnection kwargs["http_auth"] = requests_aws4auth.AWS4Auth( config.registry.settings["aws.key_id"], config.registry.settings["aws.secret_key"], aws_region, "es", ) - config.registry["elasticsearch.client"] = elasticsearch.Elasticsearch(**kwargs) - config.registry["elasticsearch.index"] = p.path.strip("/") - config.registry["elasticsearch.shards"] = int(qs.get("shards", ["1"])[0]) - config.registry["elasticsearch.replicas"] = int(qs.get("replicas", ["0"])[0]) - config.add_request_method(es, name="es", reify=True) + config.registry["opensearch.client"] = opensearchpy.OpenSearch(**kwargs) + config.registry["opensearch.index"] = p.path.strip("/") + config.registry["opensearch.shards"] = int(qs.get("shards", ["1"])[0]) + config.registry["opensearch.replicas"] = int(qs.get("replicas", ["0"])[0]) + config.add_request_method(os, name="os", reify=True) from warehouse.search.tasks import reindex diff --git a/warehouse/search/queries.py b/warehouse/search/queries.py index c34c26853d30..3cd2cb2e63bf 100644 --- 
a/warehouse/search/queries.py +++ b/warehouse/search/queries.py @@ -12,7 +12,7 @@ import re -from elasticsearch_dsl import Q +from opensearchpy import Q SEARCH_FIELDS = [ "author", @@ -48,9 +48,9 @@ ) -def get_es_query(es, terms, order, classifiers): +def get_os_query(es, terms, order, classifiers): """ - Returns an Elasticsearch query from data from the request. + Returns an OpenSearch query from data from the request. """ classifier_q = Q( "bool", diff --git a/warehouse/search/tasks.py b/warehouse/search/tasks.py index d6da6bae1ce4..cd97498d3a48 100644 --- a/warehouse/search/tasks.py +++ b/warehouse/search/tasks.py @@ -15,13 +15,12 @@ import urllib.parse import certifi -import elasticsearch +import opensearchpy import redis import requests_aws4auth import sentry_sdk -from elasticsearch.helpers import parallel_bulk -from elasticsearch_dsl import serializer +from opensearchpy.helpers import parallel_bulk from redis.lock import Lock from sqlalchemy import func, select, text from sqlalchemy.orm import aliased @@ -120,7 +119,7 @@ def reindex(self, request): r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) try: with SearchLock(r, timeout=30 * 60, blocking_timeout=30): - p = parse_url(request.registry.settings["elasticsearch.url"]) + p = parse_url(request.registry.settings["opensearch.url"]) qs = urllib.parse.parse_qs(p.query) kwargs = { "hosts": [urllib.parse.urlunparse((p.scheme, p.netloc) + ("",) * 4)], @@ -128,21 +127,21 @@ def reindex(self, request): "ca_certs": certifi.where(), "timeout": 30, "retry_on_timeout": True, - "serializer": serializer.serializer, + "serializer": opensearchpy.serializer.serializer, } aws_auth = bool(qs.get("aws_auth", False)) if aws_auth: aws_region = qs.get("region", ["us-east-1"])[0] - kwargs["connection_class"] = elasticsearch.RequestsHttpConnection + kwargs["connection_class"] = opensearchpy.RequestsHttpConnection kwargs["http_auth"] = requests_aws4auth.AWS4Auth( request.registry.settings["aws.key_id"], request.registry.settings["aws.secret_key"], aws_region, "es", ) - client = elasticsearch.Elasticsearch(**kwargs) - number_of_replicas = request.registry.get("elasticsearch.replicas", 0) - refresh_interval = request.registry.get("elasticsearch.interval", "1s") + client = opensearchpy.OpenSearch(**kwargs) + number_of_replicas = request.registry.get("opensearch.replicas", 0) + refresh_interval = request.registry.get("opensearch.interval", "1s") # We use a randomly named index so that we can do a zero downtime reindex. # Essentially we'll use a randomly named index which we will use until all @@ -150,11 +149,11 @@ def reindex(self, request): # our randomly named index, and then delete the old randomly named index. # Create the new index and associate all of our doc types with it. - index_base = request.registry["elasticsearch.index"] + index_base = request.registry["opensearch.index"] random_token = binascii.hexlify(os.urandom(5)).decode("ascii") new_index_name = f"{index_base}-{random_token}" doc_types = request.registry.get("search.doc_types", set()) - shards = request.registry.get("elasticsearch.shards", 1) + shards = request.registry.get("opensearch.shards", 1) # Create the new index with zero replicas and index refreshes disabled # while we are bulk indexing. 
@@ -218,15 +217,15 @@ def reindex_project(self, request, project_name): r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) try: with SearchLock(r, timeout=15, blocking_timeout=1): - client = request.registry["elasticsearch.client"] + client = request.registry["opensearch.client"] doc_types = request.registry.get("search.doc_types", set()) - index_name = request.registry["elasticsearch.index"] + index_name = request.registry["opensearch.index"] get_index( index_name, doc_types, using=client, - shards=request.registry.get("elasticsearch.shards", 1), - replicas=request.registry.get("elasticsearch.replicas", 0), + shards=request.registry.get("opensearch.shards", 1), + replicas=request.registry.get("opensearch.replicas", 0), ) for _ in parallel_bulk( @@ -243,11 +242,11 @@ def unindex_project(self, request, project_name): r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) try: with SearchLock(r, timeout=15, blocking_timeout=1): - client = request.registry["elasticsearch.client"] - index_name = request.registry["elasticsearch.index"] + client = request.registry["opensearch.client"] + index_name = request.registry["opensearch.index"] try: client.delete(index=index_name, id=project_name) - except elasticsearch.exceptions.NotFoundError: + except opensearchpy.exceptions.NotFoundError: pass except redis.exceptions.LockError as exc: sentry_sdk.capture_exception(exc) diff --git a/warehouse/search/utils.py b/warehouse/search/utils.py index b3ae6a5d7880..b6a8e8e2fbf8 100644 --- a/warehouse/search/utils.py +++ b/warehouse/search/utils.py @@ -12,7 +12,7 @@ import venusian -from elasticsearch_dsl import Index +from opensearchpy import Index def doc_type(cls): diff --git a/warehouse/views.py b/warehouse/views.py index ef620fa68e53..0532a38fe95b 100644 --- a/warehouse/views.py +++ b/warehouse/views.py @@ -14,7 +14,7 @@ import collections import re -import elasticsearch +import opensearchpy from pyramid.exceptions import PredicateMismatch from pyramid.httpexceptions import ( @@ -60,7 +60,7 @@ Release, ReleaseClassifiers, ) -from warehouse.search.queries import SEARCH_FILTER_ORDER, get_es_query +from warehouse.search.queries import SEARCH_FILTER_ORDER, get_os_query from warehouse.utils.http import is_safe_url from warehouse.utils.paginate import ElasticsearchPage, paginate_url_factory from warehouse.utils.row_counter import RowCount @@ -327,7 +327,7 @@ def search(request): order = request.params.get("o", "") classifiers = request.params.getall("c") - query = get_es_query(request.es, querystring, order, classifiers) + query = get_os_query(request.os, querystring, order, classifiers) try: page_num = int(request.params.get("page", 1)) @@ -338,7 +338,7 @@ def search(request): page = ElasticsearchPage( query, page=page_num, url_maker=paginate_url_factory(request) ) - except elasticsearch.TransportError: + except opensearchpy.TransportError: metrics.increment("warehouse.views.search.error") raise HTTPServiceUnavailable From 58b2481689e927d7d3a613acc9102581c16a6b47 Mon Sep 17 00:00:00 2001 From: Ee Durbin Date: Thu, 27 Jun 2024 13:06:09 -0400 Subject: [PATCH 3/7] rename os -> opensearch, no need to save keystrokes --- tests/unit/search/test_init.py | 32 +++++++++++------------ tests/unit/test_search.py | 32 +++++++++++------------ tests/unit/test_views.py | 46 ++++++++++++++++++---------------- warehouse/search/__init__.py | 4 +-- warehouse/search/queries.py | 6 ++--- warehouse/views.py | 4 +-- 6 files changed, 63 insertions(+), 61 deletions(-) diff --git 
a/tests/unit/search/test_init.py b/tests/unit/search/test_init.py
index 9a9657611bbd..712122b0be6e 100644
--- a/tests/unit/search/test_init.py
+++ b/tests/unit/search/test_init.py
@@ -69,7 +69,7 @@ def test_execute_unindex_success(app_config):
     assert "warehouse.search.project_deletes" not in session.info


-def test_os(monkeypatch):
+def test_opensearch(monkeypatch):
     search_obj = pretend.stub()
     index_obj = pretend.stub(
         document=pretend.call_recorder(lambda d: None),
@@ -90,9 +90,9 @@ def test_os(monkeypatch):
         }
     )

-    os = search.os(request)
+    opensearch = search.opensearch(request)

-    assert os is search_obj
+    assert opensearch is search_obj
     assert index_cls.calls == [pretend.call("warehouse", using=client)]
     assert index_obj.document.calls == [pretend.call(d) for d in doc_types]
     assert index_obj.settings.calls == [
@@ -104,20 +104,20 @@ def test_includeme(monkeypatch):
     aws4auth_stub = pretend.stub()
     aws4auth = pretend.call_recorder(lambda *a, **kw: aws4auth_stub)
-    os_client = pretend.stub()
-    os_client_init = pretend.call_recorder(lambda *a, **kw: os_client)
+    opensearch_client = pretend.stub()
+    opensearch_client_init = pretend.call_recorder(lambda *a, **kw: opensearch_client)

     monkeypatch.setattr(search.requests_aws4auth, "AWS4Auth", aws4auth)
-    monkeypatch.setattr(search.opensearchpy, "OpenSearch", os_client_init)
+    monkeypatch.setattr(search.opensearchpy, "OpenSearch", opensearch_client_init)

     registry = {}
-    os_url = "https://some.url/some-index?aws_auth=1&region=us-east-2"
+    opensearch_url = "https://some.url/some-index?aws_auth=1&region=us-east-2"
     config = pretend.stub(
         registry=pretend.stub(
             settings={
                 "aws.key_id": "AAAAAAAAAAAA",
                 "aws.secret_key": "deadbeefdeadbeefdeadbeef",
-                "opensearch.url": os_url,
+                "opensearch.url": opensearch_url,
             },
             __setitem__=registry.__setitem__,
         ),
@@ -130,20 +130,20 @@ def test_includeme(monkeypatch):
     assert aws4auth.calls == [
         pretend.call("AAAAAAAAAAAA", "deadbeefdeadbeefdeadbeef", "us-east-2", "es")
     ]
-    assert len(os_client_init.calls) == 1
-    assert os_client_init.calls[0].kwargs["hosts"] == ["https://some.url"]
-    assert os_client_init.calls[0].kwargs["timeout"] == 2
-    assert os_client_init.calls[0].kwargs["retry_on_timeout"] is False
+    assert len(opensearch_client_init.calls) == 1
+    assert opensearch_client_init.calls[0].kwargs["hosts"] == ["https://some.url"]
+    assert opensearch_client_init.calls[0].kwargs["timeout"] == 2
+    assert opensearch_client_init.calls[0].kwargs["retry_on_timeout"] is False
     assert (
-        os_client_init.calls[0].kwargs["connection_class"]
+        opensearch_client_init.calls[0].kwargs["connection_class"]
         == opensearchpy.connection.http_requests.RequestsHttpConnection
     )
-    assert os_client_init.calls[0].kwargs["http_auth"] == aws4auth_stub
+    assert opensearch_client_init.calls[0].kwargs["http_auth"] == aws4auth_stub

-    assert registry["opensearch.client"] == os_client
+    assert registry["opensearch.client"] == opensearch_client
     assert registry["opensearch.index"] == "some-index"
     assert registry["opensearch.shards"] == 1
     assert registry["opensearch.replicas"] == 0
     assert config.add_request_method.calls == [
-        pretend.call(search.os, name="os", reify=True)
+        pretend.call(search.opensearch, name="opensearch", reify=True)
     ]
diff --git a/tests/unit/test_search.py b/tests/unit/test_search.py
index 034cb736d05b..f5c5a4841ebf 100644
--- a/tests/unit/test_search.py
+++ b/tests/unit/test_search.py
@@ -35,9 +35,9 @@ class TestQueries:
     def test_no_terms(self):
-        os = Search()
+        opensearch = Search()

-        query =
queries.get_os_query(os, "", "", []) + query = queries.get_opensearch_query(opensearch, "", "", []) assert query.to_dict() == {"query": {"match_all": {}}} @@ -50,9 +50,9 @@ def test_no_terms(self): ], ) def test_quoted_query(self, terms, expected_prefix, expected_type): - os = Search() + opensearch = Search() - query = queries.get_os_query(os, terms, "", []) + query = queries.get_opensearch_query(opensearch, terms, "", []) assert query.to_dict() == { "query": { @@ -81,10 +81,10 @@ def test_quoted_query(self, terms, expected_prefix, expected_type): } def test_single_not_quoted_character(self): - os = Search() + opensearch = Search() terms = "a" - query = queries.get_os_query(os, terms, "", []) + query = queries.get_opensearch_query(opensearch, terms, "", []) assert query.to_dict() == { "query": { @@ -104,10 +104,10 @@ def test_single_not_quoted_character(self): } def test_mixed_quoted_query(self): - os = Search() + opensearch = Search() terms = '"foo bar" baz' - query = queries.get_os_query(os, terms, "", []) + query = queries.get_opensearch_query(opensearch, terms, "", []) assert query.to_dict() == { "query": { @@ -144,10 +144,10 @@ def test_mixed_quoted_query(self): @pytest.mark.parametrize("order,field", [("created", "created")]) def test_sort_order(self, order, field): - os = Search() + opensearch = Search() terms = "foo bar" - query = queries.get_os_query(os, terms, order, []) + query = queries.get_opensearch_query(opensearch, terms, order, []) assert query.to_dict() == { "query": { @@ -182,11 +182,11 @@ def test_sort_order(self, order, field): } def test_with_classifiers_with_terms(self): - os = Search() + opensearch = Search() terms = "foo bar" classifiers = ["foo :: bar", "fiz :: buz"] - query = queries.get_os_query(os, terms, "", classifiers) + query = queries.get_opensearch_query(opensearch, terms, "", classifiers) assert query.to_dict() == { "query": { @@ -237,11 +237,11 @@ def test_with_classifiers_with_terms(self): } def test_with_classifiers_with_no_terms(self): - os = Search() + opensearch = Search() terms = "" classifiers = ["foo :: bar", "fiz :: buz"] - query = queries.get_os_query(os, terms, "", classifiers) + query = queries.get_opensearch_query(opensearch, terms, "", classifiers) assert query.to_dict() == { "query": { @@ -262,11 +262,11 @@ def test_with_classifiers_with_no_terms(self): } def test_with_classifier_with_no_terms_and_order(self): - os = Search() + opensearch = Search() terms = "" classifiers = ["foo :: bar"] - query = queries.get_os_query(os, terms, "-created", classifiers) + query = queries.get_opensearch_query(opensearch, terms, "-created", classifiers) assert query.to_dict() == { "query": { diff --git a/tests/unit/test_views.py b/tests/unit/test_views.py index 126363bbe919..ea3af143a2c9 100644 --- a/tests/unit/test_views.py +++ b/tests/unit/test_views.py @@ -465,10 +465,10 @@ def test_with_a_query(self, monkeypatch, db_request, metrics, page): params["page"] = page db_request.params = params - db_request.os = pretend.stub() - os_query = pretend.stub() - get_os_query = pretend.call_recorder(lambda *a, **kw: os_query) - monkeypatch.setattr(views, "get_os_query", get_os_query) + db_request.opensearch = pretend.stub() + opensearch_query = pretend.stub() + get_opensearch_query = pretend.call_recorder(lambda *a, **kw: opensearch_query) + monkeypatch.setattr(views, "get_opensearch_query", get_opensearch_query) page_obj = pretend.stub(page_count=(page or 1) + 10, item_count=1000) page_cls = pretend.call_recorder(lambda *a, **kw: page_obj) @@ -485,11 +485,11 @@ 
def test_with_a_query(self, monkeypatch, db_request, metrics, page): "applied_filters": [], "available_filters": [], } - assert get_os_query.calls == [ - pretend.call(db_request.os, params.get("q"), "", []) + assert get_opensearch_query.calls == [ + pretend.call(db_request.opensearch, params.get("q"), "", []) ] assert page_cls.calls == [ - pretend.call(os_query, url_maker=url_maker, page=page or 1) + pretend.call(opensearch_query, url_maker=url_maker, page=page or 1) ] assert url_maker_factory.calls == [pretend.call(db_request)] assert metrics.histogram.calls == [ @@ -503,10 +503,10 @@ def test_with_classifiers(self, monkeypatch, db_request, metrics, page): params["page"] = page db_request.params = params - os_query = pretend.stub() - db_request.os = pretend.stub() - get_os_query = pretend.call_recorder(lambda *a, **kw: os_query) - monkeypatch.setattr(views, "get_os_query", get_os_query) + opensearch_query = pretend.stub() + db_request.opensearch = pretend.stub() + get_opensearch_query = pretend.call_recorder(lambda *a, **kw: opensearch_query) + monkeypatch.setattr(views, "get_opensearch_query", get_opensearch_query) classifier1 = ClassifierFactory.create(classifier="foo :: bar") classifier2 = ClassifierFactory.create(classifier="foo :: baz") @@ -543,11 +543,11 @@ def test_with_classifiers(self, monkeypatch, db_request, metrics, page): } assert ("fiz", [classifier3.classifier]) not in search_view["available_filters"] assert page_cls.calls == [ - pretend.call(os_query, url_maker=url_maker, page=page or 1) + pretend.call(opensearch_query, url_maker=url_maker, page=page or 1) ] assert url_maker_factory.calls == [pretend.call(db_request)] - assert get_os_query.calls == [ - pretend.call(db_request.os, params.get("q"), "", params.getall("c")) + assert get_opensearch_query.calls == [ + pretend.call(db_request.opensearch, params.get("q"), "", params.getall("c")) ] assert metrics.histogram.calls == [ pretend.call("warehouse.views.search.results", 1000) @@ -557,8 +557,8 @@ def test_returns_404_with_pagenum_too_high(self, monkeypatch, db_request, metric params = MultiDict({"page": 15}) db_request.params = params - os_query = pretend.stub() - db_request.os = pretend.stub(query=lambda *a, **kw: os_query) + opensearch_query = pretend.stub() + db_request.opensearch = pretend.stub(query=lambda *a, **kw: opensearch_query) page_obj = pretend.stub(page_count=10, item_count=1000) page_cls = pretend.call_recorder(lambda *a, **kw: page_obj) @@ -572,7 +572,7 @@ def test_returns_404_with_pagenum_too_high(self, monkeypatch, db_request, metric search(db_request) assert page_cls.calls == [ - pretend.call(os_query, url_maker=url_maker, page=15 or 1) + pretend.call(opensearch_query, url_maker=url_maker, page=15 or 1) ] assert url_maker_factory.calls == [pretend.call(db_request)] assert metrics.histogram.calls == [] @@ -581,8 +581,8 @@ def test_raises_400_with_pagenum_type_str(self, monkeypatch, db_request, metrics params = MultiDict({"page": "abc"}) db_request.params = params - os_query = pretend.stub() - db_request.os = pretend.stub(query=lambda *a, **kw: os_query) + opensearch_query = pretend.stub() + db_request.opensearch = pretend.stub(query=lambda *a, **kw: opensearch_query) page_obj = pretend.stub(page_count=10, item_count=1000) page_cls = pretend.call_recorder(lambda *a, **kw: page_obj) @@ -609,12 +609,14 @@ def test_return_413_when_query_too_long(self, db_request, metrics): pretend.call("warehouse.views.search.error", tags=["error:query_too_long"]) ] - def test_returns_503_when_os_unavailable(self, 
monkeypatch, db_request, metrics): + def test_returns_503_when_opensearch_unavailable( + self, monkeypatch, db_request, metrics + ): params = MultiDict({"page": 15}) db_request.params = params - os_query = pretend.stub() - db_request.os = pretend.stub(query=lambda *a, **kw: os_query) + opensearch_query = pretend.stub() + db_request.opensearch = pretend.stub(query=lambda *a, **kw: opensearch_query) def raiser(*args, **kwargs): raise opensearchpy.ConnectionError() diff --git a/warehouse/search/__init__.py b/warehouse/search/__init__.py index 53ad94394db1..a27b7179ec73 100644 --- a/warehouse/search/__init__.py +++ b/warehouse/search/__init__.py @@ -64,7 +64,7 @@ def execute_project_reindex(config, session): config.task(unindex_project).delay(project.normalized_name) -def os(request): +def opensearch(request): client = request.registry["opensearch.client"] doc_types = request.registry.get("search.doc_types", set()) index_name = request.registry["opensearch.index"] @@ -104,7 +104,7 @@ def includeme(config): config.registry["opensearch.index"] = p.path.strip("/") config.registry["opensearch.shards"] = int(qs.get("shards", ["1"])[0]) config.registry["opensearch.replicas"] = int(qs.get("replicas", ["0"])[0]) - config.add_request_method(os, name="os", reify=True) + config.add_request_method(opensearch, name="opensearch", reify=True) from warehouse.search.tasks import reindex diff --git a/warehouse/search/queries.py b/warehouse/search/queries.py index 3cd2cb2e63bf..0973da6bd117 100644 --- a/warehouse/search/queries.py +++ b/warehouse/search/queries.py @@ -48,7 +48,7 @@ ) -def get_os_query(es, terms, order, classifiers): +def get_opensearch_query(opensearch, terms, order, classifiers): """ Returns an OpenSearch query from data from the request. """ @@ -69,7 +69,7 @@ def get_os_query(es, terms, order, classifiers): ], ) if not terms: - query = es.query(classifier_q) if classifiers else es.query() + query = opensearch.query(classifier_q) if classifiers else opensearch.query() else: quoted_string, unquoted_string = filter_query(terms) bool_query = Q( @@ -84,7 +84,7 @@ def get_os_query(es, terms, order, classifiers): if len(terms) > 1: bool_query = bool_query | Q("prefix", normalized_name=terms) - query = es.query(bool_query) + query = opensearch.query(bool_query) query = query.suggest("name_suggestion", terms, term={"field": "name"}) query = query_for_order(query, order) diff --git a/warehouse/views.py b/warehouse/views.py index 0532a38fe95b..c13111341ca3 100644 --- a/warehouse/views.py +++ b/warehouse/views.py @@ -60,7 +60,7 @@ Release, ReleaseClassifiers, ) -from warehouse.search.queries import SEARCH_FILTER_ORDER, get_os_query +from warehouse.search.queries import SEARCH_FILTER_ORDER, get_opensearch_query from warehouse.utils.http import is_safe_url from warehouse.utils.paginate import ElasticsearchPage, paginate_url_factory from warehouse.utils.row_counter import RowCount @@ -327,7 +327,7 @@ def search(request): order = request.params.get("o", "") classifiers = request.params.getall("c") - query = get_os_query(request.os, querystring, order, classifiers) + query = get_opensearch_query(request.opensearch, querystring, order, classifiers) try: page_num = int(request.params.get("page", 1)) From 45852783d7af89a495e190233c49340cd432053e Mon Sep 17 00:00:00 2001 From: Ee Durbin Date: Thu, 27 Jun 2024 13:07:03 -0400 Subject: [PATCH 4/7] Rename wrapper/page --- tests/unit/test_views.py | 10 +++---- tests/unit/utils/test_paginate.py | 44 +++++++++++++++---------------- warehouse/utils/paginate.py | 6 ++--- 
warehouse/views.py | 4 +-- 4 files changed, 31 insertions(+), 33 deletions(-) diff --git a/tests/unit/test_views.py b/tests/unit/test_views.py index ea3af143a2c9..bd273acfd78a 100644 --- a/tests/unit/test_views.py +++ b/tests/unit/test_views.py @@ -472,7 +472,7 @@ def test_with_a_query(self, monkeypatch, db_request, metrics, page): page_obj = pretend.stub(page_count=(page or 1) + 10, item_count=1000) page_cls = pretend.call_recorder(lambda *a, **kw: page_obj) - monkeypatch.setattr(views, "ElasticsearchPage", page_cls) + monkeypatch.setattr(views, "OpenSearchPage", page_cls) url_maker = pretend.stub() url_maker_factory = pretend.call_recorder(lambda request: url_maker) @@ -520,7 +520,7 @@ def test_with_classifiers(self, monkeypatch, db_request, metrics, page): page_obj = pretend.stub(page_count=(page or 1) + 10, item_count=1000) page_cls = pretend.call_recorder(lambda *a, **kw: page_obj) - monkeypatch.setattr(views, "ElasticsearchPage", page_cls) + monkeypatch.setattr(views, "OpenSearchPage", page_cls) url_maker = pretend.stub() url_maker_factory = pretend.call_recorder(lambda request: url_maker) @@ -562,7 +562,7 @@ def test_returns_404_with_pagenum_too_high(self, monkeypatch, db_request, metric page_obj = pretend.stub(page_count=10, item_count=1000) page_cls = pretend.call_recorder(lambda *a, **kw: page_obj) - monkeypatch.setattr(views, "ElasticsearchPage", page_cls) + monkeypatch.setattr(views, "OpenSearchPage", page_cls) url_maker = pretend.stub() url_maker_factory = pretend.call_recorder(lambda request: url_maker) @@ -586,7 +586,7 @@ def test_raises_400_with_pagenum_type_str(self, monkeypatch, db_request, metrics page_obj = pretend.stub(page_count=10, item_count=1000) page_cls = pretend.call_recorder(lambda *a, **kw: page_obj) - monkeypatch.setattr(views, "ElasticsearchPage", page_cls) + monkeypatch.setattr(views, "OpenSearchPage", page_cls) url_maker = pretend.stub() url_maker_factory = pretend.call_recorder(lambda request: url_maker) @@ -621,7 +621,7 @@ def test_returns_503_when_opensearch_unavailable( def raiser(*args, **kwargs): raise opensearchpy.ConnectionError() - monkeypatch.setattr(views, "ElasticsearchPage", raiser) + monkeypatch.setattr(views, "OpenSearchPage", raiser) url_maker = pretend.stub() url_maker_factory = pretend.call_recorder(lambda request: url_maker) diff --git a/tests/unit/utils/test_paginate.py b/tests/unit/utils/test_paginate.py index 00311bc01ffa..ee56bdfeee3e 100644 --- a/tests/unit/utils/test_paginate.py +++ b/tests/unit/utils/test_paginate.py @@ -106,33 +106,33 @@ def execute(self): return FakeSuggestResult(data, total, self.options, self.suggestion) -class TestElasticsearchWrapper: +class TestOpenSearchWrapper: def test_slices_and_length(self): - wrapper = paginate._ElasticsearchWrapper(FakeQuery([1, 2, 3, 4, 5, 6])) + wrapper = paginate._OpenSearchWrapper(FakeQuery([1, 2, 3, 4, 5, 6])) assert wrapper[1:3] == [2, 3] assert len(wrapper) == 6 def test_slice_start_clamps_to_max(self): - wrapper = paginate._ElasticsearchWrapper(FakeQuery([1, 2, 3, 4, 5, 6])) + wrapper = paginate._OpenSearchWrapper(FakeQuery([1, 2, 3, 4, 5, 6])) wrapper.max_results = 5 assert wrapper[6:10] == [] assert len(wrapper) == 5 def test_slice_end_clamps_to_max(self): - wrapper = paginate._ElasticsearchWrapper(FakeQuery([1, 2, 3, 4, 5, 6])) + wrapper = paginate._OpenSearchWrapper(FakeQuery([1, 2, 3, 4, 5, 6])) wrapper.max_results = 5 assert wrapper[1:10] == [2, 3, 4, 5] assert len(wrapper) == 5 def test_second_slice_fails(self): - wrapper = paginate._ElasticsearchWrapper(FakeQuery([1, 
2, 3, 4, 5, 6])) + wrapper = paginate._OpenSearchWrapper(FakeQuery([1, 2, 3, 4, 5, 6])) wrapper[1:3] with pytest.raises(RuntimeError): wrapper[1:3] def test_len_before_slice_fails(self): - wrapper = paginate._ElasticsearchWrapper(FakeQuery([1, 2, 3, 4, 5, 6])) + wrapper = paginate._OpenSearchWrapper(FakeQuery([1, 2, 3, 4, 5, 6])) with pytest.raises(RuntimeError): len(wrapper) @@ -140,53 +140,53 @@ def test_len_before_slice_fails(self): def test_best_guess_suggestion(self): fake_option = pretend.stub() query = FakeSuggestQuery([1, 2, 3, 4, 5, 6], options=[fake_option]) - wrapper = paginate._ElasticsearchWrapper(query) + wrapper = paginate._OpenSearchWrapper(query) wrapper[1:3] assert wrapper.best_guess == fake_option def test_best_guess_suggestion_no_suggestions(self): query = FakeSuggestQuery([1, 2, 3, 4, 5, 6], suggestion=[]) - wrapper = paginate._ElasticsearchWrapper(query) + wrapper = paginate._OpenSearchWrapper(query) wrapper[1:3] assert wrapper.best_guess is None def test_best_guess_suggestion_no_options(self): query = FakeSuggestQuery([1, 2, 3, 4, 5, 6], options=[]) - wrapper = paginate._ElasticsearchWrapper(query) + wrapper = paginate._OpenSearchWrapper(query) wrapper[1:3] assert wrapper.best_guess is None -class TestElasticsearchWrapper6: +class TestOpenSearchWrapper6: def test_slices_and_length(self): - wrapper = paginate._ElasticsearchWrapper(FakeQuery6([1, 2, 3, 4, 5, 6])) + wrapper = paginate._OpenSearchWrapper(FakeQuery6([1, 2, 3, 4, 5, 6])) assert wrapper[1:3] == [2, 3] assert len(wrapper) == 6 def test_slice_start_clamps_to_max(self): - wrapper = paginate._ElasticsearchWrapper(FakeQuery6([1, 2, 3, 4, 5, 6])) + wrapper = paginate._OpenSearchWrapper(FakeQuery6([1, 2, 3, 4, 5, 6])) wrapper.max_results = 5 assert wrapper[6:10] == [] assert len(wrapper) == 5 def test_slice_end_clamps_to_max(self): - wrapper = paginate._ElasticsearchWrapper(FakeQuery6([1, 2, 3, 4, 5, 6])) + wrapper = paginate._OpenSearchWrapper(FakeQuery6([1, 2, 3, 4, 5, 6])) wrapper.max_results = 5 assert wrapper[1:10] == [2, 3, 4, 5] assert len(wrapper) == 5 def test_second_slice_fails(self): - wrapper = paginate._ElasticsearchWrapper(FakeQuery6([1, 2, 3, 4, 5, 6])) + wrapper = paginate._OpenSearchWrapper(FakeQuery6([1, 2, 3, 4, 5, 6])) wrapper[1:3] with pytest.raises(RuntimeError): wrapper[1:3] def test_len_before_slice_fails(self): - wrapper = paginate._ElasticsearchWrapper(FakeQuery6([1, 2, 3, 4, 5, 6])) + wrapper = paginate._OpenSearchWrapper(FakeQuery6([1, 2, 3, 4, 5, 6])) with pytest.raises(RuntimeError): len(wrapper) @@ -194,36 +194,34 @@ def test_len_before_slice_fails(self): def test_best_guess_suggestion(self): fake_option = pretend.stub() query = FakeSuggestQuery([1, 2, 3, 4, 5, 6], options=[fake_option]) - wrapper = paginate._ElasticsearchWrapper(query) + wrapper = paginate._OpenSearchWrapper(query) wrapper[1:3] assert wrapper.best_guess == fake_option def test_best_guess_suggestion_no_suggestions(self): query = FakeSuggestQuery([1, 2, 3, 4, 5, 6], suggestion=[]) - wrapper = paginate._ElasticsearchWrapper(query) + wrapper = paginate._OpenSearchWrapper(query) wrapper[1:3] assert wrapper.best_guess is None def test_best_guess_suggestion_no_options(self): query = FakeSuggestQuery([1, 2, 3, 4, 5, 6], options=[]) - wrapper = paginate._ElasticsearchWrapper(query) + wrapper = paginate._OpenSearchWrapper(query) wrapper[1:3] assert wrapper.best_guess is None -def test_elasticsearch_page_has_wrapper(monkeypatch): +def test_opensearch_page_has_wrapper(monkeypatch): page_obj = pretend.stub() page_cls = 
pretend.call_recorder(lambda *a, **kw: page_obj) monkeypatch.setattr(paginate, "Page", page_cls) - assert paginate.ElasticsearchPage("first", second="foo") is page_obj + assert paginate.OpenSearchPage("first", second="foo") is page_obj assert page_cls.calls == [ - pretend.call( - "first", second="foo", wrapper_class=paginate._ElasticsearchWrapper - ) + pretend.call("first", second="foo", wrapper_class=paginate._OpenSearchWrapper) ] diff --git a/warehouse/utils/paginate.py b/warehouse/utils/paginate.py index 94dbeea77efe..340ad6b9317a 100644 --- a/warehouse/utils/paginate.py +++ b/warehouse/utils/paginate.py @@ -13,7 +13,7 @@ from paginate import Page -class _ElasticsearchWrapper: +class _OpenSearchWrapper: max_results = 10000 def __init__(self, query): @@ -56,8 +56,8 @@ def __len__(self): return min(self.results.hits.total["value"], self.max_results) -def ElasticsearchPage(*args, **kwargs): # noqa - kwargs.setdefault("wrapper_class", _ElasticsearchWrapper) +def OpenSearchPage(*args, **kwargs): # noqa + kwargs.setdefault("wrapper_class", _OpenSearchWrapper) return Page(*args, **kwargs) diff --git a/warehouse/views.py b/warehouse/views.py index c13111341ca3..a93a0b6542b6 100644 --- a/warehouse/views.py +++ b/warehouse/views.py @@ -62,7 +62,7 @@ ) from warehouse.search.queries import SEARCH_FILTER_ORDER, get_opensearch_query from warehouse.utils.http import is_safe_url -from warehouse.utils.paginate import ElasticsearchPage, paginate_url_factory +from warehouse.utils.paginate import OpenSearchPage, paginate_url_factory from warehouse.utils.row_counter import RowCount JSON_REGEX = r"^/pypi/([^\/]+)\/?([^\/]+)?/json\/?$" @@ -335,7 +335,7 @@ def search(request): raise HTTPBadRequest("'page' must be an integer.") try: - page = ElasticsearchPage( + page = OpenSearchPage( query, page=page_num, url_maker=paginate_url_factory(request) ) except opensearchpy.TransportError: From b8156da38baa8ff5c65e95ddca125e90268bf3a3 Mon Sep 17 00:00:00 2001 From: Ee Durbin Date: Thu, 27 Jun 2024 13:07:56 -0400 Subject: [PATCH 5/7] rename env var --- dev/environment | 2 +- warehouse/config.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/environment b/dev/environment index cbfd78736d93..787b083958ac 100644 --- a/dev/environment +++ b/dev/environment @@ -11,7 +11,7 @@ BROKER_URL=sqs://localstack:4566/?region=us-east-1&queue_name_prefix=warehouse-d DATABASE_URL=postgresql+psycopg://postgres@db/warehouse -ELASTICSEARCH_URL=http://opensearch:9200/development +OPENSEARCH_URL=http://opensearch:9200/development REDIS_URL=redis://redis:6379/0 diff --git a/warehouse/config.py b/warehouse/config.py index f4d8e270164c..59ca22dc222a 100644 --- a/warehouse/config.py +++ b/warehouse/config.py @@ -270,7 +270,7 @@ def configure(settings=None): maybe_set(settings, "celery.scheduler_url", "REDIS_URL") maybe_set(settings, "oidc.jwk_cache_url", "REDIS_URL") maybe_set(settings, "database.url", "DATABASE_URL") - maybe_set(settings, "opensearch.url", "ELASTICSEARCH_URL") + maybe_set(settings, "opensearch.url", "OPENSEARCH_URL") maybe_set(settings, "sentry.dsn", "SENTRY_DSN") maybe_set(settings, "sentry.transport", "SENTRY_TRANSPORT") maybe_set(settings, "sessions.url", "REDIS_URL") From 39da688288945dc54e72e1633ace23c27785ffef Mon Sep 17 00:00:00 2001 From: Ee Durbin Date: Thu, 27 Jun 2024 13:08:23 -0400 Subject: [PATCH 6/7] update docs --- docker-compose.override.yaml-sample | 2 +- docs/dev/architecture.md | 8 ++++---- docs/dev/development/getting-started.rst | 20 ++++++++++---------- 
warehouse/locale/messages.pot | 4 ++-- warehouse/templates/pages/help.html | 2 +- 5 files changed, 18 insertions(+), 18 deletions(-) diff --git a/docker-compose.override.yaml-sample b/docker-compose.override.yaml-sample index 8977b966911c..40f0bc439ccf 100644 --- a/docker-compose.override.yaml-sample +++ b/docker-compose.override.yaml-sample @@ -34,7 +34,7 @@ services: dev-docs: *disable-service user-docs: *disable-service - elasticsearch: + opensearch: # You can also add selective environment variables environment: logger.level: WARN # default INFO is pretty noisy diff --git a/docs/dev/architecture.md b/docs/dev/architecture.md index 693a2e4e8487..6ebaf0dbcf08 100644 --- a/docs/dev/architecture.md +++ b/docs/dev/architecture.md @@ -91,12 +91,12 @@ C4Container Container(camo, "Camo", "image proxy") Container(web_app, "Web", "Python (Pyramid, SQLAlchemy)", "Delivers HTML and API content") SystemQueue(sqs, "AWS SQS", "task broker") - SystemDb(elasticsearch, "Elasticsearch", "Index of projects, packages, metadata") + SystemDb(opensearch, "OpenSearch", "Index of projects, packages, metadata") SystemDb(db, "Postgres Database", "Store project, package metadata, user details") SystemDb(redis, "Redis", "Store short-term cache data") Rel(web_app, sqs, "queue tasks") - Rel(web_app, elasticsearch, "search for projects") + Rel(web_app, opensearch, "search for projects") Rel(web_app, db, "store/retrieve most data") Rel(web_app, redis, "cache data") } @@ -153,7 +153,7 @@ C4Container Container_Boundary(c1, "Supporting Systems") { SystemDb(redis, "Redis", "Store short-term cache data") SystemQueue(sqs, "AWS SQS", "task broker") - SystemDb(elasticsearch, "Elasticsearch", "Index of projects, packages, metadata") + SystemDb(opensearch, "OpenSearch", "Index of projects, packages, metadata") SystemDb(db, "Postgres Database", "Store project, package metadata, user details") System(ses, "AWS SES", "Simple Email Service") } @@ -163,7 +163,7 @@ C4Container BiRel(worker, sqs, "get next task/ack") BiRel(worker, redis, "store task results") BiRel(worker, db, "interact with models") - BiRel(worker, elasticsearch, "update search index") + BiRel(worker, opensearch, "update search index") Rel(worker, fastly, "purge URLs") Rel(worker, ses, "send emails") diff --git a/docs/dev/development/getting-started.rst b/docs/dev/development/getting-started.rst index f68a780873b2..a9367a9f872e 100644 --- a/docs/dev/development/getting-started.rst +++ b/docs/dev/development/getting-started.rst @@ -188,8 +188,8 @@ application. (on Windows by editing the config file found at ``C:\Users\\AppData\Local\Docker\wsl``). If you are using Linux, you may need to configure the maximum map count to get - the `elasticsearch` up and running. According to the - `documentation `_ + the `opensearch` up and running. According to the + `documentation `_ this can be set temporarily: .. code-block:: console @@ -200,9 +200,9 @@ application. :file:`/etc/sysctl.conf`. Also check that you have more than 5% disk space free, otherwise - elasticsearch will become read only. See ``flood_stage`` in the - `elasticsearch disk allocation docs - `_. + opensearch will become read only. See ``flood_stage`` in the + `opensearch disk allocation docs + `_. Once ``make build`` has finished, run the command: @@ -414,10 +414,10 @@ Errors when executing ``make initdb`` * If ``make initdb`` fails with a timeout like:: - urllib3.exceptions.ConnectTimeoutError: (, 'Connection to elasticsearch timed out. 
(connect timeout=30)')
+      urllib3.exceptions.ConnectTimeoutError: (, 'Connection to opensearch timed out. (connect timeout=30)')
 
   you might need to increase the amount of memory allocated to docker, since
-  elasticsearch wants a lot of memory (Dustin gives warehouse ~4GB locally).
+  opensearch wants a lot of memory (Dustin gives warehouse ~4GB locally).
   Refer to the tip under :ref:`running-warehouse-containers` section for more
   details.
@@ -478,7 +478,7 @@ Docker please raise an issue in
 Disabling services locally
 ^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-Some services, such as Elasticsearch, consume a lot of resources when running
+Some services, such as OpenSearch, consume a lot of resources when running
 locally, but might not always be necessary when doing local development.
 
 To disable these locally, you can create a ``docker-compose.override.yaml``
@@ -490,8 +490,8 @@ individually disable services, modify their entrypoint to do something else:
     version: "3"
     services:
-      elasticsearch:
-        entrypoint: ["echo", "Elasticsearch disabled"]
+      opensearch:
+        entrypoint: ["echo", "OpenSearch disabled"]
 
 Note that disabling services might cause things to fail in unexpected ways.
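Reviewer note, not part of the patch: after switching containers, a quick way to confirm the dev cluster is actually reachable before blaming memory is to poke it with the opensearch-py client this series introduces. The snippet below is only a local sketch -- the filename, the `docker compose exec web` invocation, and the hard-coded host are assumptions based on the compose file in this branch (the security plugin is disabled there, so no credentials are needed); adjust it if your override differs.

    # check_opensearch.py -- illustrative only, not shipped with this change.
    # Run it somewhere http://opensearch:9200 resolves, e.g.:
    #   docker compose exec web python check_opensearch.py
    from opensearchpy import OpenSearch

    client = OpenSearch(hosts=["http://opensearch:9200"])

    # `make initdb` needs the cluster to answer; wait for at least "yellow".
    health = client.cluster.health(wait_for_status="yellow", timeout="30s")
    print("cluster status:", health["status"])

    # The "development" index (from OPENSEARCH_URL) only exists once the
    # reindex task has run, e.g. as part of `make initdb`.
    print("development index present:", client.indices.exists(index="development"))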

diff --git a/warehouse/locale/messages.pot b/warehouse/locale/messages.pot
index 2afcd3718942..64a745f044c7 100644
--- a/warehouse/locale/messages.pot
+++ b/warehouse/locale/messages.pot
@@ -8418,8 +8418,8 @@ msgstr ""
 #: warehouse/templates/pages/help.html:897
 #, python-format
 msgid ""
-"If you have skills in Python, ElasticSearch, HTML, SCSS, JavaScript, or "
-"SQLAlchemy then skim our \"Getting "
 "started\" guide, then take a look at the
diff --git a/warehouse/templates/pages/help.html b/warehouse/templates/pages/help.html
--- a/warehouse/templates/pages/help.html
+++ b/warehouse/templates/pages/help.html
 {{ contributing() }}
 
 {% trans %}Development:{% endtrans %} {% trans %}Warehouse is open source, and we would love to see some new faces working on the project. You do not need to be an experienced open-source developer to make a contribution – in fact, we'd love to help you make your first open source pull request!{% endtrans %}
 
 {% trans getting_started_href='https://warehouse.pypa.io/development/getting-started/', issue_tracker_href='https://github.com/pypi/warehouse/issues', good_first_issue_href='https://github.com/pypi/warehouse/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22', title=gettext('External link') %}
- If you have skills in Python, ElasticSearch, HTML, SCSS, JavaScript, or SQLAlchemy then skim our
+ If you have skills in Python, Full-Text Search, HTML, SCSS, JavaScript, or SQLAlchemy then skim our
  "Getting started" guide, then take a look at the issue tracker. We've created a 'Good first issue' label – we recommend you start here.

From 69f9ab1fd75019fb443f88ed3b9aef4a61233e94 Mon Sep 17 00:00:00 2001
From: Ee Durbin
Date: Thu, 27 Jun 2024 13:08:33 -0400
Subject: [PATCH 7/7] remove refs to dead deps

---
 .github/dependabot.yml | 2 --
 pyproject.toml         | 1 -
 2 files changed, 3 deletions(-)

diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 754fcf24075e..c136d19cde95 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -11,8 +11,6 @@ updates:
       - dependency-type: indirect
     rebase-strategy: "disabled"
     ignore:
-      # Always ignore elasticsearch, future versions are always incompatible with our provider
-      - dependency-name: "elasticsearch"
       # These update basically every day, and 99.9% of the time we don't care
       - dependency-name: "boto3"
       - dependency-name: "boto3-stubs"
diff --git a/pyproject.toml b/pyproject.toml
index c93378a5ab24..b093bd2e55dc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -56,7 +56,6 @@ module = [
     "b2sdk.*",  # https://github.com/Backblaze/b2-sdk-python/issues/148
     "celery.app.backends.*",
     "celery.backends.redis.*",
-    "elasticsearch_dsl.*",  # https://github.com/elastic/elasticsearch-dsl-py/issues/1533
     "github_reserved_names.*",
     "google.cloud.*",
     "forcediphttpsadapter.*",
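Reviewer note, not part of the series: the pagination rename in patches 3 and 4 is mechanical, but the wrapper's contract is easier to see outside a diff. The wrapped query is executed exactly once, on the first slice (hence the RuntimeError tests for a second slice or an early len()), suggestion options are cached for `best_guess`, and the reported length is clamped to `max_results` (10,000), matching the search backend's default `max_result_window`. Below is a rough end-to-end sketch of how the renamed pieces fit together; the index name, fields, and suggester are placeholders, not the real warehouse query code.

    # Illustrative only; `request` is a Pyramid request, everything else is assumed.
    import opensearchpy
    from opensearchpy import OpenSearch, Search

    from warehouse.utils.paginate import OpenSearchPage, paginate_url_factory


    def search_page(request, terms: str, page_num: int):
        # warehouse builds its client from the "opensearch.url" setting
        # (OPENSEARCH_URL, patch 5); hard-coded here for brevity.
        client = OpenSearch(hosts=["http://opensearch:9200"])
        query = (
            Search(using=client, index="development")
            .query("multi_match", query=terms, fields=["name", "summary"])
            .suggest("name_suggestion", terms, term={"field": "name"})
        )
        try:
            # The wrapper runs the query here, once; len(page) is capped at 10,000.
            return OpenSearchPage(
                query, page=page_num, url_maker=paginate_url_factory(request)
            )
        except opensearchpy.TransportError:
            # warehouse/views.py turns this into an HTTP error response instead.
            return None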