diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index dc9daa7d..b384d056 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,88 +7,20 @@ on: jobs: test: - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest strategy: fail-fast: false matrix: - python-version: ["2.7", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11"] - django-version: ["1.11", "2.0", "2.2", "2.1", "3.0", "3.1","3.2", "4.0", "4.1",] + python-version: ["3.8", "3.9", "3.10", "3.11"] + django-version: ["3.2", "4.1", "4.2"] es-dsl-version: ["6.4", "7.4"] - es-version: ["7.13.4"] + es-version: ["8.10.2"] exclude: - - python-version: "2.7" - django-version: "2.0" - - python-version: "2.7" - django-version: "2.1" - - python-version: "2.7" - django-version: "2.2" - - python-version: "2.7" - django-version: "3.0" - - python-version: "2.7" - django-version: "3.1" - - python-version: "2.7" - django-version: "3.2" - - python-version: "2.7" - django-version: "4.0" - - python-version: "2.7" - django-version: "4.1" - - - python-version: "3.6" - django-version: "4.0" - - python-version: "3.6" - django-version: "4.1" - - - python-version: "3.7" - django-version: "4.0" - - python-version: "3.7" - django-version: "4.1" - - - python-version: "3.8" - django-version: "1.11" - - python-version: "3.8" - django-version: "2.0" - - python-version: "3.8" - django-version: "2.1" - - - python-version: "3.9" - django-version: "1.11" - - python-version: "3.9" - django-version: "2.0" - - python-version: "3.9" - django-version: "2.1" - - - python-version: "3.10" - django-version: "1.11" - - python-version: "3.10" - django-version: "2.0" - - python-version: "3.10" - django-version: "2.1" - - python-version: "3.10" - django-version: "2.2" - - python-version: "3.10" - django-version: "3.0" - - python-version: "3.10" - django-version: "3.1" - - - python-version: "3.11" - django-version: "1.11" - - python-version: "3.11" - django-version: "2.0" - - python-version: "3.11" - django-version: "2.1" - - python-version: "3.11" - django-version: "2.2" - - python-version: "3.11" - django-version: "3.0" - - python-version: "3.11" - django-version: "3.1" - python-version: "3.11" django-version: "3.2" - - python-version: "3.11" - django-version: "4.0" steps: - name: Install and Run Elasticsearch @@ -96,15 +28,15 @@ jobs: with: stack-version: ${{ matrix.es-version }} - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Install Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Cache Pip Dependencies - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: ~/.cache/pip key: ${{ runner.os }}-pip-${{ hashFiles('requirements_test.txt') }} @@ -122,6 +54,7 @@ jobs: run: | TOX_ENV=$(echo "py${{ matrix.python-version }}-django-${{ matrix.django-version }}-es${{ matrix.es-dsl-version }}" | tr -d .) python -m tox -e $TOX_ENV -- --elasticsearch + python -m tox -e $TOX_ENV -- --elasticsearch --signal-processor celery - name: Publish Coverage Report - uses: codecov/codecov-action@v1 + uses: codecov/codecov-action@v3 diff --git a/README.rst b/README.rst index 81c46f90..eb29ad89 100644 --- a/README.rst +++ b/README.rst @@ -30,28 +30,28 @@ Features - Index fast using `parallel` indexing. 
- Requirements - - Django >= 1.11 - - Python 2.7, 3.5, 3.6, 3.7, 3.8 + - Django >= 3.2 + - Python 3.8, 3.9, 3.10, 3.11 **Elasticsearch Compatibility:** The library is compatible with all Elasticsearch versions since 5.x **but you have to use a matching major version:** +- For Elasticsearch 8.0 and later, use the major version 8 (8.x.y) of the library. + - For Elasticsearch 7.0 and later, use the major version 7 (7.x.y) of the library. - For Elasticsearch 6.0 and later, use the major version 6 (6.x.y) of the library. -- For Elasticsearch 5.0 and later, use the major version 0.5 (0.5.x) of the library. - .. code-block:: python + # Elasticsearch 8.x + elasticsearch-dsl>=8.0.0,<9.0.0 + # Elasticsearch 7.x elasticsearch-dsl>=7.0.0,<8.0.0 # Elasticsearch 6.x elasticsearch-dsl>=6.0.0,<7.0.0 - # Elasticsearch 5.x - elasticsearch-dsl>=0.5.1,<6.0.0 - .. _Search: http://elasticsearch-dsl.readthedocs.io/en/stable/search_dsl.html diff --git a/django_elasticsearch_dsl/__init__.py b/django_elasticsearch_dsl/__init__.py index 109e0a48..04d7ad59 100644 --- a/django_elasticsearch_dsl/__init__.py +++ b/django_elasticsearch_dsl/__init__.py @@ -13,5 +13,3 @@ def autodiscover(): autodiscover_modules('documents') -if django.VERSION < (3, 2): - default_app_config = 'django_elasticsearch_dsl.apps.DEDConfig' diff --git a/django_elasticsearch_dsl/documents.py b/django_elasticsearch_dsl/documents.py index dcbfd784..4671064d 100644 --- a/django_elasticsearch_dsl/documents.py +++ b/django_elasticsearch_dsl/documents.py @@ -219,6 +219,13 @@ def _get_actions(self, object_list, action): for object_instance in object_list: if action == 'delete' or self.should_index_object(object_instance): yield self._prepare_action(object_instance, action) + + def get_actions(self, object_list, action): + """ + Generate the elasticsearch payload. 
+ """ + return self._get_actions(object_list, action) + def _bulk(self, *args, **kwargs): """Helper for switching between normal and parallel bulk operation""" diff --git a/django_elasticsearch_dsl/management/commands/search_index.py b/django_elasticsearch_dsl/management/commands/search_index.py index 4137c370..06bf8519 100644 --- a/django_elasticsearch_dsl/management/commands/search_index.py +++ b/django_elasticsearch_dsl/management/commands/search_index.py @@ -161,7 +161,7 @@ def _delete_alias_indices(self, alias): alias_delete_actions = [ {"remove_index": {"index": index}} for index in alias_indices ] - self.es_conn.indices.update_aliases({"actions": alias_delete_actions}) + self.es_conn.indices.update_aliases(actions=alias_delete_actions) for index in alias_indices: self.stdout.write("Deleted index '{}'".format(index)) @@ -231,7 +231,7 @@ def _update_alias(self, alias, new_index, alias_exists, options): {"remove_index": {"index": index}} for index in old_indices ] - self.es_conn.indices.update_aliases({"actions": alias_actions}) + self.es_conn.indices.update_aliases(actions=alias_actions) if delete_existing_index: self.stdout.write("Deleted index '{}'".format(alias)) @@ -247,7 +247,7 @@ def _update_alias(self, alias, new_index, alias_exists, options): if alias_delete_actions and not options['use_alias_keep_index']: self.es_conn.indices.update_aliases( - {"actions": alias_delete_actions} + actions=alias_delete_actions ) for index in old_indices: self.stdout.write("Deleted index '{}'".format(index)) diff --git a/django_elasticsearch_dsl/registries.py b/django_elasticsearch_dsl/registries.py index 72510610..e2623ddd 100644 --- a/django_elasticsearch_dsl/registries.py +++ b/django_elasticsearch_dsl/registries.py @@ -174,5 +174,11 @@ def get_indices(self, models=None): return set(iterkeys(self._indices)) + def __contains__(self, model): + """ + Checks that model is in registry + """ + return model in self._models or model in self._related_models + registry = DocumentRegistry() diff --git a/django_elasticsearch_dsl/signals.py b/django_elasticsearch_dsl/signals.py index 35a631c4..48f42249 100644 --- a/django_elasticsearch_dsl/signals.py +++ b/django_elasticsearch_dsl/signals.py @@ -7,10 +7,13 @@ from __future__ import absolute_import from django.db import models +from django.apps import apps from django.dispatch import Signal - from .registries import registry - +from django.core.exceptions import ObjectDoesNotExist +from importlib import import_module +# Sent after document indexing is completed +post_index = Signal() class BaseSignalProcessor(object): """Base signal processor. @@ -96,6 +99,124 @@ def teardown(self): models.signals.m2m_changed.disconnect(self.handle_m2m_changed) models.signals.pre_delete.disconnect(self.handle_pre_delete) +try: + from celery import shared_task +except ImportError: + pass +else: + class CelerySignalProcessor(RealTimeSignalProcessor): + """Celery signal processor. + + Allows automatic updates on the index as delayed background tasks using + Celery. + + NB: We cannot process deletes as background tasks. + By the time the Celery worker would pick up the delete job, the + model instance would already deleted. We can get around this by + setting Celery to use `pickle` and sending the object to the worker, + but using `pickle` opens the application up to security concerns. + """ -# Sent after document indexing is completed -post_index = Signal() + def handle_save(self, sender, instance, **kwargs): + """Handle save with a Celery task. 
+
+            Given an individual model instance, update the object in the index.
+            Update the related objects as well.
+            """
+            pk = instance.pk
+            app_label = instance._meta.app_label
+            model_name = instance.__class__.__name__
+
+            self.registry_update_task.delay(pk, app_label, model_name)
+            self.registry_update_related_task.delay(pk, app_label, model_name)
+
+        def handle_pre_delete(self, sender, instance, **kwargs):
+            """Handle removing the instance's data from related documents.
+
+            We need to do this before the actual delete, otherwise the
+            relation no longer exists and we cannot get the related model
+            instances.
+            """
+            self.prepare_registry_delete_related_task(instance)
+
+        def handle_delete(self, sender, instance, **kwargs):
+            """Handle delete.
+
+            Given an individual model instance, delete the object from index.
+            """
+            self.prepare_registry_delete_task(instance)
+
+        def prepare_registry_delete_related_task(self, instance):
+            """
+            Collect the related instances while this instance still exists
+            and hand the prepared actions off to Celery.
+            """
+            action = 'index'
+            for doc in registry._get_related_doc(instance):
+                doc_instance = doc(related_instance_to_ignore=instance)
+                try:
+                    related = doc_instance.get_instances_from_related(instance)
+                except ObjectDoesNotExist:
+                    related = None
+                if related is not None:
+                    doc_instance.update(related)
+                    if isinstance(related, models.Model):
+                        object_list = [related]
+                    else:
+                        object_list = related
+                    bulk_data = list(doc_instance._get_actions(object_list, action))
+                    doc_path = '{}.{}'.format(
+                        doc_instance.__class__.__module__,
+                        doc_instance.__class__.__name__,
+                    )
+                    self.registry_delete_task.delay(doc_path, bulk_data)
+
+        @shared_task()
+        def registry_delete_task(doc_label, data):
+            """
+            Handle the bulk delete data on the registry as a Celery task.
+
+            Delete and update need different implementations: an update can
+            re-read the changed data from the database to ensure eventual
+            consistency, but a delete has to collect the associated data
+            before the database record itself is removed.
+            """
+            module_name, class_name = doc_label.rsplit('.', 1)
+            doc_class = getattr(import_module(module_name), class_name)
+            parallel = True
+            doc_class()._bulk(data, parallel=parallel)
+
+        def prepare_registry_delete_task(self, instance):
+            """
+            Collect the bulk actions for the related documents before the
+            database record is deleted, then hand them off to Celery.
+ """ + action = 'delete' + for doc in registry._get_related_doc(instance): + doc_instance = doc(related_instance_to_ignore=instance) + try: + related = doc_instance.get_instances_from_related(instance) + except ObjectDoesNotExist: + related = None + if related is not None: + doc_instance.update(related) + if isinstance(related, models.Model): + object_list = [related] + else: + object_list = related + bulk_data = list(doc_instance.get_actions(object_list, action)), + self.registry_delete_task.delay(doc_instance.__class__.__name__, bulk_data) + + @shared_task() + def registry_update_task(pk, app_label, model_name): + """Handle the update on the registry as a Celery task.""" + try: + model = apps.get_model(app_label, model_name) + except LookupError: + pass + else: + registry.update( + model.objects.get(pk=pk) + ) + + @shared_task() + def registry_update_related_task(pk, app_label, model_name): + """Handle the related update on the registry as a Celery task.""" + try: + model = apps.get_model(app_label, model_name) + except LookupError: + pass + else: + registry.update_related( + model.objects.get(pk=pk) + ) diff --git a/docs/source/conf.py b/docs/source/conf.py index 21e7a789..8cad968b 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -299,7 +299,7 @@ # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { - '': ('https://docs.python.org/', None), + 'python': ('https://docs.python.org/', None), 'es-py': ('https://elasticsearch-py.readthedocs.io/en/master/', None) , 'es-dsl': ('https://elasticsearch-dsl.readthedocs.io/en/latest/', None), } diff --git a/docs/source/quickstart.rst b/docs/source/quickstart.rst index df7db5d5..f3b02b84 100644 --- a/docs/source/quickstart.rst +++ b/docs/source/quickstart.rst @@ -20,8 +20,9 @@ For example: ELASTICSEARCH_DSL={ 'default': { - 'hosts': 'localhost:9200' - }, + 'hosts': 'localhost:9200', + 'http_auth': ('username', 'password') + } } ``ELASTICSEARCH_DSL`` is then passed to ``elasticsearch-dsl-py.connections.configure`` (see here_). diff --git a/docs/source/settings.rst b/docs/source/settings.rst index 27e8e136..ae8d5eef 100644 --- a/docs/source/settings.rst +++ b/docs/source/settings.rst @@ -37,8 +37,15 @@ An example: Defaults to ``django_elasticsearch_dsl.signals.RealTimeSignalProcessor``. -You could, for instance, make a ``CelerySignalProcessor`` which would add -update jobs to the queue to for delayed processing. +Options: ``django_elasticsearch_dsl.signals.RealTimeSignalProcessor`` \ ``django_elasticsearch_dsl.signals.CelerySignalProcessor`` + +In this ``CelerySignalProcessor`` implementation, +Create and update operations will record the updated data primary key from the database and delay the time to find the association to ensure eventual consistency. +Delete operations are processed to obtain associated data before database records are deleted. +And celery needs to be pre-configured in the django project, for example `Using Celery with Django `. + +You could, for instance, make a ``CustomSignalProcessor`` which would apply +update jobs as your wish. 
ELASTICSEARCH_DSL_PARALLEL ========================== diff --git a/example/requirements.txt b/example/requirements.txt index 5cb1cf4a..2a45b67f 100644 --- a/example/requirements.txt +++ b/example/requirements.txt @@ -4,5 +4,5 @@ -e ../ django-autofixture==0.12.1 -Pillow==6.2.0 +Pillow==6.2.2 django==4.1.2 diff --git a/requirements.txt b/requirements.txt index cb1678c7..53d79b32 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,2 @@ -django>=1.9.6 -elasticsearch-dsl>=7.0.0,<8.0.0 - +django>=3.2 +elasticsearch-dsl>=8.0.0,<9.0.0 diff --git a/requirements_dev.txt b/requirements_dev.txt index f60740cb..43b5c3fe 100644 --- a/requirements_dev.txt +++ b/requirements_dev.txt @@ -1,6 +1,6 @@ -bumpversion==0.5.3 -wheel==0.32.2 -django>=2.0,<2.2 +bumpversion==0.6.0 +wheel==0.41.2 +django>=3.2 elasticsearch-dsl>=7.0.0,<8.0.0 twine sphinx diff --git a/requirements_test.txt b/requirements_test.txt index 9586a2ad..683b5270 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -1,8 +1,8 @@ -coverage==4.1 +coverage==7.3.1 mock>=1.0.1 flake8>=2.1.0 tox>=1.7.0 -Pillow==6.2.0 - +Pillow==10.0.0 +celery>=4.1.0 # Additional test requirements go here diff --git a/runtests.py b/runtests.py index 89451b54..0b613389 100644 --- a/runtests.py +++ b/runtests.py @@ -2,11 +2,40 @@ import sys import argparse +from celery import Celery + try: from django.conf import settings from django.test.utils import get_runner - def get_settings(): + def get_settings(signal_processor): + elasticsearch_dsl_default_settings = { + 'hosts': os.environ.get( + 'ELASTICSEARCH_URL', + 'https://127.0.0.1:9200' + ), + 'basic_auth': ( + os.environ.get('ELASTICSEARCH_USERNAME'), + os.environ.get('ELASTICSEARCH_PASSWORD') + ) + } + + elasticsearch_certs_path = os.environ.get( + 'ELASTICSEARCH_CERTS_PATH' + ) + if elasticsearch_certs_path: + elasticsearch_dsl_default_settings['ca_certs'] = ( + elasticsearch_certs_path + ) + else: + elasticsearch_dsl_default_settings['verify_certs'] = False + + PROCESSOR_CLASSES = { + 'realtime': 'django_elasticsearch_dsl.signals.RealTimeSignalProcessor', + 'celery': 'django_elasticsearch_dsl.signals.CelerySignalProcessor', + } + + signal_processor = PROCESSOR_CLASSES[signal_processor] settings.configure( DEBUG=True, USE_TZ=True, @@ -25,12 +54,13 @@ def get_settings(): SITE_ID=1, MIDDLEWARE_CLASSES=(), ELASTICSEARCH_DSL={ - 'default': { - 'hosts': os.environ.get('ELASTICSEARCH_URL', - '127.0.0.1:9200') - }, + 'default': elasticsearch_dsl_default_settings }, DEFAULT_AUTO_FIELD="django.db.models.BigAutoField", + CELERY_BROKER_URL='memory://localhost/', + CELERY_TASK_ALWAYS_EAGER=True, + CELERY_EAGER_PROPAGATES_EXCEPTIONS=True, + ELASTICSEARCH_DSL_SIGNAL_PROCESSOR=signal_processor ) try: @@ -41,6 +71,9 @@ def get_settings(): else: setup() + app = Celery() + app.config_from_object('django.conf:settings', namespace='CELERY') + app.autodiscover_tasks(lambda: settings.INSTALLED_APPS) return settings except ImportError: @@ -59,18 +92,56 @@ def make_parser(): const='localhost:9200', help="To run integration test against an Elasticsearch server", ) + parser.add_argument( + '--signal-processor', + nargs='?', + default='realtime', + choices=('realtime', 'celery'), + help='Defines which signal backend to choose' + ) + parser.add_argument( + '--elasticsearch-username', + nargs='?', + help="Username for Elasticsearch user" + ) + parser.add_argument( + '--elasticsearch-password', + nargs='?', + help="Password for Elasticsearch user" + ) + parser.add_argument( + '--elasticsearch-certs-path', + nargs='?', + 
help="Path to CA certificates for Elasticsearch" + ) return parser def run_tests(*test_args): args, test_args = make_parser().parse_known_args(test_args) if args.elasticsearch: - os.environ.setdefault('ELASTICSEARCH_URL', args.elasticsearch) + os.environ.setdefault('ELASTICSEARCH_URL', "https://127.0.0.1:9200") + + username = args.elasticsearch_username or "elastic" + password = args.elasticsearch_password or "changeme" + os.environ.setdefault( + 'ELASTICSEARCH_USERNAME', username + ) + os.environ.setdefault( + 'ELASTICSEARCH_PASSWORD', password + ) + + if args.elasticsearch_certs_path: + os.environ.setdefault( + 'ELASTICSEARCH_CERTS_PATH', args.elasticsearch_certs_path + ) if not test_args: test_args = ['tests'] - settings = get_settings() + signal_processor = args.signal_processor + + settings = get_settings(signal_processor) TestRunner = get_runner(settings) test_runner = TestRunner() diff --git a/setup.py b/setup.py index 35b815d8..a3c60e1b 100755 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ except ImportError: from distutils.core import setup -version = '7.3' +version = '8.0' if sys.argv[-1] == 'publish': try: @@ -33,6 +33,7 @@ setup( name='django-elasticsearch-dsl', version=version, + python_requires=">=3.8", description="""Wrapper around elasticsearch-dsl-py for django models""", long_description=readme + '\n\n' + history, author='Sabricot', @@ -42,7 +43,7 @@ ], include_package_data=True, install_requires=[ - 'elasticsearch-dsl>=7.2.0,<8.0.0', + 'elasticsearch-dsl>=8.9.0,<9.0.0', 'six', ], license="Apache Software License 2.0", @@ -51,25 +52,19 @@ classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: Django', - 'Framework :: Django :: 1.11', - 'Framework :: Django :: 2.0', - 'Framework :: Django :: 2.1', - 'Framework :: Django :: 2.2', - 'Framework :: Django :: 3.0', - 'Framework :: Django :: 3.1', 'Framework :: Django :: 3.2', - 'Framework :: Django :: 4.0', + 'Framework :: Django :: 4.1', + 'Framework :: Django :: 4.2', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Natural Language :: English', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11', ], + extras_require={ + 'celery': ["celery>=4.1.0"], + } ) diff --git a/tests/test_documents.py b/tests/test_documents.py index 78e5dc43..d7dd2ac7 100644 --- a/tests/test_documents.py +++ b/tests/test_documents.py @@ -1,5 +1,7 @@ import json from unittest import TestCase +from unittest import SkipTest + import django from django.db import models @@ -64,7 +66,19 @@ class Index: doc_type = 'car_document' -class DocTypeTestCase(TestCase): +class BaseDocTypeTestCase(object): + TARGET_PROCESSOR = None + + @classmethod + def setUpClass(cls): + from django.conf import settings + if cls.TARGET_PROCESSOR != settings.ELASTICSEARCH_DSL_SIGNAL_PROCESSOR: + raise SkipTest( + "Skipped because {} is required, not {}".format( + cls.TARGET_PROCESSOR, settings.ELASTICSEARCH_DSL_SIGNAL_PROCESSOR + ) + ) + super(BaseDocTypeTestCase,cls).setUpClass() def test_model_class_added(self): self.assertEqual(CarDocument.django.model, Car) @@ -494,8 +508,8 @@ def generate_id(cls, article): # Get the data from the elasticsearch low level API because # The generator 
get executed there. - data = json.loads(mock_bulk.call_args[1]['body'].split("\n")[0]) - assert data["index"]["_id"] == article.slug + data = json.loads(mock_bulk.call_args[1]['operations'][1]) + assert data['slug'] == article.slug @patch('elasticsearch_dsl.connections.Elasticsearch.bulk') def test_should_index_object_is_called(self, mock_bulk): @@ -535,6 +549,17 @@ def should_index_object(self, obj): d = ArticleDocument() d.update([article1, article2]) - data_body = mock_bulk.call_args[1]['body'] - self.assertTrue(article1.slug in data_body) - self.assertTrue(article2.slug not in data_body) + operations = mock_bulk.call_args[1]['operations'] + slugs = [ + json.loads(operation)['slug'] for operation in operations + if 'slug' in json.loads(operation) + ] + self.assertTrue(article1.slug in slugs) + self.assertTrue(article2.slug not in slugs) + +class RealTimeDocTypeTestCase(BaseDocTypeTestCase, TestCase): + TARGET_PROCESSOR = 'django_elasticsearch_dsl.signals.RealTimeSignalProcessor' + + +class CeleryDocTypeTestCase(BaseDocTypeTestCase, TestCase): + TARGET_PROCESSOR = 'django_elasticsearch_dsl.signals.CelerySignalProcessor' diff --git a/tests/test_integration.py b/tests/test_integration.py index fad7b753..f01d5781 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -3,7 +3,7 @@ import django from django.core.management import call_command -from django.test import TestCase +from django.test import TestCase, TransactionTestCase if django.VERSION < (4, 0): from django.utils.translation import ugettext_lazy as _ else: @@ -29,7 +29,7 @@ @unittest.skipUnless(is_es_online(), 'Elasticsearch is offline') -class IntegrationTestCase(ESTestCase, TestCase): +class IntegrationTestCase(ESTestCase, TransactionTestCase): def setUp(self): super(IntegrationTestCase, self).setUp() self.manufacturer = Manufacturer( diff --git a/tox.ini b/tox.ini index 2c62d204..54fd05f8 100644 --- a/tox.ini +++ b/tox.ini @@ -1,7 +1,8 @@ [tox] envlist = - py27-django-111-es74 - {py36,py37,py38,py39,py310,py311}-django-{111,20,21,22,30,31,32,40,41}-{es64,es74} + py{38,39,310}-django-{32,41,42}-{es64,es74} + py{311}-django-{41,42}-{es64,es74} + [testenv] setenv = @@ -9,23 +10,14 @@ setenv = commands = coverage run --source django_elasticsearch_dsl runtests.py {posargs} deps = - django-111: Django>=1.11,<2.0 - django-20: Django>=2.0,<2.1 - django-21: Django>=2.1,<2.2 - django-22: Django>=2.2,<2.3 - django-30: Django>=3.0,<3.1 - django-31: Django>=3.1,<3.2 django-32: Django>=3.2,<3.3 - django-40: Django>=4.0,<4.1 django-41: Django>=4.1,<4.2 + django-42: Django>=4.2,<4.3 es64: elasticsearch-dsl>=6.4.0,<7.0.0 es74: elasticsearch-dsl>=7.4.0,<8 -r{toxinidir}/requirements_test.txt basepython = - py27: python2.7 - py36: python3.6 - py37: python3.7 py38: python3.8 py39: python3.9 py310: python3.10