Browse Source

first commit

wxl 1 year ago
commit
bc16459628
100 changed files with 7352 additions and 0 deletions
  1. 20 0
      .gitignore
  2. 202 0
      LICENSE
  3. 18 0
      MANIFEST.in
  4. 249 0
      README.md
  5. 7 0
      dj_cqrs/__init__.py
  6. 211 0
      dj_cqrs/_validation.py
  7. 57 0
      dj_cqrs/admin.py
  8. 13 0
      dj_cqrs/apps.py
  9. 38 0
      dj_cqrs/constants.py
  10. 1 0
      dj_cqrs/controller/__init__.py
  11. 106 0
      dj_cqrs/controller/consumer.py
  12. 11 0
      dj_cqrs/controller/producer.py
  13. 18 0
      dj_cqrs/correlation.py
  14. 160 0
      dj_cqrs/dataclasses.py
  15. 80 0
      dj_cqrs/delay.py
  16. 60 0
      dj_cqrs/logger.py
  17. 1 0
      dj_cqrs/management/__init__.py
  18. 1 0
      dj_cqrs/management/commands/__init__.py
  19. 144 0
      dj_cqrs/management/commands/cqrs_bulk_dump.py
  20. 121 0
      dj_cqrs/management/commands/cqrs_bulk_load.py
  21. 209 0
      dj_cqrs/management/commands/cqrs_consume.py
  22. 131 0
      dj_cqrs/management/commands/cqrs_dead_letters.py
  23. 52 0
      dj_cqrs/management/commands/cqrs_deleted_diff_master.py
  24. 82 0
      dj_cqrs/management/commands/cqrs_deleted_diff_replica.py
  25. 40 0
      dj_cqrs/management/commands/cqrs_deleted_sync_replica.py
  26. 87 0
      dj_cqrs/management/commands/cqrs_diff_master.py
  27. 58 0
      dj_cqrs/management/commands/cqrs_diff_replica.py
  28. 77 0
      dj_cqrs/management/commands/cqrs_diff_sync.py
  29. 155 0
      dj_cqrs/management/commands/cqrs_sync.py
  30. 15 0
      dj_cqrs/management/utils.py
  31. 390 0
      dj_cqrs/managers.py
  32. 144 0
      dj_cqrs/metas.py
  33. 522 0
      dj_cqrs/mixins.py
  34. 51 0
      dj_cqrs/registries.py
  35. 169 0
      dj_cqrs/signals.py
  36. 53 0
      dj_cqrs/tracker.py
  37. 17 0
      dj_cqrs/transport/__init__.py
  38. 38 0
      dj_cqrs/transport/base.py
  39. 225 0
      dj_cqrs/transport/kombu.py
  40. 97 0
      dj_cqrs/transport/mixins.py
  41. 13 0
      dj_cqrs/transport/mock.py
  42. 439 0
      dj_cqrs/transport/rabbit_mq.py
  43. 82 0
      dj_cqrs/utils.py
  44. 24 0
      docs/admin.md
  45. 20 0
      docs/css/custom.css
  46. 75 0
      docs/custom_serialization.md
  47. 221 0
      docs/getting_started.md
  48. BIN
      docs/images/favicon.ico
  49. BIN
      docs/images/lifecycle.png
  50. BIN
      docs/images/logo_full.png
  51. 26 0
      docs/index.md
  52. 152 0
      docs/lifecycle.md
  53. 16 0
      docs/macros.py
  54. 117 0
      docs/reference.md
  55. 51 0
      docs/track_fields_changes.md
  56. 52 0
      docs/transports.md
  57. 36 0
      docs/utilities.md
  58. 47 0
      examples/demo_project/README.md
  59. 95 0
      examples/demo_project/docker-compose.yml
  60. 16 0
      examples/demo_project/master_service/Dockerfile
  61. 1 0
      examples/demo_project/master_service/__init__.py
  62. 0 0
      examples/demo_project/master_service/app/__init__.py
  63. 19 0
      examples/demo_project/master_service/app/asgi.py
  64. 230 0
      examples/demo_project/master_service/app/migrations/0001_initial.py
  65. 39 0
      examples/demo_project/master_service/app/migrations/0002_fixtures.py
  66. 0 0
      examples/demo_project/master_service/app/migrations/__init__.py
  67. 39 0
      examples/demo_project/master_service/app/models.py
  68. 36 0
      examples/demo_project/master_service/app/serializers.py
  69. 132 0
      examples/demo_project/master_service/app/settings.py
  70. 25 0
      examples/demo_project/master_service/app/urls.py
  71. 95 0
      examples/demo_project/master_service/app/views.py
  72. 18 0
      examples/demo_project/master_service/app/wsgi.py
  73. 22 0
      examples/demo_project/master_service/manage.py
  74. 4 0
      examples/demo_project/master_service/requirements.txt
  75. 193 0
      examples/demo_project/master_service/templates/main.html
  76. 16 0
      examples/demo_project/replica_service/Dockerfile
  77. 0 0
      examples/demo_project/replica_service/__init__.py
  78. 0 0
      examples/demo_project/replica_service/app/__init__.py
  79. 17 0
      examples/demo_project/replica_service/app/asgi.py
  80. 158 0
      examples/demo_project/replica_service/app/migrations/0001_initial.py
  81. 0 0
      examples/demo_project/replica_service/app/migrations/__init__.py
  82. 81 0
      examples/demo_project/replica_service/app/models.py
  83. 143 0
      examples/demo_project/replica_service/app/settings.py
  84. 9 0
      examples/demo_project/replica_service/app/urls.py
  85. 17 0
      examples/demo_project/replica_service/app/views.py
  86. 17 0
      examples/demo_project/replica_service/app/wsgi.py
  87. 22 0
      examples/demo_project/replica_service/manage.py
  88. 5 0
      examples/demo_project/replica_service/requirements.txt
  89. 116 0
      examples/demo_project/replica_service/templates/main.html
  90. 2 0
      integration_tests/.dockerignore
  91. 13 0
      integration_tests/Dockerfile
  92. 26 0
      integration_tests/Dockerfile.Master
  93. 26 0
      integration_tests/Dockerfile.MasterV1
  94. 24 0
      integration_tests/Dockerfile.Replica
  95. 23 0
      integration_tests/Dockerfile.ReplicaV1
  96. 42 0
      integration_tests/Makefile
  97. 1 0
      integration_tests/__init__.py
  98. 61 0
      integration_tests/docker-compose.yml
  99. 68 0
      integration_tests/kombu.yml
  100. 0 0
      integration_tests/manage.py

+ 20 - 0
.gitignore

@@ -0,0 +1,20 @@
+.idea/
+venv/
+
+__pycache__/
+*.py[cod]
+*$py.class
+build/
+dist/
+*.egg-info
+.eggs
+
+tests/reports/
+.coverage
+
+*.dump
+
+htmlcov
+.devcontainer
+
+docs/_build

+ 202 - 0
LICENSE

@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2023 Ingram Micro Inc. All rights reserved.
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.

+ 18 - 0
MANIFEST.in

@@ -0,0 +1,18 @@
+include LICENSE
+include VERSION
+include README.md
+include requirements/dev.txt
+include requirements/test.txt
+
+exclude requirements/docs.txt
+exclude sonar-project.properties
+exclude travis_compat_tests.sh
+exclude travis_integration_tests.sh
+exclude tox.ini
+prune docs
+prune tests
+prune integration_tests
+prune examples
+
+exclude .gitignore
+prune .github

File diff suppressed because it is too large
+ 249 - 0
README.md


+ 7 - 0
dj_cqrs/__init__.py

@@ -0,0 +1,7 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import django  # pragma: no cover
+
+
+if django.VERSION < (3, 2):  # pragma: no cover
+    default_app_config = 'dj_cqrs.apps.CQRSConfig'

+ 211 - 0
dj_cqrs/_validation.py

@@ -0,0 +1,211 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import logging
+from inspect import getfullargspec, isfunction
+
+from django.utils.module_loading import import_string
+
+from dj_cqrs.constants import (
+    DEFAULT_MASTER_AUTO_UPDATE_FIELDS,
+    DEFAULT_MASTER_MESSAGE_TTL,
+    DEFAULT_REPLICA_DELAY_QUEUE_MAX_SIZE,
+    DEFAULT_REPLICA_MAX_RETRIES,
+    DEFAULT_REPLICA_RETRY_DELAY,
+)
+from dj_cqrs.registries import MasterRegistry, ReplicaRegistry
+from dj_cqrs.transport import BaseTransport
+
+
+logger = logging.getLogger('django-cqrs')
+
+
+def validate_settings(settings):
+    is_master = bool(MasterRegistry.models)
+    is_replica = bool(ReplicaRegistry.models)
+    if (not is_master) and (not is_replica):  # pragma: no cover
+        return
+
+    assert hasattr(settings, 'CQRS'), 'CQRS configuration must be set in Django project settings.'
+
+    cqrs_settings = settings.CQRS
+    assert isinstance(cqrs_settings, dict), 'CQRS configuration must be dict.'
+
+    _validate_transport(cqrs_settings)
+
+    if is_master or ('master' in cqrs_settings):
+        _validate_master(cqrs_settings)
+
+    if is_replica or ('replica' in cqrs_settings):
+        _validate_replica(cqrs_settings)
+
+
+def _validate_transport(cqrs_settings):
+    transport_cls_location = cqrs_settings.get('transport')
+    if not transport_cls_location:
+        raise AssertionError('CQRS transport is not set.')
+
+    transport = import_string(transport_cls_location)
+    if not issubclass(transport, BaseTransport):
+        raise AssertionError(
+            'CQRS transport must be inherited from `dj_cqrs.transport.BaseTransport`.',
+        )
+
+
+def _validate_master(cqrs_settings):
+    default_master_settings = {
+        'master': {
+            'CQRS_AUTO_UPDATE_FIELDS': DEFAULT_MASTER_AUTO_UPDATE_FIELDS,
+            'CQRS_MESSAGE_TTL': DEFAULT_MASTER_MESSAGE_TTL,
+            'correlation_function': None,
+            'meta_function': None,
+        },
+    }
+
+    if 'master' not in cqrs_settings:
+        cqrs_settings.update(default_master_settings)
+        return
+
+    master_settings = cqrs_settings['master']
+    assert isinstance(master_settings, dict), 'CQRS master configuration must be dict.'
+
+    _validate_master_auto_update_fields(master_settings)
+    _validate_master_message_ttl(master_settings)
+    _validate_master_correlation_func(master_settings)
+    _validate_master_meta_func(master_settings)
+
+
+def _validate_master_auto_update_fields(master_settings):
+    if 'CQRS_AUTO_UPDATE_FIELDS' in master_settings:
+        assert isinstance(
+            master_settings['CQRS_AUTO_UPDATE_FIELDS'],
+            bool,
+        ), 'CQRS master CQRS_AUTO_UPDATE_FIELDS must be bool.'
+    else:
+        master_settings['CQRS_AUTO_UPDATE_FIELDS'] = DEFAULT_MASTER_AUTO_UPDATE_FIELDS
+
+
+def _validate_master_message_ttl(master_settings):
+    if 'CQRS_MESSAGE_TTL' in master_settings:
+        min_message_ttl = 1
+        message_ttl = master_settings['CQRS_MESSAGE_TTL']
+        if (message_ttl is not None) and (
+            not isinstance(message_ttl, int) or message_ttl < min_message_ttl
+        ):
+            # No error is raised for backward compatibility
+            # TODO: raise error in 2.0.0
+            logger.warning(
+                'Settings CQRS_MESSAGE_TTL=%s is invalid, using default %s.',
+                message_ttl,
+                DEFAULT_MASTER_MESSAGE_TTL,
+            )
+            master_settings['CQRS_MESSAGE_TTL'] = DEFAULT_MASTER_MESSAGE_TTL
+    else:
+        master_settings['CQRS_MESSAGE_TTL'] = DEFAULT_MASTER_MESSAGE_TTL
+
+
+def _validate_master_correlation_func(master_settings):
+    correlation_func = master_settings.get('correlation_function')
+    if not correlation_func:
+        master_settings['correlation_function'] = None
+    elif not callable(correlation_func):
+        raise AssertionError('CQRS master correlation_function must be callable.')
+
+
+def _validate_master_meta_func(master_settings):
+    meta_func = master_settings.get('meta_function')
+    if not meta_func:
+        master_settings['meta_function'] = None
+        return
+
+    if isinstance(meta_func, str):
+        try:
+            meta_func = import_string(meta_func)
+        except ImportError:
+            raise AssertionError('CQRS master meta_function import error.')
+
+    if not isfunction(meta_func):
+        raise AssertionError('CQRS master meta_function must be function.')
+
+    r = getfullargspec(meta_func)
+    if not r.varkw:
+        raise AssertionError('CQRS master meta_function must support **kwargs.')
+
+    master_settings['meta_function'] = meta_func
+
+
+def _validate_replica(cqrs_settings):
+    queue = cqrs_settings.get('queue')
+    assert queue, 'CQRS queue is not set.'
+    assert isinstance(queue, str), 'CQRS queue must be string.'
+
+    default_replica_settings = {
+        'replica': {
+            'CQRS_MAX_RETRIES': DEFAULT_REPLICA_MAX_RETRIES,
+            'CQRS_RETRY_DELAY': DEFAULT_REPLICA_RETRY_DELAY,
+            'delay_queue_max_size': DEFAULT_REPLICA_DELAY_QUEUE_MAX_SIZE,
+        },
+    }
+
+    if 'replica' not in cqrs_settings:
+        cqrs_settings.update(default_replica_settings)
+        return
+
+    replica_settings = cqrs_settings['replica']
+    assert isinstance(replica_settings, dict), 'CQRS replica configuration must be dict.'
+
+    _validate_replica_max_retries(replica_settings)
+    _validate_replica_retry_delay(replica_settings)
+    _validate_replica_delay_queue_max_size(replica_settings)
+
+
+def _validate_replica_max_retries(replica_settings):
+    if 'CQRS_MAX_RETRIES' in replica_settings:
+        min_retries = 0
+        max_retries = replica_settings['CQRS_MAX_RETRIES']
+        if (max_retries is not None) and (
+            not isinstance(max_retries, int) or max_retries < min_retries
+        ):
+            # No error is raised for backward compatibility
+            # TODO: raise error in 2.0.0
+            logger.warning(
+                'Replica setting CQRS_MAX_RETRIES=%s is invalid, using default %s.',
+                max_retries,
+                DEFAULT_REPLICA_MAX_RETRIES,
+            )
+            replica_settings['CQRS_MAX_RETRIES'] = DEFAULT_REPLICA_MAX_RETRIES
+    else:
+        replica_settings['CQRS_MAX_RETRIES'] = DEFAULT_REPLICA_MAX_RETRIES
+
+
+def _validate_replica_retry_delay(replica_settings):
+    min_retry_delay = 0
+    retry_delay = replica_settings.get('CQRS_RETRY_DELAY')
+    if 'CQRS_RETRY_DELAY' not in replica_settings:
+        replica_settings['CQRS_RETRY_DELAY'] = DEFAULT_REPLICA_RETRY_DELAY
+    elif not isinstance(retry_delay, int) or retry_delay < min_retry_delay:
+        # No error is raised for backward compatibility
+        # TODO: raise error in 2.0.0
+        logger.warning(
+            'Replica setting CQRS_RETRY_DELAY=%s is invalid, using default %s.',
+            retry_delay,
+            DEFAULT_REPLICA_RETRY_DELAY,
+        )
+        replica_settings['CQRS_RETRY_DELAY'] = DEFAULT_REPLICA_RETRY_DELAY
+
+
+def _validate_replica_delay_queue_max_size(replica_settings):
+    min_qsize = 0
+    max_qsize = replica_settings.get('delay_queue_max_size')
+    if 'delay_queue_max_size' not in replica_settings:
+        max_qsize = DEFAULT_REPLICA_DELAY_QUEUE_MAX_SIZE
+    elif (max_qsize is not None) and (not isinstance(max_qsize, int) or max_qsize <= min_qsize):
+        # No error is raised for backward compatibility
+        # TODO: raise error in 2.0.0
+        logger.warning(
+            'Settings delay_queue_max_size=%s is invalid, using default %s.',
+            max_qsize,
+            DEFAULT_REPLICA_DELAY_QUEUE_MAX_SIZE,
+        )
+        max_qsize = DEFAULT_REPLICA_DELAY_QUEUE_MAX_SIZE
+
+    replica_settings['delay_queue_max_size'] = max_qsize

+ 57 - 0
dj_cqrs/admin.py

@@ -0,0 +1,57 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+from django.utils.translation import gettext_lazy
+
+
+class CQRSAdminMasterSyncMixin:
+    """
+    Mixin that includes a custom action in AdminModel. This action allows synchronizing
+    master's model items from Django Admin page,
+    """
+
+    def get_actions(self, request):
+        """
+        Overriding method from AdminModel class; it is used to include the sync method in
+        the actions list.
+        """
+        if self.actions is not None and 'sync_items' not in self.actions:
+            self.actions = list(self.actions) + ['sync_items']
+        return super().get_actions(request)
+
+    def _cqrs_sync_queryset(self, queryset):
+        """
+        This function is used to adjust the QuerySet before sending the sync signal.
+
+        Args:
+            queryset (Queryset): Original queryset.
+
+        Returns:
+            (Queryset): Updated queryset.
+        """
+        return queryset
+
+    def sync_items(self, request, queryset):
+        """
+        This method synchronizes selected items from the Admin Page.
+    It is registered as a custom action in Django Admin.
+
+        Args:
+            request (Request): Original request.
+            queryset (Queryset): Original queryset.
+        """
+        items_not_synced = []
+        for item in self._cqrs_sync_queryset(queryset):
+            if not item.cqrs_sync():
+                items_not_synced.append(item)
+
+        total = len(queryset)
+        total_w_erros = len(items_not_synced)
+        total_sucess = total - total_w_erros
+        self.message_user(
+            request,
+            f'{total_sucess} successfully synced. {total_w_erros} failed: {items_not_synced}',
+        )
+
+    sync_items.short_description = gettext_lazy(
+        'Synchronize selected %(verbose_name_plural)s via CQRS',
+    )

+ 13 - 0
dj_cqrs/apps.py

@@ -0,0 +1,13 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+from django.apps import AppConfig
+from django.conf import settings
+
+from dj_cqrs._validation import validate_settings
+
+
+class CQRSConfig(AppConfig):
+    name = 'dj_cqrs'
+
+    def ready(self):
+        validate_settings(settings)

+ 38 - 0
dj_cqrs/constants.py

@@ -0,0 +1,38 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+ALL_BASIC_FIELDS = '__all__'
+
+FIELDS_TRACKER_FIELD_NAME = '__fields_tracker'
+TRACKED_FIELDS_ATTR_NAME = '__tracked_fields'
+
+
+class SignalType:
+    """Type of signal that generates this event."""
+
+    SAVE = 'SAVE'
+    """The master model has been saved."""
+
+    DELETE = 'DELETE'
+    """The master model has been deleted."""
+
+    SYNC = 'SYNC'
+    """The master model needs synchronization."""
+
+
+NO_QUEUE = 'None'
+
+DEFAULT_DEAD_MESSAGE_TTL = 864000  # 10 days
+
+DEFAULT_MASTER_AUTO_UPDATE_FIELDS = False
+DEFAULT_MASTER_MESSAGE_TTL = 86400  # 1 day
+
+DEFAULT_REPLICA_MAX_RETRIES = 30
+DEFAULT_REPLICA_RETRY_DELAY = 2  # seconds
+DEFAULT_REPLICA_DELAY_QUEUE_MAX_SIZE = 1000
+
+DB_VENDOR_PG = 'postgresql'
+DB_VENDOR_MYSQL = 'mysql'
+SUPPORTED_TIMEOUT_DB_VENDORS = {DB_VENDOR_MYSQL, DB_VENDOR_PG}
+
+PG_TIMEOUT_FLAG = 'statement timeout'
+MYSQL_TIMEOUT_ERROR_CODE = 3024

+ 1 - 0
dj_cqrs/controller/__init__.py

@@ -0,0 +1 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.

+ 106 - 0
dj_cqrs/controller/consumer.py

@@ -0,0 +1,106 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import copy
+import logging
+from contextlib import ExitStack
+
+from django.conf import settings
+from django.db import Error, close_old_connections, transaction
+
+from dj_cqrs.constants import SignalType
+from dj_cqrs.logger import log_timed_out_queries
+from dj_cqrs.registries import ReplicaRegistry
+from dj_cqrs.utils import apply_query_timeouts
+
+
+logger = logging.getLogger('django-cqrs')
+
+
+def consume(payload):
+    """Consumer controller.
+
+    :param dj_cqrs.dataclasses.TransportPayload payload: Consumed payload from master service.
+    """
+    payload = copy.deepcopy(payload)
+    return route_signal_to_replica_model(
+        payload.signal_type,
+        payload.cqrs_id,
+        payload.instance_data,
+        previous_data=payload.previous_data,
+        meta=payload.meta,
+        queue=payload.queue,
+    )
+
+
+def route_signal_to_replica_model(
+    signal_type,
+    cqrs_id,
+    instance_data,
+    previous_data=None,
+    meta=None,
+    queue=None,
+):
+    """Routes signal to model method to create/update/delete replica instance.
+
+    :param dj_cqrs.constants.SignalType signal_type: Consumed signal type.
+    :param str cqrs_id: Replica model CQRS unique identifier.
+    :param dict instance_data: Master model data.
+    :param dict or None previous_data: Previous model data for changed tracked fields, if exists.
+    :param dict or None meta: Payload metadata, if exists.
+    :param str or None queue: Synced queue.
+    """
+    if signal_type not in (SignalType.DELETE, SignalType.SAVE, SignalType.SYNC):
+        logger.error('Bad signal type "{0}" for CQRS_ID "{1}".'.format(signal_type, cqrs_id))
+        return
+
+    model_cls = ReplicaRegistry.get_model_by_cqrs_id(cqrs_id)
+    if not model_cls:
+        return
+
+    this_queue = settings.CQRS['queue']
+    if signal_type == SignalType.SYNC and model_cls.CQRS_ONLY_DIRECT_SYNCS and queue != this_queue:
+        return True
+
+    db_is_needed = not model_cls.CQRS_NO_DB_OPERATIONS
+    if db_is_needed:
+        close_old_connections()
+
+    is_meta_supported = model_cls.CQRS_META
+    try:
+        if db_is_needed:
+            apply_query_timeouts(model_cls)
+
+        with transaction.atomic(savepoint=False) if db_is_needed else ExitStack():
+            if signal_type == SignalType.DELETE:
+                if is_meta_supported:
+                    return model_cls.cqrs_delete(instance_data, meta=meta)
+
+                return model_cls.cqrs_delete(instance_data)
+
+            f_kw = {'previous_data': previous_data}
+            if is_meta_supported:
+                f_kw['meta'] = meta
+
+            if signal_type == SignalType.SAVE:
+                return model_cls.cqrs_save(instance_data, **f_kw)
+
+            if signal_type == SignalType.SYNC:
+                f_kw['sync'] = True
+                return model_cls.cqrs_save(instance_data, **f_kw)
+
+    except Error as e:
+        pk_name = getattr(model_cls._meta.pk, 'name', 'id')
+        pk_value = instance_data.get(pk_name)
+        cqrs_revision = instance_data.get('cqrs_revision')
+
+        logger.error(
+            '{0}\nCQRS {1} error: pk = {2}, cqrs_revision = {3} ({4}).'.format(
+                str(e),
+                signal_type,
+                pk_value,
+                cqrs_revision,
+                model_cls.CQRS_ID,
+            ),
+        )
+
+        log_timed_out_queries(e, model_cls)

+ 11 - 0
dj_cqrs/controller/producer.py

@@ -0,0 +1,11 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+from dj_cqrs.transport import current_transport
+
+
def produce(payload):
    """Producer controller: publish a payload through the configured transport.

    :param dj_cqrs.dataclasses.TransportPayload payload: message payload to publish.
    """
    transport = current_transport
    transport.produce(payload)

+ 18 - 0
dj_cqrs/correlation.py

@@ -0,0 +1,18 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+from django.conf import settings
+
+
def get_correlation_id(signal_type, cqrs_id, instance_pk, queue):
    """Resolve a correlation id via the user-configured correlation function.

    :param signal_type: Type of the signal for this message.
    :type signal_type: dj_cqrs.constants.SignalType
    :param cqrs_id: The unique CQRS identifier of the model.
    :type cqrs_id: str
    :param instance_pk: Primary key of the instance.
    :param queue: Queue to synchronize, defaults to None.
    :type queue: str, optional
    :return: Whatever the configured ``correlation_function`` returns, or None
        when no function is configured under ``settings.CQRS['master']``.
    """
    correlation_func = settings.CQRS.get('master', {}).get('correlation_function')
    if not correlation_func:
        return None
    return correlation_func(signal_type, cqrs_id, instance_pk, queue)

+ 160 - 0
dj_cqrs/dataclasses.py

@@ -0,0 +1,160 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+from dateutil.parser import parse as dateutil_parse
+from django.utils import timezone
+
+from dj_cqrs.correlation import get_correlation_id
+from dj_cqrs.utils import get_json_valid_value, get_message_expiration_dt
+
+
class TransportPayload:
    """Transport message payload.

    Args:
        signal_type (dj_cqrs.constants.SignalType): Type of the signal for this message.
        cqrs_id (str): The unique CQRS identifier of the model.
        instance_data (dict): Serialized data of the instance that
            generates the event.
        instance_pk (str): Primary key of the instance.
        queue (str): Queue to synchronize, defaults to None.
        previous_data (dict): Previous values for fields tracked for changes,
            defaults to None.
        correlation_id (str): Correlation ID of process, where this payload is used.
        retries (int): Current number of message retries.
        expires (datetime): Message expiration datetime, infinite if None.
        meta (dict): Payload metadata.
    """

    def __init__(
        self,
        signal_type,
        cqrs_id: str,
        instance_data: dict,
        instance_pk: str,
        queue: str = None,
        previous_data: dict = None,
        correlation_id: str = None,
        expires=None,
        retries: int = 0,
        meta: dict = None,
    ):
        # All state lives in name-mangled attributes and is exposed through the
        # read-only properties below; only `retries` has a setter.
        self.__signal_type = signal_type
        self.__cqrs_id = cqrs_id
        self.__instance_data = instance_data
        self.__instance_pk = instance_pk
        self.__queue = queue
        self.__previous_data = previous_data
        self.__meta = meta

        # Without an explicit id, defer to the optional correlation function
        # configured under settings.CQRS['master'] (which may return None).
        if correlation_id:
            self.__correlation_id = correlation_id
        else:
            self.__correlation_id = get_correlation_id(signal_type, cqrs_id, instance_pk, queue)

        self.__expires = expires
        self.__retries = retries

    @classmethod
    def from_message(cls, dct):
        """Builds payload from message data.

        Args:
            dct (dict): Deserialized message body data.

        Returns:
            (TransportPayload): TransportPayload instance.
        """
        # An explicit 'expires' key is honored as-is; None means "never expires".
        if 'expires' in dct:
            expires = dct['expires']
            if dct['expires'] is not None:
                # ISO-8601 string -> datetime.
                expires = dateutil_parse(dct['expires'])
        else:
            # Backward compatibility for old messages otherwise they are infinite by default.
            expires = get_message_expiration_dt()

        return cls(
            dct['signal_type'],
            dct['cqrs_id'],
            dct['instance_data'],
            dct.get('instance_pk'),
            previous_data=dct.get('previous_data'),
            correlation_id=dct.get('correlation_id'),
            expires=expires,
            retries=dct.get('retries') or 0,
            meta=dct.get('meta'),
        )

    @property
    def signal_type(self):
        return self.__signal_type

    @property
    def cqrs_id(self):
        return self.__cqrs_id

    @property
    def instance_data(self):
        return self.__instance_data

    @property
    def pk(self):
        return self.__instance_pk

    @property
    def queue(self):
        return self.__queue

    @property
    def previous_data(self):
        return self.__previous_data

    @property
    def correlation_id(self):
        return self.__correlation_id

    @property
    def meta(self):
        return self.__meta

    @property
    def expires(self):
        return self.__expires

    @property
    def retries(self):
        return self.__retries

    @retries.setter
    def retries(self, value):
        # NOTE: assert statements are stripped under `python -O`; callers are
        # expected to always pass non-negative integers.
        assert value >= 0, 'Payload retries field should be 0 or positive integer.'
        self.__retries = value

    def to_dict(self) -> dict:
        """Return the payload as a dictionary.

        Returns:
            (dict): This payload.
        """
        # Expiration is serialized with second precision (microseconds dropped)
        # so the ISO string round-trips cleanly through from_message().
        expires = self.__expires
        if expires:
            expires = expires.replace(microsecond=0).isoformat()

        return {
            'signal_type': self.__signal_type,
            'cqrs_id': self.__cqrs_id,
            'instance_data': self.__instance_data,
            'previous_data': self.__previous_data,
            'instance_pk': get_json_valid_value(self.__instance_pk),
            'correlation_id': get_json_valid_value(self.__correlation_id),
            'retries': self.__retries,
            'expires': expires,
            'meta': self.__meta,
        }

    def is_expired(self):
        """Checks if this payload is expired.

        Returns:
            (bool): True if payload is expired, False otherwise.
        """
        return self.__expires is not None and self.__expires <= timezone.now()

+ 80 - 0
dj_cqrs/delay.py

@@ -0,0 +1,80 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+from queue import Full, PriorityQueue
+
+from django.utils import timezone
+
+
class DelayMessage:
    """A message parked until its ETA passes.

    :param delivery_tag: The server-assigned and channel-specific delivery tag.
    :type delivery_tag: int
    :param payload: Transport payload.
    :type payload: dj_cqrs.dataclasses.TransportPayload
    :param eta: Time after which the message should be requeued.
    :type eta: datetime.datetime
    """

    def __init__(self, delivery_tag, payload, eta):
        self.delivery_tag = delivery_tag
        self.payload = payload
        self.eta = eta


class DelayQueue:
    """Priority queue of :class:`DelayMessage` objects ordered by ETA."""

    def __init__(self, max_size=None):
        if max_size is not None:
            assert max_size > 0, 'Delay queue max_size should be positive integer.'

        self._max_size = max_size
        self._queue = PriorityQueue()

    def get(self):
        """Pop and return the message with the earliest ETA.

        :rtype: DelayMessage
        """
        # Entries are (eta_timestamp, delivery_tag, message) tuples.
        return self._queue.get()[-1]

    def get_ready(self):
        """Returns messages with expired ETA.

        :return: delayed messages generator
        :rtype: typing.Generator[DelayMessage]
        """
        while self.qsize():
            candidate = self.get()
            if candidate.eta <= timezone.now():
                yield candidate
                continue

            # Queue is ordered by message ETA: everything still queued waits
            # longer, so put the head back and stop scanning.
            self.put(candidate)
            break

    def put(self, delay_message):
        """Adds message to queue.

        :param delay_message: DelayMessage instance.
        :type delay_message: DelayMessage
        """
        assert isinstance(delay_message, DelayMessage)
        if self.full():
            raise Full('Delay queue is full')

        entry = (
            delay_message.eta.timestamp(),
            delay_message.delivery_tag,
            delay_message,
        )
        self._queue.put(entry)

    def qsize(self):
        return self._queue.qsize()

    def full(self):
        if self._max_size is None:
            return False
        return self.qsize() >= self._max_size

+ 60 - 0
dj_cqrs/logger.py

@@ -0,0 +1,60 @@
+import logging
+
+from django.conf import settings
+from django.db import OperationalError, transaction
+
+from dj_cqrs.constants import (
+    DB_VENDOR_MYSQL,
+    DB_VENDOR_PG,
+    MYSQL_TIMEOUT_ERROR_CODE,
+    PG_TIMEOUT_FLAG,
+    SUPPORTED_TIMEOUT_DB_VENDORS,
+)
+
+
def install_last_query_capturer(model_cls):
    """Attach a ``_LastQueryCaptureWrapper`` to the model's DB connection.

    Idempotent: does nothing when a capturer is already installed.

    :param model_cls: Django model class whose connection is instrumented.
    """
    connection = _connection(model_cls)
    if _get_last_query_capturer(connection) is None:
        connection.execute_wrappers.append(_LastQueryCaptureWrapper())
+
+
def log_timed_out_queries(error, model_cls):  # pragma: no cover
    """Log the last captured SQL statement when *error* is a DB timeout.

    Active only when the ``CQRS_LOG_TIMED_OUT_QUERIES`` replica setting is on;
    recognizes MySQL and PostgreSQL timeout error signatures.

    :param error: Exception raised by the database backend.
    :param model_cls: Django model class whose connection produced the error.
    """
    enabled = bool(settings.CQRS['replica'].get('CQRS_LOG_TIMED_OUT_QUERIES', False))
    if not enabled or not isinstance(error, OperationalError) or not error.args:
        return

    conn = _connection(model_cls)
    vendor = getattr(conn, 'vendor', '')
    if vendor not in SUPPORTED_TIMEOUT_DB_VENDORS:
        return

    first_arg = error.args[0]
    timed_out = bool(
        (vendor == DB_VENDOR_MYSQL and first_arg == MYSQL_TIMEOUT_ERROR_CODE)
        or (vendor == DB_VENDOR_PG and isinstance(first_arg, str) and PG_TIMEOUT_FLAG in first_arg)
    )
    if not timed_out:
        return

    query = getattr(_get_last_query_capturer(conn), 'query', None)
    if query:
        logger_name = settings.CQRS['replica'].get('CQRS_QUERY_LOGGER', '') or 'django-cqrs'
        logging.getLogger(logger_name).error('Timed out query:\n%s', query)
+
+
+class _LastQueryCaptureWrapper:
+    def __init__(self):
+        self.query = None
+
+    def __call__(self, execute, sql, params, many, context):
+        try:
+            execute(sql, params, many, context)
+        finally:
+            self.query = sql
+
+
+def _get_last_query_capturer(conn):
+    return next((w for w in conn.execute_wrappers if isinstance(w, _LastQueryCaptureWrapper)), None)
+
+
def _connection(model_cls):
    """Return the DB connection used by *model_cls*'s default manager."""
    db_alias = model_cls._default_manager.db
    return transaction.get_connection(using=db_alias)

+ 1 - 0
dj_cqrs/management/__init__.py

@@ -0,0 +1 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.

+ 1 - 0
dj_cqrs/management/commands/__init__.py

@@ -0,0 +1 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.

+ 144 - 0
dj_cqrs/management/commands/cqrs_bulk_dump.py

@@ -0,0 +1,144 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import datetime
+import os
+import sys
+import time
+
+import ujson
+from django.core.management.base import BaseCommand, CommandError
+
+from dj_cqrs.management.utils import batch_qs
+from dj_cqrs.registries import MasterRegistry
+
+
class Command(BaseCommand):
    """Dump every instance of a CQRS master model to a line-delimited file.

    Output format: the first line is the model's CQRS_ID; each following line
    is one instance serialized by ``to_cqrs_dict()`` as JSON (the format
    consumed by ``cqrs_bulk_load`` on a replica service).
    """

    help = 'Bulk dump of a CQRS model from master service.'

    def add_arguments(self, parser):
        parser.add_argument(
            '--cqrs-id',
            '-c',
            help='CQRS_ID of the master model',
            type=str,
            required=True,
        )
        parser.add_argument(
            '--output',
            '-o',
            help='Output file for dumping (- for writing to stdout)',
            type=str,
            default=None,
        )
        parser.add_argument(
            '--batch',
            '-b',
            help='Batch size',
            type=int,
            default=10000,
        )
        parser.add_argument(
            '--progress',
            '-p',
            help='Display progress',
            action='store_true',
        )
        parser.add_argument(
            '--force',
            '-f',
            help='Override output file',
            action='store_true',
        )

    def handle(self, *args, **options):
        model = self._get_model(options)
        out_fname = self._get_output_filename(options)
        progress = self._get_progress(options)
        batch_size = self._get_batch_size(options)

        # NOTE(review): with '-', the `with` block closes sys.stdout on exit;
        # only stderr is written afterwards — confirm this is intended.
        with sys.stdout if out_fname == '-' else open(out_fname, 'w') as f:
            # Header line: the CQRS_ID, read back by cqrs_bulk_load.
            f.write(model.CQRS_ID)

            counter, success_counter = 0, 0
            db_count = model._default_manager.count()

            if progress:
                print(
                    'Processing {0} records with batch size {1}'.format(db_count, batch_size),
                    file=sys.stderr,
                )
            # Unordered queryset iterated via batch_qs to bound memory usage.
            for qs in batch_qs(
                model.relate_cqrs_serialization(model._default_manager.order_by().all()),
                batch_size=batch_size,
            ):
                ts = time.time()  # batch start time, used for the rate estimate
                cs = counter  # counter snapshot at batch start
                for instance in qs:
                    counter += 1
                    try:
                        # One JSON document per line; '\n' is written before the
                        # record so the file never ends with a trailing newline.
                        f.write(
                            '\n' + ujson.dumps(instance.to_cqrs_dict()),
                        )
                        success_counter += 1
                    except Exception as e:
                        # Best effort: report the failing pk and keep dumping.
                        print(
                            '\nDump record failed for pk={0}: {1}: {2}'.format(
                                instance.pk,
                                type(e).__name__,
                                str(e),
                            ),
                            file=sys.stderr,
                        )
                if progress:
                    # Progress/ETA estimated from this batch's throughput.
                    # NOTE(review): could raise ZeroDivisionError if a batch
                    # finishes within timer resolution — confirm acceptable.
                    rate = (counter - cs) / (time.time() - ts)
                    percent = 100 * counter / db_count
                    eta = datetime.timedelta(seconds=int((db_count - counter) / rate))
                    sys.stderr.write(
                        '\r{0} of {1} processed - {2}% with '
                        'rate {3:.1f} rps, to go {4} ...{5:20}'.format(
                            counter,
                            db_count,
                            int(percent),
                            rate,
                            str(eta),
                            ' ',
                        ),
                    )
                    sys.stderr.flush()

        print(
            'Done!\n{0} instance(s) saved.\n{1} instance(s) processed.'.format(
                success_counter,
                counter,
            ),
            file=sys.stderr,
        )

    @staticmethod
    def _get_model(options):
        # Resolve the master model by its CQRS_ID, or fail the command.
        cqrs_id = options['cqrs_id']
        model = MasterRegistry.get_model_by_cqrs_id(cqrs_id)

        if not model:
            raise CommandError('Wrong CQRS ID: {0}!'.format(cqrs_id))

        return model

    @staticmethod
    def _get_output_filename(options):
        # Default output name is "<cqrs_id>.dump"; refuse to overwrite an
        # existing file unless --force was given.
        f_name = options['output']
        if f_name is None:
            f_name = '{0}.dump'.format(options['cqrs_id'])

        if f_name != '-' and os.path.exists(f_name) and not (options['force']):
            raise CommandError('File {0} exists!'.format(f_name))

        return f_name

    @staticmethod
    def _get_progress(options):
        return bool(options['progress'])

    @staticmethod
    def _get_batch_size(options):
        return options['batch']

+ 121 - 0
dj_cqrs/management/commands/cqrs_bulk_load.py

@@ -0,0 +1,121 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import os
+import sys
+
+import ujson
+from django.core.management.base import BaseCommand, CommandError
+from django.db import DatabaseError, transaction
+
+from dj_cqrs.registries import ReplicaRegistry
+
+
class Command(BaseCommand):
    """Load a dump produced by ``cqrs_bulk_dump`` into a replica service.

    Input format: the first line is the CQRS_ID, each following line is one
    JSON-serialized instance, saved through ``model.cqrs_save``.
    """

    help = 'Bulk load of a CQRS model to a replica service.'

    def add_arguments(self, parser):
        parser.add_argument(
            '--input',
            '-i',
            help='Input file for loading (- for reading from stdin)',
            type=str,
            required=True,
        )
        # NOTE(review): argparse `type=bool` makes any non-empty value truthy
        # ("-c False" still clears) — confirm this CLI behavior is intended.
        parser.add_argument(
            '--clear',
            '-c',
            help='Delete existing models',
            type=bool,
            required=False,
            default=False,
        )
        parser.add_argument(
            '--batch',
            '-b',
            help='Batch size',
            type=int,
            default=10000,
        )

    def handle(self, *args, **options):
        batch_size = self._get_batch_size(options)

        f_name = options['input']
        if f_name != '-' and not os.path.exists(f_name):
            raise CommandError("File {0} doesn't exist!".format(f_name))

        with sys.stdin if f_name == '-' else open(f_name, 'r') as f:
            # The first dump line holds the model's CQRS_ID.
            try:
                cqrs_id = next(f).strip()
            except StopIteration:
                cqrs_id = None

            if not cqrs_id:
                raise CommandError('File {0} is empty!'.format(f_name))

            model = ReplicaRegistry.get_model_by_cqrs_id(cqrs_id)
            if not model:
                raise CommandError('Wrong CQRS ID: {0}!'.format(cqrs_id))

            # Optional wipe of existing replica rows before loading.
            with transaction.atomic():
                if options['clear']:
                    try:
                        model._default_manager.all().delete()
                    except DatabaseError:
                        raise CommandError('Delete operation fails!')

            self._process(f, model, batch_size)

    @classmethod
    def _process(cls, stream, model, batch_size):
        # Reads the stream line by line, committing one transaction per batch.
        success_counter = 0
        # Data starts at line 2: line 1 was the CQRS_ID header.
        line_number = 2

        while True:
            with transaction.atomic():
                try:
                    for _ in range(0, batch_size):
                        line = stream.readline()

                        # _process_line raises EOFError at end of stream,
                        # which terminates the outer loop below.
                        success = cls._process_line(line_number, line, model)

                        success_counter += int(bool(success))
                        line_number += 1
                except EOFError:
                    break

        print('Done!\n{0} instance(s) loaded.'.format(success_counter), file=sys.stderr)

    @staticmethod
    def _process_line(line_number, line, model):
        # Parses and saves one dump line; returns True only on success.
        # Raises EOFError when the line is empty (readline() at EOF).
        if not line:
            raise EOFError
        try:
            try:
                master_data = ujson.loads(line.strip())
            except ValueError:
                print(
                    "Dump file can't be parsed: line {0}!".format(line_number),
                    file=sys.stderr,
                )
                return False

            instance = model.cqrs_save(master_data)
            if not instance:
                print(
                    "Instance can't be saved: line {0}!".format(line_number),
                    file=sys.stderr,
                )
            else:
                return True
        except Exception as e:
            # Any unexpected error is reported but does not abort the load.
            print(
                'Unexpected error: line {0}! {1}'.format(line_number, str(e)),
                file=sys.stderr,
            )

        return False

    @staticmethod
    def _get_batch_size(options):
        return options['batch']

+ 209 - 0
dj_cqrs/management/commands/cqrs_consume.py

@@ -0,0 +1,209 @@
+#  Copyright © 2022 Ingram Micro Inc. All rights reserved.
+import logging
+import signal
+import threading
+from pathlib import Path
+
+from django.core.management.base import BaseCommand, CommandError
+from watchfiles import watch
+from watchfiles.filters import PythonFilter
+from watchfiles.run import start_process
+
+from dj_cqrs.registries import ReplicaRegistry
+
+
+logger = logging.getLogger('django-cqrs')
+
+
def consume(**kwargs):
    """Consumer subprocess entry point.

    Runs in a freshly spawned worker process, so Django must be initialized
    here before anything that touches the app registry is imported.

    :param kwargs: Keyword arguments forwarded to ``current_transport.consume``
        (e.g. ``cqrs_ids`` — see ``Command.get_consume_kwargs``).
    """
    import django

    django.setup()

    # Imported only after django.setup(): the transport reads Django settings.
    from dj_cqrs.transport import current_transport

    try:
        current_transport.consume(**kwargs)
    except KeyboardInterrupt:
        # Graceful shutdown: the parent manager stops workers via signals.
        pass
+
+
+def _display_path(path):
+    try:
+        return f'"{path.relative_to(Path.cwd())}"'
+    except ValueError:  # pragma: no cover
+        return f'"{path}"'
+
+
class WorkersManager:
    """Supervises a pool of consumer worker subprocesses.

    Spawns ``workers`` processes running :func:`consume` and blocks until
    SIGINT/SIGTERM. With ``reload=True`` it also watches the current working
    directory for Python file changes (and handles SIGHUP) to restart the pool.

    :param consume_kwargs: kwargs forwarded to every worker's consume().
    :param workers: number of worker processes to keep running.
    :param reload: enable SIGHUP handling and autoreload on file changes.
    :param ignore_paths: paths excluded from the file watcher.
    :param sigint_timeout: seconds to wait after SIGINT before escalating.
    :param sigkill_timeout: seconds to wait after SIGKILL before timing out.
    """

    def __init__(
        self,
        consume_kwargs,
        workers=1,
        reload=False,
        ignore_paths=None,
        sigint_timeout=5,
        sigkill_timeout=1,
    ):
        self.pool = []
        self.workers = workers
        self.reload = reload
        self.consume_kwargs = consume_kwargs
        self.stop_event = threading.Event()
        self.sigint_timeout = sigint_timeout
        self.sigkill_timeout = sigkill_timeout

        if self.reload:
            # yield_on_timeout makes the watcher emit periodically even when
            # nothing changed, so iteration can observe stop_event regularly.
            self.watch_filter = PythonFilter(ignore_paths=ignore_paths)
            self.watcher = watch(
                Path.cwd(),
                watch_filter=self.watch_filter,
                stop_event=self.stop_event,
                yield_on_timeout=True,
            )

    def handle_signal(self, *args, **kwargs):
        # SIGINT/SIGTERM handler: makes run() fall through to terminate().
        self.stop_event.set()

    def run(self):
        """Start the pool and block until a stop signal; then terminate it."""
        for sig in [signal.SIGINT, signal.SIGTERM]:
            signal.signal(sig, self.handle_signal)
        if self.reload:
            signal.signal(signal.SIGHUP, self.restart)

        self.start()

        if self.reload:
            # Iterating self yields lists of changed files, or None on timeout.
            for files_changed in self:
                if files_changed:
                    logger.warning(
                        'Detected changes in %s. Reloading...',
                        ', '.join(map(_display_path, files_changed)),
                    )
                    self.restart()
        else:
            self.stop_event.wait()

        self.terminate()

    def start(self):
        # Spawn the configured number of consumer subprocesses.
        for _ in range(self.workers):
            process = start_process(
                consume,
                'function',
                (),
                self.consume_kwargs,
            )
            self.pool.append(process)
            logger.info(f'Consumer process with pid {process.pid} started')

    def terminate(self, *args, **kwargs):
        # Stop workers one by one: SIGINT first, SIGKILL after the timeout.
        while self.pool:
            process = self.pool.pop()
            process.stop(sigint_timeout=self.sigint_timeout, sigkill_timeout=self.sigkill_timeout)
            logger.info(f'Consumer process with pid {process.pid} stopped.')

    def restart(self, *args, **kwargs):
        # Full pool bounce; also installed as the SIGHUP handler.
        self.terminate()
        self.start()

    def __iter__(self):
        return self

    def __next__(self):
        # One watcher tick: de-duplicated changed paths, or None when idle.
        changes = next(self.watcher)
        if changes:
            return list({Path(c[1]) for c in changes})
        return None
+
+
class Command(BaseCommand):
    """Management command that runs CQRS consumer worker processes."""

    help = 'Starts CQRS worker, which consumes messages from message queue.'

    def add_arguments(self, parser):
        parser.add_argument(
            '--workers',
            '-w',
            help='Number of workers',
            type=int,
            default=1,
        )
        parser.add_argument(
            '--cqrs-id',
            '-cid',
            nargs='*',
            type=str,
            help='Choose model(s) by CQRS_ID for consuming',
        )
        parser.add_argument(
            '--reload',
            '-r',
            help=('Enable reload signal SIGHUP and autoreload ' 'on file changes'),
            action='store_true',
            default=False,
        )
        parser.add_argument(
            '--ignore-paths',
            nargs='?',
            type=str,
            help=(
                'Specify directories to ignore, '
                'to ignore multiple paths use a comma as separator, '
                'e.g. "env" or "env,node_modules"'
            ),
        )
        parser.add_argument(
            '--sigint-timeout',
            nargs='?',
            type=int,
            default=5,
            help='How long to wait for the sigint timeout before sending sigkill.',
        )
        parser.add_argument(
            '--sigkill-timeout',
            nargs='?',
            type=int,
            default=1,
            help='How long to wait for the sigkill timeout before issuing a timeout exception.',
        )

    def handle(
        self,
        *args,
        workers=1,
        cqrs_id=None,
        reload=False,
        ignore_paths=None,
        sigint_timeout=5,
        sigkill_timeout=1,
        **options,
    ):
        # Comma-separated --ignore-paths string -> absolute Path objects.
        paths_to_ignore = None
        if ignore_paths:
            paths_to_ignore = [Path(p).resolve() for p in ignore_paths.split(',')]

        workers_manager = WorkersManager(
            workers=workers,
            consume_kwargs=self.get_consume_kwargs(cqrs_id),
            reload=reload,
            ignore_paths=paths_to_ignore,
            sigint_timeout=sigint_timeout,
            sigkill_timeout=sigkill_timeout,
        )

        # Blocks until the manager receives a stop signal.
        workers_manager.run()

    def get_consume_kwargs(self, ids_list):
        """Validate the requested CQRS_IDs and build consume() kwargs.

        :param ids_list: optional list of CQRS_ID strings from the CLI.
        :raises CommandError: when an id is not registered in ReplicaRegistry.
        :return: dict with a 'cqrs_ids' set, or an empty dict (consume all).
        """
        consume_kwargs = {}
        if ids_list:
            cqrs_ids = set()
            for cqrs_id in ids_list:
                model = ReplicaRegistry.get_model_by_cqrs_id(cqrs_id)
                if not model:
                    raise CommandError('Wrong CQRS ID: {0}!'.format(cqrs_id))

                cqrs_ids.add(cqrs_id)

            consume_kwargs['cqrs_ids'] = cqrs_ids

        return consume_kwargs

+ 131 - 0
dj_cqrs/management/commands/cqrs_dead_letters.py

@@ -0,0 +1,131 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import ujson
+from django.core.management.base import BaseCommand, CommandError
+
+from dj_cqrs.constants import DEFAULT_MASTER_MESSAGE_TTL
+from dj_cqrs.dataclasses import TransportPayload
+from dj_cqrs.registries import ReplicaRegistry
+from dj_cqrs.transport import current_transport
+from dj_cqrs.transport.rabbit_mq import RabbitMQTransport
+from dj_cqrs.utils import get_message_expiration_dt
+
+
class RabbitMQTransportService(RabbitMQTransport):
    """Facade exposing RabbitMQTransport's protected helpers to this command.

    The dead-letters tooling needs direct access to connection and queue
    primitives that the transport keeps private.
    """

    @classmethod
    def get_consumer_settings(cls):
        return cls._get_consumer_settings()

    @classmethod
    def get_common_settings(cls):
        return cls._get_common_settings()

    @classmethod
    def create_connection(cls, host, port, creds, exchange):
        return cls._create_connection(host, port, creds, exchange)

    @classmethod
    def declare_queue(cls, channel, queue_name):
        # Durable, non-exclusive queue declaration (idempotent on the broker).
        return channel.queue_declare(queue_name, durable=True, exclusive=False)

    @classmethod
    def nack(cls, channel, delivery_tag, payload=None):
        return cls._nack(channel, delivery_tag, payload)
+
+
class Command(BaseCommand):
    """Inspect and manage the RabbitMQ dead-letters queue (retry/dump/purge)."""

    help = 'CQRS dead letters queue management commands'

    def add_arguments(self, parser):
        command = parser.add_subparsers(dest='command')
        command.required = True
        command.add_parser('retry', help='Retry all dead letters.')
        command.add_parser('dump', help='Dumps all dead letter to stdout.')
        command.add_parser('purge', help='Removes all dead letters.')

    def handle(self, *args, **options):
        self.check_transport()
        channel, connection = self.init_broker()

        queue_name, dead_letter_queue_name, *_ = RabbitMQTransportService.get_consumer_settings()
        dead_letters_queue = RabbitMQTransportService.declare_queue(
            channel,
            dead_letter_queue_name,
        )
        # The count is read once; each subcommand processes exactly this many.
        dead_letters_count = dead_letters_queue.method.message_count
        consumer_generator = channel.consume(
            queue=dead_letter_queue_name,
            auto_ack=False,
            exclusive=False,
        )

        command = options['command']
        if command == 'retry':
            self.handle_retry(channel, consumer_generator, dead_letters_count)
        elif command == 'dump':
            self.handle_dump(consumer_generator, dead_letters_count)
        elif command == 'purge':
            self.handle_purge(channel, dead_letter_queue_name, dead_letters_count)

        if not connection.is_closed:
            connection.close()

    def check_transport(self):
        # Dead-letter handling here is specific to the RabbitMQ transport.
        if not issubclass(current_transport, RabbitMQTransport):
            raise CommandError('Dead letters commands available only for RabbitMQTransport.')

    def init_broker(self):
        """Connect to RabbitMQ and (re)declare the queues and their bindings."""
        host, port, creds, exchange = RabbitMQTransportService.get_common_settings()
        connection, channel = RabbitMQTransportService.create_connection(
            host,
            port,
            creds,
            exchange,
        )

        queue_name, dead_letter_queue_name, *_ = RabbitMQTransportService.get_consumer_settings()
        RabbitMQTransportService.declare_queue(channel, queue_name)
        RabbitMQTransportService.declare_queue(channel, dead_letter_queue_name)
        for cqrs_id, _ in ReplicaRegistry.models.items():
            channel.queue_bind(exchange=exchange, queue=queue_name, routing_key=cqrs_id)

            # Every service must have specific SYNC or requeue routes
            channel.queue_bind(
                exchange=exchange,
                queue=queue_name,
                routing_key='cqrs.{0}.{1}'.format(queue_name, cqrs_id),
            )

        return channel, connection

    def handle_retry(self, channel, consumer_generator, dead_letters_count):
        """Re-publish each dead letter to the main queue, then drop it."""
        self.stdout.write('Total dead letters: {0}'.format(dead_letters_count))
        for i in range(1, dead_letters_count + 1):
            self.stdout.write('Retrying: {0}/{1}'.format(i, dead_letters_count))
            method_frame, properties, body = next(consumer_generator)

            dct = ujson.loads(body)
            # Reset the retry counter and refresh the expiration window.
            dct['retries'] = 0
            if dct.get('expires'):
                # Message could expire already
                expires = get_message_expiration_dt(DEFAULT_MASTER_MESSAGE_TTL)
                dct['expires'] = expires.replace(microsecond=0).isoformat()
            payload = TransportPayload.from_message(dct)
            # NOTE(review): marks the payload for the requeue routing path —
            # presumably read by the transport's produce(); confirm.
            payload.is_requeue = True

            # Publish first; only then nack the dead letter off its queue.
            RabbitMQTransportService.produce(payload)
            message = ujson.dumps(dct)
            self.stdout.write(message)

            RabbitMQTransportService.nack(channel, method_frame.delivery_tag)

    def handle_dump(self, consumer_generator, dead_letters_count):
        # Print dead letters without acking them: they stay queued.
        for _ in range(1, dead_letters_count + 1):
            *_, body = next(consumer_generator)
            self.stdout.write(body.decode('utf-8'))

    def handle_purge(self, channel, dead_letter_queue_name, dead_letter_count):
        self.stdout.write('Total dead letters: {0}'.format(dead_letter_count))
        if dead_letter_count > 0:
            channel.queue_purge(dead_letter_queue_name)
            self.stdout.write('Purged')

+ 52 - 0
dj_cqrs/management/commands/cqrs_deleted_diff_master.py

@@ -0,0 +1,52 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import sys
+
+import ujson
+from django.core.management.base import BaseCommand, CommandError
+
+from dj_cqrs.registries import MasterRegistry
+
+
class Command(BaseCommand):
    """Master-side step of the deleted-objects diff pipeline.

    Reads pk packages from stdin (produced by the replica-side diff command),
    echoes the header line, and writes out the pks that no longer exist on
    the master — i.e. candidates for deletion on the replica.
    """

    help = 'Diff of deleted CQRS models pks from master diff stream.'

    @classmethod
    def serialize_out(cls, package):
        return ujson.dumps(package)

    @classmethod
    def deserialize_in(cls, package_line):
        return set(ujson.loads(package_line))

    def handle(self, *args, **options):
        with sys.stdin as f:
            # Header line format: "<CQRS_ID>,<timestamp>"; passed through.
            first_line = f.readline()
            model = self._get_model(first_line)
            self.stdout.write(first_line.strip())

            for package_line in f:
                master_data = self.deserialize_in(package_line)

                # Pks from the package that still exist on the master side.
                exist_pks = set(
                    model.objects.filter(
                        pk__in=master_data,
                    ).values_list(
                        'pk',
                        flat=True,
                    ),
                )
                diff_ids = list(master_data - exist_pks)
                if diff_ids:
                    self.stdout.write(self.serialize_out(diff_ids))
                    self.stderr.write('PK to delete: {0}'.format(str(diff_ids)))

    @staticmethod
    def _get_model(first_line):
        # The CQRS_ID is the first comma-separated field of the header line.
        cqrs_id = first_line.split(',')[0]
        model = MasterRegistry.get_model_by_cqrs_id(cqrs_id)

        if not model:
            raise CommandError('Wrong CQRS ID: {0}!'.format(cqrs_id))

        return model

+ 82 - 0
dj_cqrs/management/commands/cqrs_deleted_diff_replica.py

@@ -0,0 +1,82 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import ujson
+from django.core.exceptions import FieldError
+from django.core.management.base import BaseCommand, CommandError
+from django.utils.timezone import now
+
+from dj_cqrs.management.utils import batch_qs
+from dj_cqrs.registries import ReplicaRegistry
+
+
class Command(BaseCommand):
    help = 'Streaming diff of CQRS model pks from replica service to check for deleted objects.'

    @classmethod
    def serialize_package(cls, package):
        """Serialize one batch of replica pks into a JSON line."""
        return ujson.dumps(package)

    def add_arguments(self, parser):
        parser.add_argument(
            '--cqrs-id',
            '-cid',
            help='CQRS_ID of the replica model',
            type=str,
            required=True,
        )
        parser.add_argument(
            '--filter',
            '-f',
            help='Filter kwargs',
            type=str,
            default=None,
        )
        parser.add_argument(
            '--batch',
            '-b',
            help='Batch size',
            type=int,
            default=10000,
        )

    def handle(self, *args, **options):
        """Stream a `CQRS_ID,<timestamp>` header and JSON batches of replica pks.

        Raises:
            CommandError: On unknown CQRS_ID or malformed --filter kwargs.
        """
        model = self._get_model(options)
        batch_size = self._get_batch_size(options)

        # Use .all() instead of .values(): only pks are streamed below, so there
        # is no reason to materialize every column of every row as dicts. This
        # also matches the queryset construction in cqrs_diff_master.
        qs = model._default_manager.all().order_by()
        if options['filter']:
            try:
                kwargs = ujson.loads(options['filter'])
                if not isinstance(kwargs, dict):
                    raise ValueError
            except ValueError:
                raise CommandError('Bad filter kwargs!')

            try:
                qs = qs.filter(**kwargs)
            except FieldError as e:
                raise CommandError('Bad filter kwargs! {0}'.format(str(e)))

        if not qs.exists():
            self.stderr.write('No objects found for filter!')
            return

        current_dt = now()
        self.stdout.write('{0},{1}'.format(model.CQRS_ID, str(current_dt)))

        for bqs in batch_qs(qs.values_list('pk', flat=True), batch_size=batch_size):
            self.stdout.write(self.serialize_package(list(bqs)))

    @staticmethod
    def _get_model(options):
        """Resolve the replica model by --cqrs-id; raise on unknown id."""
        cqrs_id = options['cqrs_id']
        model = ReplicaRegistry.get_model_by_cqrs_id(cqrs_id)

        if not model:
            raise CommandError('Wrong CQRS ID: {0}!'.format(cqrs_id))

        return model

    @staticmethod
    def _get_batch_size(options):
        """Return the --batch option value."""
        return options['batch']

+ 40 - 0
dj_cqrs/management/commands/cqrs_deleted_sync_replica.py

@@ -0,0 +1,40 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import sys
+
+import ujson
+from django.core.management.base import BaseCommand, CommandError
+from django.db import DatabaseError
+
+from dj_cqrs.registries import ReplicaRegistry
+
+
class Command(BaseCommand):
    help = 'Diff for deleted objects synchronizer from CQRS master stream.'

    @classmethod
    def deserialize_in(cls, package_line):
        """Parse one JSON line of pks from the master diff stream."""
        return ujson.loads(package_line)

    def handle(self, *args, **options):
        """Read pk batches from stdin and delete the matching replica rows.

        Raises:
            CommandError: On unknown CQRS_ID in the stream header.
        """
        with sys.stdin as f:
            first_line = f.readline().strip()
            model = self._get_model(first_line)

            for pks_line in f:
                try:
                    model._default_manager.filter(
                        pk__in=self.deserialize_in(pks_line.strip()),
                    ).delete()
                except DatabaseError as e:
                    # Report through BaseCommand's stderr wrapper for consistency
                    # with the other CQRS commands, instead of a bare print().
                    self.stderr.write(str(e))

    @staticmethod
    def _get_model(first_line):
        """Resolve the replica model from the stream header; raise on unknown id."""
        cqrs_id = first_line.split(',')[0]
        model = ReplicaRegistry.get_model_by_cqrs_id(cqrs_id)

        if not model:
            raise CommandError('Wrong CQRS ID: {0}!'.format(cqrs_id))

        return model

+ 87 - 0
dj_cqrs/management/commands/cqrs_diff_master.py

@@ -0,0 +1,87 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import ujson
+from django.core.exceptions import FieldError
+from django.core.management.base import BaseCommand, CommandError
+from django.utils.timezone import now
+
+from dj_cqrs.management.utils import batch_qs
+from dj_cqrs.registries import MasterRegistry
+
+
class Command(BaseCommand):
    help = 'Streaming diff of CQRS models from master service.'

    @classmethod
    def serialize_package(cls, package):
        """Serialize one batch of [pk, cqrs_revision] pairs into a JSON line."""
        return ujson.dumps(package)

    def add_arguments(self, parser):
        parser.add_argument(
            '--cqrs-id',
            '-cid',
            help='CQRS_ID of the master model',
            type=str,
            required=True,
        )
        parser.add_argument(
            '--filter',
            '-f',
            help='Filter kwargs',
            type=str,
            default=None,
        )
        parser.add_argument(
            '--batch',
            '-b',
            help='Batch size',
            type=int,
            default=10000,
        )

    def handle(self, *args, **options):
        """Stream a `CQRS_ID,<timestamp>` header and batches of pk/revision pairs."""
        model = self._get_model(options)
        batch_size = self._get_batch_size(options)

        qs = model._default_manager.all().order_by()
        raw_filter = options['filter']
        if raw_filter:
            try:
                filter_kwargs = ujson.loads(raw_filter)
                if not isinstance(filter_kwargs, dict):
                    raise ValueError
            except ValueError:
                raise CommandError('Bad filter kwargs!')

            try:
                qs = qs.filter(**filter_kwargs)
            except FieldError as e:
                raise CommandError('Bad filter kwargs! {0}'.format(str(e)))

        if not qs.exists():
            self.stderr.write('No objects found for filter!')
            return

        self.stdout.write('{0},{1}'.format(model.CQRS_ID, str(now())))

        for chunk in batch_qs(qs, batch_size=batch_size):
            package = []
            for instance in chunk:
                # Models may opt individual instances out via is_sync_instance().
                if instance.is_sync_instance():
                    package.append([instance.pk, instance.cqrs_revision])
            self.stdout.write(self.serialize_package(package))

    @staticmethod
    def _get_model(options):
        """Resolve the master model by --cqrs-id; raise on unknown id."""
        cqrs_id = options['cqrs_id']
        model = MasterRegistry.get_model_by_cqrs_id(cqrs_id)
        if model is None:
            raise CommandError('Wrong CQRS ID: {0}!'.format(cqrs_id))
        return model

    @staticmethod
    def _get_batch_size(options):
        """Return the --batch option value."""
        return options['batch']

+ 58 - 0
dj_cqrs/management/commands/cqrs_diff_replica.py

@@ -0,0 +1,58 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import sys
+
+import ujson
+from django.conf import settings
+from django.core.management.base import BaseCommand, CommandError
+
+from dj_cqrs.registries import ReplicaRegistry
+
+
class Command(BaseCommand):
    help = 'Diff of CQRS replica models from master diff stream.'

    @classmethod
    def deserialize_in(cls, package_line):
        """Parse one JSON line of [pk, cqrs_revision] pairs into a dict."""
        return dict(ujson.loads(package_line))

    @classmethod
    def serialize_out(cls, ids):
        """Serialize the list of stale pks into a JSON line."""
        return ujson.dumps(ids)

    def handle(self, *args, **options):
        """Compare master revisions from stdin with replica state; emit stale pks."""
        with sys.stdin as f:
            header = f.readline()
            model = self._get_model(header)
            # Re-emit the header extended with this replica's queue name.
            self.stdout.write('{0},{1}'.format(header.strip(), settings.CQRS.get('queue')))

            for package_line in f:
                master_items = self.deserialize_in(package_line)

                instances = (
                    model._default_manager.filter(pk__in=master_items.keys())
                    .order_by()
                    .only('pk', 'cqrs_revision')
                )
                replica_revisions = {obj.pk: obj.cqrs_revision for obj in instances}

                # Rows missing on the replica default to revision -1 and so always diff.
                stale_pks = {
                    pk
                    for pk, revision in master_items.items()
                    if replica_revisions.get(pk, -1) != revision
                }

                if stale_pks:
                    self.stdout.write(self.serialize_out(list(stale_pks)))
                    self.stderr.write('PK to resync: {0}'.format(str(stale_pks)))

    @staticmethod
    def _get_model(first_line):
        """Resolve the replica model from the stream header; raise on unknown id."""
        cqrs_id = first_line.split(',')[0]
        model = ReplicaRegistry.get_model_by_cqrs_id(cqrs_id)
        if model is None:
            raise CommandError('Wrong CQRS ID: {0}!'.format(cqrs_id))
        return model

+ 77 - 0
dj_cqrs/management/commands/cqrs_diff_sync.py

@@ -0,0 +1,77 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import sys
+
+from django.core.management.base import BaseCommand, CommandError
+
+from dj_cqrs.constants import NO_QUEUE
+from dj_cqrs.management.commands.cqrs_sync import (
+    DEFAULT_BATCH,
+    DEFAULT_PROGRESS,
+    Command as SyncCommand,
+)
+from dj_cqrs.registries import MasterRegistry
+
+
class Command(BaseCommand):
    help = 'Diff synchronizer from CQRS replica stream.'

    def add_arguments(self, parser):
        parser.add_argument(
            '--batch',
            '-b',
            help='Batch size',
            type=int,
            default=DEFAULT_BATCH,
        )
        parser.add_argument(
            '--progress',
            '-p',
            help='Display progress',
            action='store_true',
        )

    def handle(self, *args, **options):
        """Resync every pk package from the replica diff stream via cqrs_sync.

        Raises:
            CommandError: On unknown CQRS_ID in the stream header.
        """
        progress = self._get_progress(options)
        batch_size = self._get_batch_size(options)

        with sys.stdin as f:
            first_line = f.readline().strip()
            model = self._get_model(first_line)
            queue = self._get_queue(first_line)

            for pks_line in f:
                # Always include 'queue' (None -> default routing): SyncCommand.handle
                # reads options['queue'] while syncing instances, so omitting the key
                # for a NO_QUEUE header would raise KeyError.
                sync_kwargs = {
                    'cqrs_id': model.CQRS_ID,
                    'filter': '{{"id__in": {0}}}'.format(pks_line.strip()),
                    'progress': progress,
                    'batch': batch_size,
                    'queue': queue,
                }

                SyncCommand().handle(**sync_kwargs)

    @staticmethod
    def _get_model(first_line):
        """Resolve the master model from the stream header; raise on unknown id."""
        cqrs_id = first_line.split(',')[0]
        model = MasterRegistry.get_model_by_cqrs_id(cqrs_id)

        if not model:
            raise CommandError('Wrong CQRS ID: {0}!'.format(cqrs_id))

        return model

    @staticmethod
    def _get_queue(first_line):
        """Return the replica queue name from the header, or None for NO_QUEUE."""
        queue = first_line.split(',')[-1]
        if queue != NO_QUEUE:
            return queue
        return None

    @staticmethod
    def _get_batch_size(options):
        """Return the batch size option (falls back to DEFAULT_BATCH)."""
        return options.get('batch', DEFAULT_BATCH)

    @staticmethod
    def _get_progress(options):
        """Return whether progress output is enabled."""
        return bool(options.get('progress', DEFAULT_PROGRESS))

+ 155 - 0
dj_cqrs/management/commands/cqrs_sync.py

@@ -0,0 +1,155 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import datetime
+import sys
+import time
+
+import ujson
+from django.core.exceptions import FieldError
+from django.core.management.base import BaseCommand, CommandError
+from django.db import close_old_connections, connections
+
+from dj_cqrs.management.utils import batch_qs
+from dj_cqrs.registries import MasterRegistry
+
+
+# Default number of rows processed per batch during sync.
+DEFAULT_BATCH = 10000
+# Progress output is disabled unless --progress is passed.
+DEFAULT_PROGRESS = False
+
+
class Command(BaseCommand):
    help = 'Filter synchronization of certain CQRS model rows over transport to replicas.'

    def add_arguments(self, parser):
        parser.add_argument(
            '--cqrs-id',
            '-cid',
            help='CQRS_ID of the master model',
            type=str,
            required=True,
        )
        parser.add_argument(
            '--filter',
            '-f',
            help='Filter kwargs',
            type=str,
            default=None,
        )
        parser.add_argument(
            '--queue',
            '-q',
            help='Name of the specific replica queue',
            type=str,
            default=None,
        )
        parser.add_argument(
            '--batch',
            '-b',
            help='Batch size',
            type=int,
            default=DEFAULT_BATCH,
        )
        parser.add_argument(
            '--progress',
            '-p',
            help='Display progress',
            action='store_true',
        )

    def handle(self, *args, **options):
        """Sync the filtered queryset of a master model to replicas over transport.

        Raises:
            CommandError: On unknown CQRS_ID or malformed --filter kwargs.
        """
        model = self._get_model(options)
        progress = self._get_progress(options)
        batch_size = self._get_batch_size(options)
        # .get(): callers such as cqrs_diff_sync may invoke handle() directly
        # without a 'queue' key; None falls back to default routing.
        queue = options.get('queue')

        qs = self._prepare_qs(model, options)
        db_count = qs.count()
        if db_count == 0:
            print('No objects found for filter!')
            return

        counter, success_counter = 0, 0
        if progress:
            print('Processing {0} records with batch size {1}'.format(db_count, batch_size))

        for qs_ in batch_qs(model.relate_cqrs_serialization(qs), batch_size=batch_size):
            ts = time.time()
            cs = counter

            # check if must reconnect
            if not connections[qs_.db].is_usable():
                connections[qs_.db].connect()

            for instance in qs_:
                counter += 1
                try:
                    instance.cqrs_sync(queue=queue)
                    success_counter += 1
                except Exception as e:
                    print(
                        '\nSync record failed for pk={0}: {1}: {2}'.format(
                            instance.pk,
                            type(e).__name__,
                            str(e),
                        ),
                    )
                    # Stale connections are a common failure mode here; drop them
                    # so the next iteration reconnects cleanly.
                    close_old_connections()

            if progress:
                # Clamp the elapsed time to avoid ZeroDivisionError when a batch
                # completes within the timer resolution.
                elapsed = max(time.time() - ts, 1e-9)
                rate = (counter - cs) / elapsed
                percent = 100 * counter / db_count
                eta = datetime.timedelta(seconds=int((db_count - counter) / rate))
                sys.stdout.write(
                    '\r{0} of {1} processed - {2}% with '
                    'rate {3:.1f} rps, to go {4} ...{5:20}'.format(
                        counter,
                        db_count,
                        int(percent),
                        rate,
                        str(eta),
                        ' ',
                    ),
                )
                sys.stdout.flush()

        print(
            'Done!\n{0} instance(s) synced.\n{1} instance(s) processed.'.format(
                success_counter,
                counter,
            ),
        )

    @staticmethod
    def _prepare_qs(model, options):
        """Build the queryset from --filter JSON kwargs (empty queryset if no filter).

        Raises:
            CommandError: If the filter is not a JSON dict or uses unknown fields.
        """
        qs = model._default_manager.none()
        if options['filter']:
            try:
                kwargs = ujson.loads(options['filter'])
                if not isinstance(kwargs, dict):
                    raise ValueError
            except ValueError:
                raise CommandError('Bad filter kwargs!')

            try:
                qs = model._default_manager.filter(**kwargs).order_by()
            except FieldError as e:
                raise CommandError('Bad filter kwargs! {0}'.format(str(e)))

        return qs

    @staticmethod
    def _get_model(options):
        """Resolve the master model by --cqrs-id; raise on unknown id."""
        cqrs_id = options['cqrs_id']
        model = MasterRegistry.get_model_by_cqrs_id(cqrs_id)

        if not model:
            raise CommandError('Wrong CQRS ID: {0}!'.format(cqrs_id))

        return model

    @staticmethod
    def _get_batch_size(options):
        """Return the batch size option (falls back to DEFAULT_BATCH)."""
        return options.get('batch', DEFAULT_BATCH)

    @staticmethod
    def _get_progress(options):
        """Return whether progress output is enabled."""
        return bool(options.get('progress', DEFAULT_PROGRESS))

+ 15 - 0
dj_cqrs/management/utils.py

@@ -0,0 +1,15 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+
def batch_qs(qs, batch_size=10000):
    """
    Helper function to manage RAM usage on big dataset iterations.

    Yields consecutive slices of ``qs`` with at most ``batch_size`` items each.
    This function can be used only on STATIC DB state. It's a good fit for
    migrations, but it can't be used in real applications.
    """
    assert batch_size > 0

    total = qs.count()
    start = 0
    while start < total:
        yield qs[start:min(start + batch_size, total)]
        start += batch_size

+ 390 - 0
dj_cqrs/managers.py

@@ -0,0 +1,390 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import logging
+
+from django.core.exceptions import ValidationError
+from django.db import Error, transaction
+from django.db.models import F, Manager
+from django.utils import timezone
+
+from dj_cqrs.constants import FIELDS_TRACKER_FIELD_NAME, TRACKED_FIELDS_ATTR_NAME
+
+
+# Named logger shared by the CQRS managers for sync error reporting.
+logger = logging.getLogger('django-cqrs')
+
+
+class MasterManager(Manager):
+    def bulk_create(self, objs, **kwargs):
+        """
+        Custom bulk create method to support sending of create signals.
+        This can be used only in cases, when IDs are generated on client or DB returns IDs.
+
+        Args:
+            objs (List[django.db.models.Model]): List of objects for creation.
+            kwargs (dict): Bulk create kwargs.
+        """
+        # bulk_create() bypasses Model.save(), so tracked-field state is handled
+        # manually here — NOTE(review): presumably to snapshot tracker state
+        # before insertion; confirm against save_tracked_fields() in the mixins.
+        for obj in objs:
+            obj.save_tracked_fields()
+        objs = super().bulk_create(objs, **kwargs)
+
+        # Fire the post-bulk-create signal only when something was created.
+        if objs:
+            self.model.call_post_bulk_create(objs, using=self.db)
+
+        return objs
+
+    def bulk_update(self, queryset, **kwargs):
+        """Custom update method to support sending of update signals.
+
+        Args:
+            queryset (django.db.models.QuerySet): Django Queryset (f.e. filter).
+            kwargs (dict): Update kwargs.
+
+        Returns:
+            (int): Row count from QuerySet.update, or None when tracked previous
+                data is collected and no rows matched the queryset.
+        """
+        prev_data_mapper = {}
+        collect_prev_data = hasattr(self.model, FIELDS_TRACKER_FIELD_NAME)
+
+        # Add filter by list of ids in case of update kwargs
+        # are the same as the chain filter kwargs in the Queryset.
+        # If that happen the .all() method will refresh after update and
+        # result in an empty Queryset that will not send the signal.
+        ids_list = list(queryset.values_list('pk', flat=True))
+
+        def list_all():
+            # Re-read the affected rows by pk, independent of the original filters.
+            return list(queryset.model.objects.filter(pk__in=ids_list).all())
+
+        # Revision bump, data update and post-state re-read happen in one transaction.
+        with transaction.atomic(savepoint=False):
+            if collect_prev_data:
+                objs = list_all()
+                if not objs:
+                    return
+
+                # Snapshot current tracked values before the update overwrites them.
+                for obj in objs:
+                    prev_data_mapper[obj.pk] = getattr(obj, FIELDS_TRACKER_FIELD_NAME).current()
+
+            current_dt = timezone.now()
+            # Every CQRS update increments cqrs_revision and stamps cqrs_updated.
+            result = queryset.update(
+                cqrs_revision=F('cqrs_revision') + 1,
+                cqrs_updated=current_dt,
+                **kwargs,
+            )
+
+            # Re-read so that signal receivers observe the post-update state.
+            objs = list_all()
+            if collect_prev_data:
+                for obj in objs:
+                    setattr(obj, TRACKED_FIELDS_ATTR_NAME, prev_data_mapper.get(obj.pk))
+
+        queryset.model.call_post_update(objs, using=queryset.db)
+
+        return result
+
+
+class ReplicaManager(Manager):
+    def save_instance(
+        self,
+        master_data: dict,
+        previous_data: dict = None,
+        sync: bool = False,
+        meta: dict = None,
+    ):
+        """This method saves (creates or updates) model instance from CQRS master instance data.
+
+        Args:
+            master_data (dict): CQRS master instance data.
+            previous_data (dict): Previous values for tracked fields.
+            sync (bool): Sync package flag.
+            meta (dict): Payload metadata, if exists.
+
+        Returns:
+            (django.db.models.Model): Model instance.
+        """
+        mapped_data = self._map_save_data(master_data)
+        mapped_previous_data = self._map_previous_data(previous_data) if previous_data else None
+        # Mapping failures are logged inside the helpers; on failure nothing is
+        # saved and None is returned implicitly.
+        if mapped_data:
+            pk_name = self._get_model_pk_name()
+            pk_value = mapped_data[pk_name]
+            f_kwargs = {pk_name: pk_value}
+
+            qs = self.model._default_manager.filter(**f_kwargs).order_by()
+            # Optional row-level lock to serialize concurrent CQRS updates.
+            if self.model.CQRS_SELECT_FOR_UPDATE:
+                qs = qs.select_for_update()
+
+            instance = qs.first()
+
+            if instance:
+                return self.update_instance(
+                    instance,
+                    mapped_data,
+                    previous_data=mapped_previous_data,
+                    sync=sync,
+                    meta=meta,
+                )
+
+            return self.create_instance(
+                mapped_data,
+                previous_data=mapped_previous_data,
+                sync=sync,
+                meta=meta,
+            )
+
+    def create_instance(
+        self,
+        mapped_data: dict,
+        previous_data: dict = None,
+        sync: bool = False,
+        meta: dict = None,
+    ):
+        """This method creates model instance from mapped CQRS master instance data.
+
+        Args:
+            mapped_data (dict): Mapped CQRS master instance data.
+            previous_data (dict): Previous values for tracked fields.
+            sync (bool): Sync package flag.
+            meta (dict): Payload metadata, if exists.
+
+        Returns:
+            (django.db.models.Model): ReplicaMixin instance.
+        """
+        # Pass meta through only when the model opts in via CQRS_META.
+        f_kw = {'previous_data': previous_data}
+        if self.model.CQRS_META:
+            f_kw['meta'] = meta
+
+        # On DB/validation errors the failure is logged and None returned implicitly.
+        try:
+            return self.model.cqrs_create(sync, mapped_data, **f_kw)
+        except (Error, ValidationError) as e:
+            pk_value = mapped_data[self._get_model_pk_name()]
+
+            logger.error(
+                '{0}\nCQRS create error: pk = {1} ({2}).'.format(
+                    str(e),
+                    pk_value,
+                    self.model.CQRS_ID,
+                ),
+            )
+
+    def update_instance(
+        self,
+        instance,
+        mapped_data: dict,
+        previous_data: dict = None,
+        sync: bool = False,
+        meta: dict = None,
+    ):
+        """This method updates model instance from mapped CQRS master instance data.
+
+        Args:
+            instance (django.db.models.Model): ReplicaMixin model instance.
+            mapped_data (dict): Mapped CQRS master instance data.
+            previous_data (dict): Previous values for tracked fields.
+            sync (bool): Sync package flag.
+            meta (dict): Payload metadata, if exists.
+
+        Returns:
+            (django.db.models.Model): ReplicaMixin instance.
+        """
+        pk_value = mapped_data[self._get_model_pk_name()]
+        current_cqrs_revision = mapped_data['cqrs_revision']
+        existing_cqrs_revision = instance.cqrs_revision
+
+        # Revision bookkeeping: sync packages tolerate downgrades with a warning;
+        # regular packages reject downgrades and duplicates, and warn about gaps.
+        if sync:
+            if existing_cqrs_revision > current_cqrs_revision:
+                w_tpl = (
+                    'CQRS revision downgrade on sync: pk = {0}, '
+                    'cqrs_revision = new {1} / existing {2} ({3}).'
+                )
+                logger.warning(
+                    w_tpl.format(
+                        pk_value,
+                        current_cqrs_revision,
+                        existing_cqrs_revision,
+                        self.model.CQRS_ID,
+                    ),
+                )
+
+        else:
+            if existing_cqrs_revision > current_cqrs_revision:
+                e_tpl = (
+                    'Wrong CQRS sync order: pk = {0}, '
+                    'cqrs_revision = new {1} / existing {2} ({3}).'
+                )
+                logger.error(
+                    e_tpl.format(
+                        pk_value,
+                        current_cqrs_revision,
+                        existing_cqrs_revision,
+                        self.model.CQRS_ID,
+                    ),
+                )
+                # Stale package: keep the replica untouched.
+                return instance
+
+            if existing_cqrs_revision == current_cqrs_revision:
+                logger.error(
+                    'Received duplicate CQRS data: pk = {0}, cqrs_revision = {1} ({2}).'.format(
+                        pk_value,
+                        current_cqrs_revision,
+                        self.model.CQRS_ID,
+                    ),
+                )
+                # A duplicate at revision 0 suggests two concurrent create packages.
+                if current_cqrs_revision == 0:
+                    logger.warning(
+                        'CQRS potential creation race condition: pk = {0} ({1}).'.format(
+                            pk_value,
+                            self.model.CQRS_ID,
+                        ),
+                    )
+
+                return instance
+
+            if current_cqrs_revision != instance.cqrs_revision + 1:
+                w_tpl = (
+                    'Lost or filtered out {0} CQRS packages: pk = {1}, cqrs_revision = {2} ({3})'
+                )
+                logger.warning(
+                    w_tpl.format(
+                        current_cqrs_revision - instance.cqrs_revision - 1,
+                        pk_value,
+                        current_cqrs_revision,
+                        self.model.CQRS_ID,
+                    ),
+                )
+
+        # Pass meta through only when the model opts in via CQRS_META.
+        f_kw = {'previous_data': previous_data}
+        if self.model.CQRS_META:
+            f_kw['meta'] = meta
+
+        # On DB/validation errors the failure is logged and None returned implicitly.
+        try:
+            return instance.cqrs_update(sync, mapped_data, **f_kw)
+        except (Error, ValidationError) as e:
+            logger.error(
+                '{0}\nCQRS update error: pk = {1}, cqrs_revision = {2} ({3}).'.format(
+                    str(e),
+                    pk_value,
+                    current_cqrs_revision,
+                    self.model.CQRS_ID,
+                ),
+            )
+
+    def delete_instance(self, master_data: dict) -> bool:
+        """This method deletes model instance from mapped CQRS master instance data.
+
+        Args:
+            master_data (dict): CQRS master instance data.
+
+        Returns:
+            Flag, if delete operation is successful (even if nothing was deleted).
+        """
+        mapped_data = self._map_delete_data(master_data)
+
+        if mapped_data:
+            pk_name = self._get_model_pk_name()
+            pk_value = mapped_data[pk_name]
+            try:
+                self.model._default_manager.filter(**{pk_name: pk_value}).delete()
+                return True
+            except Error as e:
+                logger.error(
+                    '{0}\nCQRS delete error: pk = {1} ({2}).'.format(
+                        str(e),
+                        pk_value,
+                        self.model.CQRS_ID,
+                    ),
+                )
+
+        return False
+
+    def _map_previous_data(self, previous_data):
+        """Rename tracked previous values per CQRS_MAPPING and drop unknown fields."""
+        if self.model.CQRS_MAPPING is None:
+            return previous_data
+
+        mapped_previous_data = {}
+
+        for master_name, replica_name in self.model.CQRS_MAPPING.items():
+            if master_name not in previous_data:
+                continue
+
+            mapped_previous_data[replica_name] = previous_data[master_name]
+        mapped_previous_data = self._remove_excessive_data(mapped_previous_data)
+        return mapped_previous_data
+
+    def _map_save_data(self, master_data):
+        """Map master payload to replica field names; return None if data is invalid."""
+        if not self._cqrs_fields_are_filled(master_data):
+            return
+
+        mapped_data = self._make_initial_mapping(master_data)
+        if not mapped_data:
+            return
+
+        if self._get_model_pk_name() not in mapped_data:
+            self._log_pk_data_error()
+            return
+
+        # Custom serialization models manage their own field validation.
+        if self.model.CQRS_CUSTOM_SERIALIZATION:
+            return mapped_data
+
+        mapped_data = self._remove_excessive_data(mapped_data)
+
+        if self._all_required_fields_are_filled(mapped_data):
+            return mapped_data
+
+    def _make_initial_mapping(self, master_data):
+        """Apply CQRS_MAPPING renaming; return None and log on missing master fields."""
+        if self.model.CQRS_MAPPING is None:
+            return master_data
+
+        mapped_data = {
+            'cqrs_revision': master_data['cqrs_revision'],
+            'cqrs_updated': master_data['cqrs_updated'],
+        }
+        for master_name, replica_name in self.model.CQRS_MAPPING.items():
+            if master_name not in master_data:
+                logger.error(
+                    'Bad master-replica mapping for {0} ({1}).'.format(
+                        master_name,
+                        self.model.CQRS_ID,
+                    ),
+                )
+                return
+
+            mapped_data[replica_name] = master_data[master_name]
+        return mapped_data
+
+    def _remove_excessive_data(self, data):
+        """Drop keys that do not correspond to concrete model fields."""
+        opts = self.model._meta
+        possible_field_names = {f.name for f in opts.fields}
+        return {k: v for k, v in data.items() if k in possible_field_names}
+
+    def _all_required_fields_are_filled(self, mapped_data):
+        """Check that every non-nullable model field is present in mapped data."""
+        opts = self.model._meta
+
+        required_field_names = {f.name for f in opts.fields if not f.null}
+        if not (required_field_names - set(mapped_data.keys())):
+            return True
+
+        logger.error(
+            'Not all required CQRS fields are provided in data ({0}).'.format(self.model.CQRS_ID),
+        )
+        return False
+
+    def _map_delete_data(self, master_data):
+        """Build delete-filter data; return None and log if required keys are missing."""
+        if 'id' not in master_data:
+            self._log_pk_data_error()
+            return
+
+        if not self._cqrs_fields_are_filled(master_data):
+            return
+
+        return {
+            self._get_model_pk_name(): master_data['id'],
+            'cqrs_revision': master_data['cqrs_revision'],
+            'cqrs_updated': master_data['cqrs_updated'],
+        }
+
+    def _cqrs_fields_are_filled(self, data):
+        """Check that the CQRS service fields are present in the payload."""
+        if 'cqrs_revision' in data and 'cqrs_updated' in data:
+            return True
+
+        logger.error('CQRS sync fields are not provided in data ({0}).'.format(self.model.CQRS_ID))
+        return False
+
+    def _log_pk_data_error(self):
+        """Log a payload-without-pk error for this model's CQRS_ID."""
+        logger.error('CQRS PK is not provided in data ({0}).'.format(self.model.CQRS_ID))
+
+    def _get_model_pk_name(self):
+        """Return the name of the model's primary key field."""
+        return self.model._meta.pk.name

+ 144 - 0
dj_cqrs/metas.py

@@ -0,0 +1,144 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+from django.db.models import base
+
+from dj_cqrs.constants import ALL_BASIC_FIELDS
+from dj_cqrs.registries import MasterRegistry, ReplicaRegistry
+from dj_cqrs.signals import MasterSignals
+from dj_cqrs.tracker import CQRSTracker
+
+
+class MasterMeta(base.ModelBase):
+    """Model metaclass that auto-registers CQRS master models on class creation."""
+
+    def __new__(mcs, name, bases, attrs, **kwargs):
+        model_cls = super(MasterMeta, mcs).__new__(mcs, name, bases, attrs, **kwargs)
+
+        # The abstract MasterMixin itself must not be registered.
+        if name != 'MasterMixin':
+            mcs.register(model_cls)
+
+        return model_cls
+
+    @staticmethod
+    def register(model_cls):
+        """Validates CQRS configuration and registers the model with the
+        master registry and the CQRS signal handlers.
+
+        :param dj_cqrs.mixins.MasterMixin model_cls: CQRS Master Model.
+        :raises: AssertionError
+        """
+        _MetaUtils.check_cqrs_id(model_cls)
+        MasterMeta._check_correct_configuration(model_cls)
+
+        if model_cls.CQRS_TRACKED_FIELDS is not None:
+            MasterMeta._check_cqrs_tracked_fields(model_cls)
+            CQRSTracker.add_to_model(model_cls)
+
+        # CQRS_FIELDS is only honored when no custom serializer is set
+        # (see _check_correct_configuration), so only validate it then.
+        if model_cls.CQRS_SERIALIZER is None:
+            MasterMeta._check_cqrs_fields(model_cls)
+
+        MasterRegistry.register_model(model_cls)
+        MasterSignals.register_model(model_cls)
+        return model_cls
+
+    @staticmethod
+    def _check_cqrs_tracked_fields(model_cls):
+        """Check that the CQRS_TRACKED_FIELDS has correct configuration.
+
+        :param dj_cqrs.mixins.MasterMixin model_cls: CQRS Master Model.
+        :raises: AssertionError
+        """
+        tracked_fields = model_cls.CQRS_TRACKED_FIELDS
+        if isinstance(tracked_fields, (list, tuple)):
+            _MetaUtils._check_no_duplicate_names(
+                model_cls,
+                tracked_fields,
+                'CQRS_TRACKED_FIELDS',
+            )
+            _MetaUtils._check_unexisting_names(model_cls, tracked_fields, 'CQRS_TRACKED_FIELDS')
+            return
+
+        # Not a list/tuple: the only other legal value is the '__all__' marker.
+        e = 'Model {0}: Invalid configuration for CQRS_TRACKED_FIELDS'.format(model_cls.__name__)
+        assert isinstance(tracked_fields, str) and tracked_fields == ALL_BASIC_FIELDS, e
+
+    @staticmethod
+    def _check_correct_configuration(model_cls):
+        """Check that model has correct CQRS configuration.
+
+        CQRS_FIELDS and CQRS_SERIALIZER are mutually exclusive.
+
+        :param dj_cqrs.mixins.MasterMixin model_cls: CQRS Master Model.
+        :raises: AssertionError
+        """
+        if model_cls.CQRS_FIELDS != ALL_BASIC_FIELDS:
+            e = "Model {0}: CQRS_FIELDS can't be set together with CQRS_SERIALIZER.".format(
+                model_cls.__name__,
+            )
+            assert model_cls.CQRS_SERIALIZER is None, e
+
+    @staticmethod
+    def _check_cqrs_fields(model_cls):
+        """Check that model has correct CQRS fields configuration.
+
+        :param dj_cqrs.mixins.MasterMixin model_cls: CQRS Master Model.
+        :raises: AssertionError
+        """
+        if model_cls.CQRS_FIELDS != ALL_BASIC_FIELDS:
+            cqrs_field_names = list(model_cls.CQRS_FIELDS)
+            _MetaUtils.check_cqrs_field_setting(model_cls, cqrs_field_names, 'CQRS_FIELDS')
+
+
+class ReplicaMeta(base.ModelBase):
+    """Model metaclass that auto-registers CQRS replica models on class creation."""
+
+    def __new__(mcs, *args):
+        model_cls = super(ReplicaMeta, mcs).__new__(mcs, *args)
+
+        # args[0] is the class name; skip the abstract ReplicaMixin itself.
+        if args[0] != 'ReplicaMixin':
+            mcs.register(model_cls)
+
+        return model_cls
+
+    @staticmethod
+    def register(model_cls):
+        """Validates CQRS configuration and registers the replica model.
+
+        :param dj_cqrs.mixins.ReplicaMixin model_cls: CQRS Replica Model.
+        :raises: AssertionError
+        """
+        _MetaUtils.check_cqrs_id(model_cls)
+        ReplicaMeta._check_cqrs_mapping(model_cls)
+        ReplicaRegistry.register_model(model_cls)
+
+    @staticmethod
+    def _check_cqrs_mapping(model_cls):
+        """Check that model has correct CQRS mapping configuration.
+
+        :param dj_cqrs.mixins.ReplicaMixin model_cls: CQRS Replica Model.
+        :raises: AssertionError
+        """
+        cqrs_mapping = getattr(model_cls, 'CQRS_MAPPING', None)
+        if cqrs_mapping is not None:
+            # The mapping values are the replica-side field names to validate.
+            cqrs_field_names = list(cqrs_mapping.values())
+            _MetaUtils.check_cqrs_field_setting(model_cls, cqrs_field_names, 'CQRS_MAPPING')
+
+
+class _MetaUtils:
+    """Shared assertion helpers for master/replica metaclass validation."""
+
+    @classmethod
+    def check_cqrs_field_setting(cls, model_cls, cqrs_field_names, cqrs_attr):
+        # Composite check used by both CQRS_FIELDS and CQRS_MAPPING validation.
+        cls._check_no_duplicate_names(model_cls, cqrs_field_names, cqrs_attr)
+        cls._check_id_in_names(model_cls, cqrs_field_names, cqrs_attr)
+        cls._check_unexisting_names(model_cls, cqrs_field_names, cqrs_attr)
+
+    @staticmethod
+    def check_cqrs_id(model_cls):
+        """Check that CQRS Model has CQRS_ID set up."""
+        assert model_cls.CQRS_ID, 'CQRS_ID must be set for every model, that uses CQRS.'
+
+    @staticmethod
+    def _check_no_duplicate_names(model_cls, cqrs_field_names, cqrs_attr):
+        # A field listed twice is almost certainly a configuration typo.
+        model_name = model_cls.__name__
+
+        e = 'Duplicate names in {0} field for model {1}.'.format(cqrs_attr, model_name)
+        assert len(set(cqrs_field_names)) == len(cqrs_field_names), e
+
+    @staticmethod
+    def _check_unexisting_names(model_cls, cqrs_field_names, cqrs_attr):
+        # Every configured name must refer to an actual concrete model field.
+        opts = model_cls._meta
+        model_name = model_cls.__name__
+
+        model_field_names = {f.name for f in opts.fields}
+        e = '{0} field is not correctly set for model {1}.'.format(cqrs_attr, model_name)
+        assert not set(cqrs_field_names) - model_field_names, e
+
+    @staticmethod
+    def _check_id_in_names(model_cls, cqrs_field_names, cqrs_attr):
+        # The PK must always be synced, otherwise replicas can't match instances.
+        opts = model_cls._meta
+        model_name = model_cls.__name__
+
+        pk_name = opts.pk.name
+        e = 'PK is not in {0} for model {1}.'.format(cqrs_attr, model_name)
+        assert pk_name in cqrs_field_names, e

+ 522 - 0
dj_cqrs/mixins.py

@@ -0,0 +1,522 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import logging
+
+from django.conf import settings
+from django.db import router, transaction
+from django.db.models import (
+    DateField,
+    DateTimeField,
+    F,
+    IntegerField,
+    Manager,
+    Model,
+    UUIDField,
+)
+from django.db.models.expressions import CombinedExpression
+from django.utils.module_loading import import_string
+
+from dj_cqrs.constants import ALL_BASIC_FIELDS, FIELDS_TRACKER_FIELD_NAME, TRACKED_FIELDS_ATTR_NAME
+from dj_cqrs.managers import MasterManager, ReplicaManager
+from dj_cqrs.metas import MasterMeta, ReplicaMeta
+from dj_cqrs.signals import MasterSignals, post_bulk_create, post_update
+
+
+logger = logging.getLogger('django-cqrs')
+
+
+class RawMasterMixin(Model):
+
+    """Base class for MasterMixin. **Users shouldn't use this
+    class directly.**"""
+
+    CQRS_ID = None
+    """Unique CQRS identifier for all microservices."""
+
+    CQRS_PRODUCE = True
+    """If false, no cqrs data is sent through the transport."""
+
+    CQRS_FIELDS = ALL_BASIC_FIELDS
+    """
+    List of fields to include in the CQRS payload.
+    You can also set the fields attribute to the special value '__all__'
+    to indicate that all fields in the model should be used.
+    """
+
+    CQRS_SERIALIZER = None
+    """
+    Optional serializer used to create the instance representation.
+    Must be expressed as a module dotted path string like
+    `mymodule.serializers.MasterModelSerializer`.
+    """
+
+    CQRS_TRACKED_FIELDS = None
+    """
+    List of fields of the main model for which you want to track the changes
+    and send the previous values via transport. You can also set the field
+    attribute to the special value "__all__" to indicate that all fields in
+    the model must be used.
+    """
+
+    objects = Manager()
+
+    cqrs = MasterManager()
+    """Manager that adds needed CQRS queryset methods."""
+
+    cqrs_revision = IntegerField(
+        default=0,
+        help_text='This field must be incremented on any model update. '
+        "It's used to for CQRS sync.",
+    )
+    cqrs_updated = DateTimeField(
+        auto_now=True,
+        help_text='This field must be incremented on every model update. '
+        "It's used to for CQRS sync.",
+    )
+
+    class Meta:
+        abstract = True
+
+    @property
+    def cqrs_saves_count(self):
+        """Shows how many times this instance has been saved within the transaction."""
+        return getattr(self, '_cqrs_saves_count', 0)
+
+    @property
+    def is_initial_cqrs_save(self):
+        """This flag is used to check if instance has already been registered for CQRS update."""
+        return self.cqrs_saves_count < 2
+
+    def reset_cqrs_saves_count(self):
+        """This method is used to automatically reset instance CQRS counters on transaction commit.
+        But this can also be used to control custom behaviour within transaction
+        or in case of rollback,
+        when several sequential transactions are used to change the same instance.
+        """
+        if hasattr(self, '_cqrs_saves_count'):
+            self._cqrs_saves_count = 0
+
+    def save(self, *args, **kwargs):
+        """Saves the instance, maintaining the CQRS service fields.
+
+        Increments ``cqrs_revision`` via an F() expression (atomic at the DB
+        level) for genuine updates, and counts saves inside an atomic block so
+        signal handlers can detect the initial save of a transaction.
+        """
+        update_fields = kwargs.pop('update_fields', None)
+        update_cqrs_fields = kwargs.pop('update_cqrs_fields', self._update_cqrs_fields_default)
+
+        using = kwargs.get('using') or router.db_for_write(self.__class__, instance=self)
+        connection = transaction.get_connection(using)
+        if connection.in_atomic_block:
+            # Track repeated saves within the same transaction.
+            _cqrs_saves_count = self.cqrs_saves_count
+            self._cqrs_saves_count = _cqrs_saves_count + 1
+        else:
+            self.reset_cqrs_saves_count()
+
+        # Bump the revision for full updates of existing rows, or for partial
+        # updates when CQRS-field auto-update is enabled (the service fields
+        # are then forced into update_fields so they are actually written).
+        if (not update_fields) and self.is_initial_cqrs_save and (not self._state.adding):
+            self.cqrs_revision = F('cqrs_revision') + 1
+        elif update_fields and update_cqrs_fields:
+            self.cqrs_revision = F('cqrs_revision') + 1
+            update_fields = set(update_fields)
+            update_fields.update({'cqrs_revision', 'cqrs_updated'})
+
+        kwargs['update_fields'] = update_fields
+
+        self.save_tracked_fields()
+
+        return super(RawMasterMixin, self).save(*args, **kwargs)
+
+    def save_tracked_fields(self):
+        # Snapshot previous values of tracked fields (only on the initial
+        # CQRS save) so they can be shipped as previous_data in the payload.
+        if hasattr(self, FIELDS_TRACKER_FIELD_NAME):
+            tracker = getattr(self, FIELDS_TRACKER_FIELD_NAME)
+            if self.is_initial_cqrs_save:
+                if self._state.adding:
+                    data = tracker.changed_initial()
+                else:
+                    data = tracker.changed()
+                setattr(self, TRACKED_FIELDS_ATTR_NAME, data)
+
+    @property
+    def _update_cqrs_fields_default(self):
+        # Project-wide default for update_cqrs_fields, taken from settings.
+        return settings.CQRS['master']['CQRS_AUTO_UPDATE_FIELDS']
+
+    def to_cqrs_dict(self, using: str = None, sync: bool = False) -> dict:
+        """CQRS serialization for transport payload.
+
+        Args:
+            using (str): The using argument can be used to force the database to use,
+                defaults to None.
+            sync (bool): optional
+
+        Returns:
+            (dict): The serialized instance data.
+        """
+        if self.CQRS_SERIALIZER:
+            data = self._class_serialization(using, sync=sync)
+        else:
+            # Resolve any pending F() expressions from the DB before
+            # serializing plain field values.
+            self._refresh_f_expr_values(using)
+            data = self._common_serialization(using)
+        return data
+
+    def get_tracked_fields_data(self) -> dict:
+        """CQRS serialization for tracked fields to include
+        in the transport payload.
+
+        Returns:
+            (dict): Previous values for tracked fields.
+        """
+        return getattr(self, TRACKED_FIELDS_ATTR_NAME, None)
+
+    def cqrs_sync(self, using: str = None, queue: str = None) -> bool:
+        """Manual instance synchronization.
+
+        Args:
+            using (str): The using argument can be used to force the database
+                to use, defaults to None.
+            queue (str): Syncing can be executed just for a single queue, defaults to None
+                 (all queues).
+
+        Returns:
+            (bool): True if instance can be synced, False otherwise.
+        """
+        if self._state.adding:
+            return False
+
+        if not self.CQRS_SERIALIZER:
+            try:
+                self.refresh_from_db()
+            except self._meta.model.DoesNotExist:
+                return False
+
+        # Reuse the post_save pipeline with sync=True to produce the payload.
+        MasterSignals.post_save(
+            self._meta.model,
+            instance=self,
+            using=using,
+            queue=queue,
+            sync=True,
+        )
+        return True
+
+    def is_sync_instance(self) -> bool:
+        """
+        This method can be overridden to apply syncing only to instances by some rules.
+        For example, only objects with special status or after some creation date, etc.
+
+        Returns:
+            (bool): True if this instance needs to be synced, False otherwise.
+        """
+        return True
+
+    def get_cqrs_meta(self, **kwargs: dict) -> dict:
+        """
+        This method can be overridden to collect model/instance specific metadata.
+
+        Args:
+            kwargs (dict): Signal type, payload data, etc.
+
+        Returns:
+            (dict): Metadata dictionary if it's provided.
+        """
+        generic_meta_func = settings.CQRS['master']['meta_function']
+        if generic_meta_func:
+            return generic_meta_func(obj=self, **kwargs)
+
+        return {}
+
+    @classmethod
+    def relate_cqrs_serialization(cls, queryset):
+        """
+        This method shoud be overriden to optimize database access
+        for example using `select_related` and `prefetch_related`
+        when related models must be included into the master model
+        representation.
+
+        Args:
+            queryset (django.db.models.QuerySet): The initial queryset.
+
+        Returns:
+            (django.db.models.QuerySet): The optimized queryset.
+
+        """
+        return queryset
+
+    def get_custom_cqrs_delete_data(self):
+        """This method should be overridden when additional data is needed in DELETE payload."""
+        pass
+
+    @classmethod
+    def call_post_bulk_create(cls, instances: list, using=None):
+        """Post bulk create signal caller (django doesn't support it by default).
+
+        ``` py3
+
+            # Used automatically by cqrs.bulk_create()
+            instances = model.cqrs.bulk_create(instances)
+        ```
+        """
+        post_bulk_create.send(cls, instances=instances, using=using)
+
+    @classmethod
+    def call_post_update(cls, instances, using=None):
+        """Post bulk update signal caller (django doesn't support it by default).
+
+        ``` py3
+
+            # Used automatically by cqrs.bulk_update()
+            qs = model.objects.filter(k1=v1)
+            model.cqrs.bulk_update(qs, k2=v2)
+        ```
+        """
+        post_update.send(cls, instances=instances, using=using)
+
+    def _common_serialization(self, using):
+        # Default (no-serializer) payload: concrete field values, with
+        # date/datetime/UUID values stringified for JSON transport.
+        opts = self._meta
+
+        if isinstance(self.CQRS_FIELDS, str) and self.CQRS_FIELDS == ALL_BASIC_FIELDS:
+            included_fields = None
+        else:
+            included_fields = self.CQRS_FIELDS
+
+        data = {}
+        for f in opts.fields:
+            if included_fields and (f.name not in included_fields):
+                continue
+
+            value = f.value_from_object(self)
+            if value is not None and isinstance(f, (DateField, DateTimeField, UUIDField)):
+                value = str(value)
+
+            data[f.name] = value
+
+        # We need to include additional fields for synchronisation, f.e. to prevent de-duplication
+        data['cqrs_revision'] = self.cqrs_revision
+        data['cqrs_updated'] = str(self.cqrs_updated)
+
+        return data
+
+    def _class_serialization(self, using, sync=False):
+        # Serializer-based payload. Outside sync, re-fetch the instance via
+        # relate_cqrs_serialization() so related data is loaded efficiently.
+        if sync:
+            instance = self
+        else:
+            db = using if using is not None else self._state.db
+            qs = self.__class__._default_manager.using(db)
+            instance = self.relate_cqrs_serialization(qs).get(pk=self.pk)
+
+        data = self._cqrs_serializer_cls(instance).data
+        data['cqrs_revision'] = instance.cqrs_revision
+        data['cqrs_updated'] = str(instance.cqrs_updated)
+
+        return data
+
+    def _refresh_f_expr_values(self, using):
+        # After save(), fields set to F() expressions hold CombinedExpression
+        # objects, not values; refresh those fields from the DB so they can
+        # be serialized.
+        opts = self._meta
+        fields_to_refresh = []
+        if isinstance(self.cqrs_revision, CombinedExpression):
+            fields_to_refresh.append('cqrs_revision')
+
+        if isinstance(self.CQRS_FIELDS, str) and self.CQRS_FIELDS == ALL_BASIC_FIELDS:
+            included_fields = None
+        else:
+            included_fields = self.CQRS_FIELDS
+
+        for f in opts.fields:
+            if included_fields and (f.name not in included_fields):
+                continue
+
+            value = f.value_from_object(self)
+
+            if value is not None and isinstance(value, CombinedExpression):
+                fields_to_refresh.append(f.name)
+
+        if fields_to_refresh:
+            self.refresh_from_db(fields=fields_to_refresh)
+
+    @property
+    def _cqrs_serializer_cls(self):
+        """Serialization class loader."""
+        # Cache the imported serializer class on the model class itself.
+        if hasattr(self.__class__, '_cqrs_serializer_class'):
+            return self.__class__._cqrs_serializer_class
+
+        try:
+            serializer = import_string(self.CQRS_SERIALIZER)
+            self.__class__._cqrs_serializer_class = serializer
+            return serializer
+        except ImportError:
+            raise ImportError(
+                "Model {0}: CQRS_SERIALIZER can't be imported.".format(self.__class__),
+            )
+
+
+class MasterMixin(RawMasterMixin, metaclass=MasterMeta):
+    """
+    Mixin for the master CQRS model, that will send data updates to it's replicas.
+
+    The MasterMeta metaclass registers every concrete subclass for CQRS
+    signal handling and payload production.
+    """
+
+    class Meta:
+        abstract = True
+
+
+class RawReplicaMixin:
+    """Non-model replica base: receives CQRS messages without any default
+    database operations. Subclasses must implement cqrs_save/cqrs_delete."""
+
+    CQRS_ID = None
+    CQRS_NO_DB_OPERATIONS = True
+    CQRS_META = False
+    CQRS_ONLY_DIRECT_SYNCS = False
+
+    @classmethod
+    def cqrs_save(cls, master_data, **kwargs):
+        raise NotImplementedError
+
+    @classmethod
+    def cqrs_delete(cls, master_data, **kwargs):
+        raise NotImplementedError
+
+    @staticmethod
+    def should_retry_cqrs(current_retry: int, exception=None) -> bool:
+        """Checks if we should retry the message after current attempt.
+
+        Args:
+            current_retry (int): Current number of message retries.
+            exception (Exception): Exception instance raised during message consume.
+
+        Returns:
+            (bool): True if message should be retried, False otherwise.
+        """
+        max_retries = settings.CQRS['replica']['CQRS_MAX_RETRIES']
+        if max_retries is None:
+            # Infinite
+            return True
+
+        return current_retry < max_retries
+
+    @staticmethod
+    def get_cqrs_retry_delay(current_retry: int) -> int:
+        """Returns number of seconds to wait before requeuing the message.
+
+        Args:
+            current_retry (int): Current number of message retries.
+
+        Returns:
+            (int): Delay in seconds.
+        """
+        return settings.CQRS['replica']['CQRS_RETRY_DELAY']
+
+
+class ReplicaMixin(RawReplicaMixin, Model, metaclass=ReplicaMeta):
+    """
+    Mixin for the replica CQRS model, that will receive data updates from master. Models, using
+    this mixin should be readonly, but this is not enforced (f.e. for admin).
+    """
+
+    CQRS_ID = None
+    """Unique CQRS identifier for all microservices."""
+
+    CQRS_MAPPING = None
+    """Mapping of master data field name to replica model field name."""
+
+    CQRS_CUSTOM_SERIALIZATION = False
+    """Set it to True to skip default data check."""
+
+    CQRS_SELECT_FOR_UPDATE = False
+    """Set it to True to acquire lock on instance creation/update."""
+
+    CQRS_NO_DB_OPERATIONS = False
+    """Set it to True to disable any default DB operations for this model."""
+
+    CQRS_META = False
+    """Set it to True to receive meta data for this model."""
+
+    CQRS_ONLY_DIRECT_SYNCS = False
+    """Set it to True to ignore broadcast sync packages and to receive only direct queue syncs."""
+
+    objects = Manager()
+    cqrs = ReplicaManager()
+    """Manager that adds needed CQRS queryset methods."""
+
+    cqrs_revision = IntegerField()
+    cqrs_updated = DateTimeField()
+
+    class Meta:
+        abstract = True
+
+    @classmethod
+    def cqrs_save(
+        cls,
+        master_data: dict,
+        previous_data: dict = None,
+        sync: bool = False,
+        meta: dict = None,
+    ):
+        """This method saves (creates or updates) model instance from CQRS master instance data.
+        This method must not be overridden. Otherwise, sync checks need to be implemented manually.
+
+        Args:
+            master_data (dict): CQRS master instance data.
+            previous_data (dict): Previous values for tracked fields.
+            sync (bool): Sync package flag.
+            meta (dict): Payload metadata, if exists.
+
+        Returns:
+            (django.db.models.Model): Model instance.
+        """
+        if cls.CQRS_NO_DB_OPERATIONS:
+            # Delegate to RawReplicaMixin, which raises NotImplementedError
+            # unless the subclass provides custom handling.
+            return super().cqrs_save(master_data, previous_data=previous_data, sync=sync, meta=meta)
+
+        return cls.cqrs.save_instance(master_data, previous_data, sync, meta)
+
+    @classmethod
+    def cqrs_create(
+        cls,
+        sync: bool,
+        mapped_data: dict,
+        previous_data: dict = None,
+        meta: dict = None,
+    ):
+        """This method creates model instance from CQRS mapped instance data. It must be overridden
+        by replicas of master models with custom serialization.
+
+        Args:
+            sync (dict): Sync package flag.
+            mapped_data (dict): CQRS mapped instance data.
+            previous_data (dict): Previous mapped values for tracked fields.
+            meta (dict): Payload metadata, if exists.
+
+        Returns:
+            (django.db.models.Model): Model instance.
+        """
+        return cls._default_manager.create(**mapped_data)
+
+    def cqrs_update(
+        self,
+        sync: bool,
+        mapped_data: dict,
+        previous_data: dict = None,
+        meta: dict = None,
+    ):
+        """This method updates model instance from CQRS mapped instance data. It must be overridden
+        by replicas of master models with custom serialization.
+
+        Args:
+            sync (dict): Sync package flag.
+            mapped_data (dict): CQRS mapped instance data.
+            previous_data (dict): Previous mapped values for tracked fields.
+            meta (dict): Payload metadata, if exists.
+
+        Returns:
+            (django.db.models.Model): Model instance.
+        """
+
+        for key, value in mapped_data.items():
+            setattr(self, key, value)
+        self.save()
+        return self
+
+    @classmethod
+    def cqrs_delete(cls, master_data: dict, meta: dict = None) -> bool:
+        """This method deletes model instance from mapped CQRS master instance data.
+
+        Args:
+            master_data (dict): CQRS master instance data.
+            meta (dict): Payload metadata, if exists.
+
+        Returns:
+            (bool): Flag, if delete operation is successful (even if nothing was deleted).
+        """
+        if cls.CQRS_NO_DB_OPERATIONS:
+            return super().cqrs_delete(master_data, meta=meta)
+
+        return cls.cqrs.delete_instance(master_data)

+ 51 - 0
dj_cqrs/registries.py

@@ -0,0 +1,51 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import logging
+
+from django.conf import settings
+
+
+logger = logging.getLogger('django-cqrs')
+
+
+class RegistryMixin:
+    """Shared registration logic; subclasses provide the `models` dict
+    mapping CQRS_ID -> model class."""
+
+    @classmethod
+    def register_model(cls, model_cls):
+        """Registration of CQRS model identifiers."""
+
+        # CQRS_ID must be globally unique within a registry.
+        e = "Two models can't have the same CQRS_ID: {0}.".format(model_cls.CQRS_ID)
+        assert model_cls.CQRS_ID not in cls.models, e
+
+        cls.models[model_cls.CQRS_ID] = model_cls
+
+    @classmethod
+    def get_model_by_cqrs_id(cls, cqrs_id):
+        """
+        Returns the model class given its CQRS_ID.
+
+        Args:
+            cqrs_id (str): The CQRS_ID of the model to be retrieved.
+
+        Returns:
+            (django.db.models.Model): The model that correspond to the given CQRS_ID or None if it
+                 has not been registered.
+        """
+        if cqrs_id in cls.models:
+            return cls.models[cqrs_id]
+
+        # Unknown id: log and fall through, returning None implicitly.
+        logger.error('No model with such CQRS_ID: {0}.'.format(cqrs_id))
+
+
+class MasterRegistry(RegistryMixin):
+    # Registry of all CQRS master models, keyed by CQRS_ID.
+    models = {}
+
+
+class ReplicaRegistry(RegistryMixin):
+    # Registry of all CQRS replica models, keyed by CQRS_ID.
+    models = {}
+
+    @classmethod
+    def register_model(cls, model_cls):
+        # Replica services must declare their consumer queue in settings.
+        e = 'CQRS queue must be set for the service, that has replica models.'
+        assert getattr(settings, 'CQRS', {}).get('queue') is not None, e
+
+        super(ReplicaRegistry, cls).register_model(model_cls)

+ 169 - 0
dj_cqrs/signals.py

@@ -0,0 +1,169 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import logging
+
+from django.db import models, transaction
+from django.dispatch import Signal
+from django.utils.timezone import now
+
+from dj_cqrs.constants import SignalType
+from dj_cqrs.controller import producer
+from dj_cqrs.dataclasses import TransportPayload
+from dj_cqrs.utils import get_message_expiration_dt
+
+
+logger = logging.getLogger('django-cqrs')
+
+post_bulk_create = Signal()
+"""
+Signal sent after a bulk create.
+See dj_cqrs.mixins.RawMasterMixin.call_post_bulk_create.
+"""
+
+post_update = Signal()
+"""
+Signal sent after a bulk update.
+See dj_cqrs.mixins.RawMasterMixin.call_post_update.
+"""
+
+
+class MasterSignals:
+    """Signals registry and handlers for CQRS master models."""
+
+    @classmethod
+    def register_model(cls, model_cls):
+        """
+        Registers signals for a model.
+
+        Args:
+            model_cls (dj_cqrs.mixins.MasterMixin): Model class inherited from CQRS MasterMixin.
+        """
+
+        models.signals.post_save.connect(cls.post_save, sender=model_cls)
+        models.signals.post_delete.connect(cls.post_delete, sender=model_cls)
+
+        # Custom signals emitted by cqrs.bulk_create()/bulk_update().
+        post_bulk_create.connect(cls.post_bulk_create, sender=model_cls)
+        post_update.connect(cls.post_bulk_update, sender=model_cls)
+
+    @classmethod
+    def post_save(cls, sender, **kwargs):
+        """
+        Args:
+            sender (dj_cqrs.mixins.MasterMixin): Class or instance inherited from CQRS MasterMixin.
+        """
+        if not sender.CQRS_PRODUCE:
+            return
+
+        # Partial saves that didn't bump cqrs_revision are not CQRS updates.
+        update_fields = kwargs.get('update_fields')
+        if update_fields and ('cqrs_revision' not in update_fields):
+            return
+
+        instance = kwargs['instance']
+        if not instance.is_sync_instance():
+            return
+
+        using = kwargs['using']
+
+        sync = kwargs.get('sync', False)
+        queue = kwargs.get('queue', None)
+
+        # Produce only once per transaction: either there is no atomic block,
+        # or this is the initial CQRS save inside one. Payload is built at
+        # commit time to reflect the committed state.
+        connection = transaction.get_connection(using)
+        if not connection.in_atomic_block or instance.is_initial_cqrs_save:
+            transaction.on_commit(
+                lambda: cls._post_save_produce(sender, instance, using, sync, queue),
+            )
+
+    @classmethod
+    def _post_save_produce(cls, sender, instance, using, sync, queue):
+        # As this method may run 'on_commit', the instance may not exist. In that case, log the
+        # error but don't raise an exception.
+        try:
+            instance.reset_cqrs_saves_count()
+            instance_data = instance.to_cqrs_dict(using, sync=sync)
+            previous_data = instance.get_tracked_fields_data()
+            signal_type = SignalType.SYNC if sync else SignalType.SAVE
+            meta = instance.get_cqrs_meta(
+                instance_data=instance_data,
+                previous_data=previous_data,
+                signal_type=signal_type,
+            )
+        except sender.DoesNotExist:
+            logger.error(
+                f"Can't produce message from master model '{sender.__name__}': "
+                f"The instance doesn't exist (pk={instance.pk})",
+            )
+            return
+
+        payload = TransportPayload(
+            signal_type,
+            sender.CQRS_ID,
+            instance_data,
+            instance.pk,
+            queue,
+            previous_data,
+            expires=get_message_expiration_dt(),
+            meta=meta,
+        )
+        producer.produce(payload)
+
+    @classmethod
+    def post_delete(cls, sender, **kwargs):
+        """
+        Args:
+            sender (dj_cqrs.mixins.MasterMixin): Class or instance inherited from CQRS MasterMixin.
+        """
+        if not sender.CQRS_PRODUCE:
+            return
+
+        instance = kwargs['instance']
+        if not instance.is_sync_instance():
+            return
+
+        # The revision is bumped one final time so replicas treat the delete
+        # as newer than the last save.
+        instance_data = {
+            'id': instance.pk,
+            'cqrs_revision': instance.cqrs_revision + 1,
+            'cqrs_updated': str(now()),
+        }
+
+        data = instance.get_custom_cqrs_delete_data()
+        if data:
+            instance_data['custom'] = data
+
+        signal_type = SignalType.DELETE
+
+        meta = instance.get_cqrs_meta(
+            instance_data=instance_data,
+            signal_type=signal_type,
+        )
+
+        payload = TransportPayload(
+            signal_type,
+            sender.CQRS_ID,
+            instance_data,
+            instance.pk,
+            expires=get_message_expiration_dt(),
+            meta=meta,
+        )
+        # Delete is always in transaction!
+        transaction.on_commit(lambda: producer.produce(payload))
+
+    @classmethod
+    def post_bulk_create(cls, sender, **kwargs):
+        """
+        Args:
+            sender (dj_cqrs.mixins.MasterMixin): Class or instance inherited from CQRS MasterMixin.
+        """
+        cls._post_bulk(sender, **kwargs)
+
+    @classmethod
+    def post_bulk_update(cls, sender, **kwargs):
+        """
+        Args:
+            sender (dj_cqrs.mixins.MasterMixin): Class or instance inherited from CQRS MasterMixin.
+        """
+        cls._post_bulk(sender, **kwargs)
+
+    @classmethod
+    def _post_bulk(cls, sender, **kwargs):
+        # Bulk operations fan out to the per-instance post_save handler.
+        for instance in kwargs['instances']:
+            cls.post_save(sender, instance=instance, using=kwargs['using'])

+ 53 - 0
dj_cqrs/tracker.py

@@ -0,0 +1,53 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+from model_utils import FieldTracker
+from model_utils.tracker import FieldInstanceTracker
+
+from dj_cqrs.constants import ALL_BASIC_FIELDS, FIELDS_TRACKER_FIELD_NAME
+from dj_cqrs.utils import get_json_valid_value
+
+
+class _CQRSTrackerInstance(FieldInstanceTracker):
+    """Per-instance tracker that reports relation fields by field name
+    (not attname) and normalizes values to JSON-safe types."""
+
+    def __init__(self, instance, fields, field_map):
+        super().__init__(instance, fields, field_map)
+        # Map FK attnames (e.g. 'user_id') back to field names (e.g. 'user').
+        self._attr_to_field_map = {
+            f.attname: f.name for f in instance._meta.concrete_fields if f.is_relation
+        }
+
+    def changed(self):
+        changed_fields = super().changed()
+        return {self._attr_to_field_map.get(k, k): v for k, v in changed_fields.items()}
+
+    def changed_initial(self):
+        # On creation every non-None field counts as changed from None.
+        return {field: None for field in self.fields if self.get_field_value(field) is not None}
+
+    def get_field_value(self, field):
+        value = super().get_field_value(field)
+
+        return get_json_valid_value(value)
+
+
+class CQRSTracker(FieldTracker):
+    tracker_class = _CQRSTrackerInstance
+
+    @classmethod
+    def add_to_model(cls, model_cls):
+        """
+        Add the CQRSTracker to a model.
+
+        :param model_cls: the model class to which add the CQRSTracker.
+        :type model_cls: django.db.models.Model
+        """
+        opts = model_cls._meta
+        fields_to_track = []
+        declared = model_cls.CQRS_TRACKED_FIELDS
+
+        for field in opts.concrete_fields:
+            if declared == ALL_BASIC_FIELDS or field.name in declared:
+                # Relations are tracked through their column attribute
+                # (attname, e.g. 'user_id') so raw PK changes are seen.
+                fields_to_track.append(
+                    field.attname if field.is_relation else field.name,
+                )
+
+        tracker = cls(fields=fields_to_track)
+        model_cls.add_to_class(FIELDS_TRACKER_FIELD_NAME, tracker)
+        tracker.finalize_class(model_cls)

+ 17 - 0
dj_cqrs/transport/__init__.py

@@ -0,0 +1,17 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+from django.conf import settings
+from django.utils.module_loading import import_string
+
+from dj_cqrs.transport.base import BaseTransport
+from dj_cqrs.transport.kombu import KombuTransport
+from dj_cqrs.transport.rabbit_mq import RabbitMQTransport
+
+
+try:
+    current_transport = import_string(settings.CQRS['transport'])
+except (AttributeError, ImportError, KeyError):
+    current_transport = None
+
+
+__all__ = ['BaseTransport', 'KombuTransport', 'RabbitMQTransport', current_transport]

+ 38 - 0
dj_cqrs/transport/base.py

@@ -0,0 +1,38 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+
+class BaseTransport:
+    """
+    CQRS pattern can be implemented over any transport (AMQP, HTTP, etc.)
+    All transports need to inherit from this base class.
+    Transport must be set in Django settings:
+
+    ``` py3
+
+        CQRS = {
+            'transport': 'dj_cqrs.transport.rabbit_mq.RabbitMQTransport',
+        }
+    ```
+    """
+
+    # NOTE(review): class-level dict shared by all subclasses; its usage is
+    # not visible in this module — confirm against consumer registration code.
+    consumers = {}
+
+    @staticmethod
+    def produce(payload):
+        """
+        Send data from master model to replicas.
+
+        Args:
+            payload (dj_cqrs.dataclasses.TransportPayload): Transport payload from master model.
+        """
+        raise NotImplementedError
+
+    @staticmethod
+    def consume(*args, **kwargs):
+        """Receive data from master model."""
+        raise NotImplementedError
+
+    @staticmethod
+    def clean_connection(*args, **kwargs):
+        """Clean transport connection. Here you can close all connections that you have"""
+        raise NotImplementedError

+ 225 - 0
dj_cqrs/transport/kombu.py

@@ -0,0 +1,225 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import logging
+
+import ujson
+from django.conf import settings
+from kombu import (
+    Connection,
+    Exchange,
+    Producer,
+    Queue,
+)
+from kombu.exceptions import KombuError
+from kombu.mixins import ConsumerMixin
+
+from dj_cqrs.constants import SignalType
+from dj_cqrs.controller import consumer
+from dj_cqrs.dataclasses import TransportPayload
+from dj_cqrs.registries import ReplicaRegistry
+from dj_cqrs.transport import BaseTransport
+from dj_cqrs.transport.mixins import LoggingMixin
+
+
+logger = logging.getLogger('django-cqrs')
+
+
+class _KombuConsumer(ConsumerMixin):
+    """Kombu consumer binding one queue to all replica routing keys.
+
+    For every registered replica ``cqrs_id`` (optionally filtered through
+    ``cqrs_ids``) the queue is bound both to the broadcast route
+    (``<cqrs_id>``) and to the service-specific sync route
+    (``cqrs.<queue_name>.<cqrs_id>``).
+    """
+
+    def __init__(self, url, exchange_name, queue_name, prefetch_count, callback, cqrs_ids=None):
+        self.connection = Connection(url)
+        self.exchange = Exchange(
+            exchange_name,
+            type='topic',
+            durable=True,
+        )
+        self.queue_name = queue_name
+        self.prefetch_count = prefetch_count
+        # Called for each received message as callback(body, message).
+        self.callback = callback
+        self.queues = []
+        self.cqrs_ids = cqrs_ids
+
+        self._init_queues()
+
+    def _init_queues(self):
+        """Declare the consumer queue and bind it for every replica model."""
+        channel = self.connection.channel()
+        for cqrs_id in ReplicaRegistry.models.keys():
+            if (not self.cqrs_ids) or (cqrs_id in self.cqrs_ids):
+                q = Queue(
+                    self.queue_name,
+                    exchange=self.exchange,
+                    routing_key=cqrs_id,
+                )
+                q.maybe_bind(channel)
+                q.declare()
+                self.queues.append(q)
+
+                # Same underlying queue; this second declaration adds the
+                # service-specific sync routing-key binding.
+                sync_q = Queue(
+                    self.queue_name,
+                    exchange=self.exchange,
+                    routing_key='cqrs.{0}.{1}'.format(self.queue_name, cqrs_id),
+                )
+                sync_q.maybe_bind(channel)
+                sync_q.declare()
+                self.queues.append(sync_q)
+
+    def get_consumers(self, Consumer, channel):
+        """ConsumerMixin hook: one consumer over all declared queue bindings."""
+        return [
+            Consumer(
+                queues=self.queues,
+                callbacks=[self.callback],
+                prefetch_count=self.prefetch_count,
+                auto_declare=True,
+            ),
+        ]
+
+
+class KombuTransport(LoggingMixin, BaseTransport):
+    """Transport class for Kombu."""
+
+    # NOTE(review): not referenced inside this class; presumably kept for
+    # parity with RabbitMQTransport — confirm before removing.
+    CONSUMER_RETRY_TIMEOUT = 5
+
+    @classmethod
+    def clean_connection(cls):
+        """Nothing to do here"""
+        pass
+
+    @classmethod
+    def consume(cls, cqrs_ids=None):
+        """Receive data from master model.
+
+        Args:
+            cqrs_ids (iterable of str or None): Optional subset of cqrs ids to consume.
+        """
+        queue_name, prefetch_count = cls._get_consumer_settings()
+        url, exchange_name = cls._get_common_settings()
+
+        # Local name shadows the imported ``consumer`` module inside this
+        # method only; ``_consume_message`` still sees the module.
+        consumer = _KombuConsumer(
+            url,
+            exchange_name,
+            queue_name,
+            prefetch_count,
+            cls._consume_message,
+            cqrs_ids=cqrs_ids,
+        )
+        consumer.run()
+
+    @classmethod
+    def produce(cls, payload):
+        """
+        Send data from master model to replicas.
+
+        Args:
+            payload (dj_cqrs.dataclasses.TransportPayload): Transport payload from master model.
+        """
+        url, exchange_name = cls._get_common_settings()
+
+        connection = None
+        try:
+            # Decided not to create context-manager to stay within the class
+            connection, channel = cls._get_producer_kombu_objects(url, exchange_name)
+            exchange = cls._create_exchange(exchange_name)
+            cls._produce_message(channel, exchange, payload)
+            cls.log_produced(payload)
+        except KombuError:
+            logger.error(
+                "CQRS couldn't be published: pk = {0} ({1}).".format(
+                    payload.pk,
+                    payload.cqrs_id,
+                ),
+            )
+        finally:
+            if connection:
+                connection.close()
+
+    @classmethod
+    def _consume_message(cls, body, message):
+        """Parse an incoming message body, validate it and apply it to the
+        replica model; ack on success, reject otherwise."""
+        try:
+            dct = ujson.loads(body)
+        except ValueError:
+            logger.error("CQRS couldn't be parsed: {0}.".format(body))
+            message.reject()
+            return
+
+        required_keys = {'instance_pk', 'signal_type', 'cqrs_id', 'instance_data'}
+        for key in required_keys:
+            if key not in dct:
+                msg = "CQRS couldn't proceed, %s isn't found in body: %s."
+                logger.error(msg, key, body)
+                message.reject()
+                return
+
+        payload = TransportPayload(
+            dct['signal_type'],
+            dct['cqrs_id'],
+            dct['instance_data'],
+            dct.get('instance_pk'),
+            previous_data=dct.get('previous_data'),
+            correlation_id=dct.get('correlation_id'),
+        )
+
+        cls.log_consumed(payload)
+        instance = consumer.consume(payload)
+
+        if instance:
+            message.ack()
+            cls.log_consumed_accepted(payload)
+        else:
+            message.reject()
+            cls.log_consumed_denied(payload)
+
+    @classmethod
+    def _produce_message(cls, channel, exchange, payload):
+        """Publish the payload to the exchange as a persistent message."""
+        routing_key = cls._get_produced_message_routing_key(payload)
+        producer = Producer(
+            channel,
+            exchange=exchange,
+            auto_declare=True,
+        )
+        producer.publish(
+            ujson.dumps(payload.to_dict()),
+            routing_key=routing_key,
+            mandatory=True,
+            content_type='text/plain',
+            delivery_mode=2,
+        )
+
+    @staticmethod
+    def _get_produced_message_routing_key(payload):
+        """SYNC messages for a specific queue get a service-scoped route."""
+        routing_key = payload.cqrs_id
+
+        if payload.signal_type == SignalType.SYNC and payload.queue:
+            routing_key = 'cqrs.{0}.{1}'.format(payload.queue, routing_key)
+
+        return routing_key
+
+    @classmethod
+    def _get_producer_kombu_objects(cls, url, exchange_name):
+        """Open a fresh connection + channel for producing one message."""
+        connection = Connection(url)
+        channel = connection.channel()
+        return connection, channel
+
+    @staticmethod
+    def _create_exchange(exchange_name):
+        """Build the durable topic exchange object (not yet declared)."""
+        return Exchange(
+            exchange_name,
+            type='topic',
+            durable=True,
+        )
+
+    @staticmethod
+    def _get_common_settings():
+        """Return (broker url, exchange name) with defaults applied."""
+        url = settings.CQRS.get('url', 'amqp://localhost')
+        exchange = settings.CQRS.get('exchange', 'cqrs')
+        return (
+            url,
+            exchange,
+        )
+
+    @staticmethod
+    def _get_consumer_settings():
+        """Return (queue name, prefetch count); 'queue' is a required setting."""
+        queue_name = settings.CQRS['queue']
+        consumer_prefetch_count = settings.CQRS.get('consumer_prefetch_count', 10)
+        return (
+            queue_name,
+            consumer_prefetch_count,
+        )

+ 97 - 0
dj_cqrs/transport/mixins.py

@@ -0,0 +1,97 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import logging
+
+
+logger = logging.getLogger('django-cqrs')
+
+
+class LoggingMixin:
+    _BASE_PAYLOAD_LOG_TEMPLATE = 'CQRS is %s: pk = %s (%s), correlation_id = %s.'
+
+    @staticmethod
+    def log_consumed(payload):
+        """
+        Args:
+            payload (dj_cqrs.dataclasses.TransportPayload): Transport payload from master model.
+        """
+        msg = 'CQRS is received: pk = %s (%s), correlation_id = %s.'
+        logger.info(msg, payload.pk, payload.cqrs_id, payload.correlation_id)
+
+    @staticmethod
+    def log_consumed_accepted(payload):
+        """
+        Args:
+            payload (dj_cqrs.dataclasses.TransportPayload): Transport payload from master model.
+        """
+        msg = 'CQRS is applied: pk = %s (%s), correlation_id = %s.'
+        logger.info(msg, payload.pk, payload.cqrs_id, payload.correlation_id)
+
+    @staticmethod
+    def log_consumed_denied(payload):
+        """
+        Args:
+            payload (dj_cqrs.dataclasses.TransportPayload): Transport payload from master model.
+        """
+        msg = 'CQRS is denied: pk = %s (%s), correlation_id = %s.'
+        logger.warning(msg, payload.pk, payload.cqrs_id, payload.correlation_id)
+
+    @staticmethod
+    def log_consumed_failed(payload):
+        """
+        Args:
+            payload (dj_cqrs.dataclasses.TransportPayload): Transport payload from master model.
+        """
+        msg = ('CQRS is failed: pk = %s (%s), correlation_id = %s, retries = %s.',)
+        logger.warning(
+            msg,
+            payload.pk,
+            payload.cqrs_id,
+            payload.correlation_id,
+            payload.retries,
+        )
+
+    @staticmethod
+    def log_dead_letter(payload):
+        """
+        Args:
+            payload (dj_cqrs.dataclasses.TransportPayload): Transport payload from master model.
+        """
+        msg = 'CQRS is added to dead letter queue: pk = %s (%s), correlation_id = %s.'
+        logger.warning(msg, payload.pk, payload.cqrs_id, payload.correlation_id)
+
+    @staticmethod
+    def log_delayed(payload, delay, eta):
+        """
+        Args:
+            payload (dj_cqrs.dataclasses.TransportPayload): Transport payload from master model.
+            delay (int): Seconds to wait before requeuing message.
+            eta (datetime): Requeuing datetime.
+        """
+        msg = ('CQRS is delayed: pk = %s (%s), correlation_id = %s, delay = %s sec, eta = %s.',)
+        logger.warning(
+            msg,
+            payload.pk,
+            payload.cqrs_id,
+            payload.correlation_id,
+            delay,
+            eta,
+        )
+
+    @staticmethod
+    def log_requeued(payload):
+        """
+        Args:
+            payload (dj_cqrs.dataclasses.TransportPayload): Transport payload from master model.
+        """
+        msg = ('CQRS is requeued: pk = %s (%s), correlation_id = %s.',)
+        logger.warning(msg, payload.pk, payload.cqrs_id, payload.correlation_id)
+
+    @staticmethod
+    def log_produced(payload):
+        """
+        Args:
+            payload (dj_cqrs.dataclasses.TransportPayload): Transport payload from master model.
+        """
+        msg = 'CQRS is published: pk = %s (%s), correlation_id = %s.'
+        logger.info(msg, payload.pk, payload.cqrs_id, payload.correlation_id)

+ 13 - 0
dj_cqrs/transport/mock.py

@@ -0,0 +1,13 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+from dj_cqrs.transport import BaseTransport
+
+
+class TransportMock(BaseTransport):
+    """In-process transport for tests: produce hands the payload straight to consume."""
+
+    @staticmethod
+    def produce(payload):
+        # Short-circuit the broker entirely.
+        return TransportMock.consume(payload)
+
+    @staticmethod
+    def consume(payload=None, **kwargs):
+        # Echo the payload back unchanged.
+        return payload

+ 439 - 0
dj_cqrs/transport/rabbit_mq.py

@@ -0,0 +1,439 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import logging
+import time
+from datetime import timedelta
+from socket import gaierror
+from urllib.parse import unquote, urlparse
+
+import ujson
+from django.conf import settings
+from django.utils import timezone
+from pika import (
+    BasicProperties,
+    BlockingConnection,
+    ConnectionParameters,
+    credentials,
+    exceptions,
+)
+from pika.adapters.utils.connection_workflow import AMQPConnectorException
+
+from dj_cqrs.constants import DEFAULT_DEAD_MESSAGE_TTL, SignalType
+from dj_cqrs.controller import consumer
+from dj_cqrs.dataclasses import TransportPayload
+from dj_cqrs.delay import DelayMessage, DelayQueue
+from dj_cqrs.registries import ReplicaRegistry
+from dj_cqrs.transport import BaseTransport
+from dj_cqrs.transport.mixins import LoggingMixin
+from dj_cqrs.utils import get_delay_queue_max_size, get_messages_prefetch_count_per_worker
+
+
+logger = logging.getLogger('django-cqrs')
+
+
+class RabbitMQTransport(LoggingMixin, BaseTransport):
+    """Transport class for RabbitMQ."""
+
+    # Seconds to sleep before reconnecting after an AMQP error.
+    CONSUMER_RETRY_TIMEOUT = 5
+    # Number of extra publish attempts after a failed produce.
+    PRODUCER_RETRIES = 1
+
+    # Shared producer connection/channel, reused for SYNC publishing only.
+    _producer_connection = None
+    _producer_channel = None
+
+    @classmethod
+    def clean_connection(cls):
+        """Clean the RabbitMQ connection."""
+        connection = cls._producer_connection
+        if connection and not connection.is_closed:
+            try:
+                connection.close()
+            except (exceptions.StreamLostError, exceptions.ConnectionClosed, ConnectionError):
+                logger.warning('Connection was closed or is closing. Skip it...')
+
+        cls._producer_connection = None
+        cls._producer_channel = None
+
+    @classmethod
+    def consume(cls, cqrs_ids=None):
+        """Receive data from master model.
+
+        Args:
+            cqrs_ids (iterable of str or None): Optional subset of cqrs ids to consume.
+        """
+        consumer_rabbit_settings = cls._get_consumer_settings()
+        common_rabbit_settings = cls._get_common_settings()
+
+        # Endless consume loop; reconnects on AMQP/DNS errors.
+        while True:
+            connection = None
+            try:
+                delay_queue = DelayQueue(max_size=get_delay_queue_max_size())
+                connection, channel, consumer_generator = cls._get_consumer_rmq_objects(
+                    *(common_rabbit_settings + consumer_rabbit_settings),
+                    cqrs_ids=cqrs_ids,
+                )
+
+                for method_frame, properties, body in consumer_generator:
+                    # method_frame is None on inactivity timeout; in either
+                    # case flush delayed messages whose ETA has passed.
+                    if method_frame is not None:
+                        cls._consume_message(
+                            channel,
+                            method_frame,
+                            properties,
+                            body,
+                            delay_queue,
+                        )
+                    cls._process_delay_messages(channel, delay_queue)
+            except (
+                exceptions.AMQPError,
+                exceptions.ChannelError,
+                exceptions.ReentrancyError,
+                gaierror,
+            ):
+                logger.warning('AMQP connection error. Reconnecting...', exc_info=True)
+                time.sleep(cls.CONSUMER_RETRY_TIMEOUT)
+            finally:
+                if connection and not connection.is_closed:
+                    connection.close()
+
+    @classmethod
+    def produce(cls, payload):
+        """
+        Send data from master model to replicas.
+
+        Args:
+            payload (dj_cqrs.dataclasses.TransportPayload): Transport payload from master model.
+        """
+        cls._produce_with_retries(payload, retries=cls.PRODUCER_RETRIES)
+
+    @classmethod
+    def _produce_with_retries(cls, payload, retries):
+        """Publish the payload, reconnecting and retrying up to ``retries`` times."""
+        try:
+            rmq_settings = cls._get_common_settings()
+            exchange = rmq_settings[-1]
+            # Decided not to create context-manager to stay within the class
+            _, channel = cls._get_producer_rmq_objects(
+                *rmq_settings,
+                signal_type=payload.signal_type,
+            )
+
+            cls._produce_message(channel, exchange, payload)
+            cls.log_produced(payload)
+        except (
+            exceptions.AMQPError,
+            exceptions.ChannelError,
+            exceptions.ReentrancyError,
+            AMQPConnectorException,
+            AssertionError,
+        ) as e:
+            # in case of any error - close connection and try to reconnect
+            cls.clean_connection()
+
+            base_log_message = "CQRS couldn't be published: pk = {0} ({1}).".format(
+                payload.pk,
+                payload.cqrs_id,
+            )
+            if not retries:
+                logger.exception(base_log_message)
+                return
+
+            logger.warning(
+                '{0} Error: {1}. Reconnect...'.format(
+                    base_log_message,
+                    e.__class__.__name__,
+                ),
+            )
+
+            cls._produce_with_retries(payload, retries - 1)
+
+    @classmethod
+    def _consume_message(cls, ch, method, properties, body, delay_queue):
+        """Parse, validate and apply one incoming message.
+
+        Malformed messages are rejected without requeue; expired payloads go
+        to the dead letter queue; consume failures are delegated to
+        ``_fail_message`` for retry/dead-letter handling.
+        """
+        try:
+            dct = ujson.loads(body)
+        except ValueError:
+            logger.error("CQRS couldn't be parsed: {0}.".format(body))
+            ch.basic_reject(delivery_tag=method.delivery_tag, requeue=False)
+            return
+
+        required_keys = {'instance_pk', 'signal_type', 'cqrs_id', 'instance_data'}
+        for key in required_keys:
+            if key not in dct:
+                msg = "CQRS couldn't proceed, %s isn't found in body: %s."
+                logger.error(msg, key, body)
+                ch.basic_reject(delivery_tag=method.delivery_tag, requeue=False)
+                return
+
+        payload = TransportPayload.from_message(dct)
+        cls.log_consumed(payload)
+
+        delivery_tag = method.delivery_tag
+        if payload.is_expired():
+            cls._add_to_dead_letter_queue(ch, payload)
+            cls._nack(ch, delivery_tag)
+            return
+
+        instance, exception = None, None
+        try:
+            instance = consumer.consume(payload)
+        except Exception as e:
+            exception = e
+            logger.error('CQRS service exception', exc_info=True)
+
+        if instance and exception is None:
+            cls._ack(ch, delivery_tag, payload)
+        else:
+            cls._fail_message(
+                ch,
+                delivery_tag,
+                payload,
+                exception,
+                delay_queue,
+            )
+
+    @classmethod
+    def _fail_message(cls, channel, delivery_tag, payload, exception, delay_queue):
+        """Decide whether a failed message is retried (delayed) or dead-lettered."""
+        cls.log_consumed_failed(payload)
+        model_cls = ReplicaRegistry.get_model_by_cqrs_id(payload.cqrs_id)
+        if model_cls is None:
+            logger.error('Model for cqrs_id {0} is not found.'.format(payload.cqrs_id))
+            cls._nack(channel, delivery_tag)
+            return
+
+        if model_cls.should_retry_cqrs(payload.retries, exception):
+            delay = model_cls.get_cqrs_retry_delay(payload.retries)
+            cls._delay_message(channel, delivery_tag, payload, delay, delay_queue)
+        else:
+            cls._add_to_dead_letter_queue(channel, payload)
+            cls._nack(channel, delivery_tag)
+
+    @classmethod
+    def _delay_message(cls, channel, delivery_tag, payload, delay, delay_queue):
+        """Park the message in the in-memory delay queue until its ETA."""
+        if delay_queue.full():
+            # Memory limits handling, requeuing message with lowest ETA
+            requeue_message = delay_queue.get()
+            cls._requeue_message(
+                channel,
+                requeue_message.delivery_tag,
+                requeue_message.payload,
+            )
+
+        eta = timezone.now() + timedelta(seconds=delay)
+        delay_message = DelayMessage(delivery_tag, payload, eta)
+        delay_queue.put(delay_message)
+        cls.log_delayed(payload, delay, delay_message.eta)
+
+    @classmethod
+    def _add_to_dead_letter_queue(cls, channel, payload):
+        """Republish the payload to the dead letter route, with optional TTL."""
+        replica_settings = settings.CQRS.get('replica', {})
+        dead_message_ttl = DEFAULT_DEAD_MESSAGE_TTL
+        if 'dead_message_ttl' in replica_settings:
+            dead_message_ttl = replica_settings['dead_message_ttl']
+
+        expiration = None
+        if dead_message_ttl is not None:
+            expiration = str(dead_message_ttl * 1000)  # milliseconds
+
+        # Flag drives dead-letter routing in _get_produced_message_routing_key.
+        payload.is_dead_letter = True
+        exchange = cls._get_common_settings()[-1]
+        cls._produce_message(channel, exchange, payload, expiration)
+        cls.log_dead_letter(payload)
+
+    @classmethod
+    def _requeue_message(cls, channel, delivery_tag, payload):
+        """Republish the payload with an incremented retry count, then nack it."""
+        payload.retries += 1
+        payload.is_requeue = True
+
+        cls.produce(payload)
+        cls._nack(channel, delivery_tag)
+        cls.log_requeued(payload)
+
+    @classmethod
+    def _process_delay_messages(cls, channel, delay_queue):
+        """Requeue every delayed message whose ETA has been reached."""
+        for delay_message in delay_queue.get_ready():
+            cls._requeue_message(channel, delay_message.delivery_tag, delay_message.payload)
+
+    @classmethod
+    def _produce_message(cls, channel, exchange, payload, expiration=None):
+        """Publish the payload as a persistent, mandatory message."""
+        routing_key = cls._get_produced_message_routing_key(payload)
+
+        channel.basic_publish(
+            exchange=exchange,
+            routing_key=routing_key,
+            body=ujson.dumps(payload.to_dict()),
+            mandatory=True,
+            properties=BasicProperties(
+                content_type='text/plain',
+                delivery_mode=2,  # make message persistent
+                expiration=expiration,
+            ),
+        )
+
+    @classmethod
+    def _get_produced_message_routing_key(cls, payload):
+        """SYNC, dead-letter and requeue messages get service-scoped routes."""
+        routing_key = payload.cqrs_id
+
+        if payload.signal_type == SignalType.SYNC and payload.queue:
+            routing_key = 'cqrs.{0}.{1}'.format(payload.queue, routing_key)
+        elif getattr(payload, 'is_dead_letter', False):
+            dead_letter_queue_name = cls._get_consumer_settings()[1]
+            routing_key = 'cqrs.{0}.{1}'.format(dead_letter_queue_name, routing_key)
+        elif getattr(payload, 'is_requeue', False):
+            queue = cls._get_consumer_settings()[0]
+            routing_key = 'cqrs.{0}.{1}'.format(queue, routing_key)
+
+        return routing_key
+
+    @classmethod
+    def _get_consumer_rmq_objects(
+        cls,
+        host,
+        port,
+        creds,
+        exchange,
+        queue_name,
+        dead_letter_queue_name,
+        prefetch_count,
+        cqrs_ids=None,
+    ):
+        """Open a connection, declare exchange/queues/bindings and start a
+        consumer generator with a short inactivity timeout (used to service
+        the delay queue)."""
+        connection = BlockingConnection(
+            ConnectionParameters(host=host, port=port, credentials=creds),
+        )
+        channel = connection.channel()
+        channel.basic_qos(prefetch_count=prefetch_count)
+        cls._declare_exchange(channel, exchange)
+
+        channel.queue_declare(queue_name, durable=True, exclusive=False)
+        channel.queue_declare(dead_letter_queue_name, durable=True, exclusive=False)
+
+        for cqrs_id, _ in ReplicaRegistry.models.items():
+            if cqrs_ids and cqrs_id not in cqrs_ids:
+                continue
+
+            channel.queue_bind(exchange=exchange, queue=queue_name, routing_key=cqrs_id)
+
+            # Every service must have specific SYNC or requeue routes
+            channel.queue_bind(
+                exchange=exchange,
+                queue=queue_name,
+                routing_key='cqrs.{0}.{1}'.format(queue_name, cqrs_id),
+            )
+
+            # Dead letter
+            channel.queue_bind(
+                exchange=exchange,
+                queue=dead_letter_queue_name,
+                routing_key='cqrs.{0}.{1}'.format(dead_letter_queue_name, cqrs_id),
+            )
+
+        delay_queue_check_timeout = 1  # seconds
+        consumer_generator = channel.consume(
+            queue=queue_name,
+            auto_ack=False,
+            exclusive=False,
+            inactivity_timeout=delay_queue_check_timeout,
+        )
+        return connection, channel, consumer_generator
+
+    @classmethod
+    def _get_producer_rmq_objects(cls, host, port, creds, exchange, signal_type=None):
+        """
+        Use shared connection in case of sync mode, otherwise create new connection for each
+        message
+        """
+        if signal_type == SignalType.SYNC:
+            if cls._producer_connection is None:
+                connection, channel = cls._create_connection(host, port, creds, exchange)
+
+                cls._producer_connection = connection
+                cls._producer_channel = channel
+
+            return cls._producer_connection, cls._producer_channel
+        else:
+            return cls._create_connection(host, port, creds, exchange)
+
+    @classmethod
+    def _create_connection(cls, host, port, creds, exchange):
+        """Open a blocking connection + channel and declare the exchange."""
+        connection = BlockingConnection(
+            ConnectionParameters(
+                host=host,
+                port=port,
+                credentials=creds,
+                blocked_connection_timeout=10,
+            ),
+        )
+        channel = connection.channel()
+        channel.basic_qos(prefetch_count=get_messages_prefetch_count_per_worker())
+        cls._declare_exchange(channel, exchange)
+
+        return connection, channel
+
+    @staticmethod
+    def _declare_exchange(channel, exchange):
+        """Declare the durable topic exchange used by all CQRS routes."""
+        channel.exchange_declare(
+            exchange=exchange,
+            exchange_type='topic',
+            durable=True,
+        )
+
+    @staticmethod
+    def _parse_url(url):
+        """Parse an amqp:// URL into (host, port, user, password), falling
+        back to pika defaults for missing parts."""
+        scheme = urlparse(url).scheme
+        assert scheme == 'amqp', 'Scheme must be "amqp" for RabbitMQTransport.'
+
+        schemeless = url[len(scheme) + 3 :]
+        parts = urlparse('http://' + schemeless)
+
+        return (
+            unquote(parts.hostname or '') or ConnectionParameters.DEFAULT_HOST,
+            parts.port or ConnectionParameters.DEFAULT_PORT,
+            unquote(parts.username or '') or ConnectionParameters.DEFAULT_USERNAME,
+            unquote(parts.password or '') or ConnectionParameters.DEFAULT_PASSWORD,
+        )
+
+    @classmethod
+    def _get_common_settings(cls):
+        """Return (host, port, credentials, exchange); 'url' takes precedence
+        over the individual host/port/user/password settings."""
+        if 'url' in settings.CQRS:
+            host, port, user, password = cls._parse_url(settings.CQRS.get('url'))
+        else:
+            host = settings.CQRS.get('host', ConnectionParameters.DEFAULT_HOST)
+            port = settings.CQRS.get('port', ConnectionParameters.DEFAULT_PORT)
+            user = settings.CQRS.get('user', ConnectionParameters.DEFAULT_USERNAME)
+            password = settings.CQRS.get('password', ConnectionParameters.DEFAULT_PASSWORD)
+        exchange = settings.CQRS.get('exchange', 'cqrs')
+        return (
+            host,
+            port,
+            credentials.PlainCredentials(user, password, erase_on_connect=True),
+            exchange,
+        )
+
+    @staticmethod
+    def _get_consumer_settings():
+        """Return (queue name, dead letter queue name, prefetch count)."""
+        queue_name = settings.CQRS['queue']
+
+        replica_settings = settings.CQRS.get('replica', {})
+        dead_letter_queue_name = 'dead_letter_{0}'.format(queue_name)
+        if 'dead_letter_queue' in replica_settings:
+            dead_letter_queue_name = replica_settings['dead_letter_queue']
+
+        if 'consumer_prefetch_count' in settings.CQRS:
+            logger.warning(
+                "The 'consumer_prefetch_count' setting is ignored for RabbitMQTransport.",
+            )
+        prefetch_count = get_messages_prefetch_count_per_worker()
+
+        return (
+            queue_name,
+            dead_letter_queue_name,
+            prefetch_count,
+        )
+
+    @classmethod
+    def _ack(cls, channel, delivery_tag, payload=None):
+        """Ack the delivery and log acceptance when a payload is given."""
+        channel.basic_ack(delivery_tag)
+        if payload is not None:
+            cls.log_consumed_accepted(payload)
+
+    @classmethod
+    def _nack(cls, channel, delivery_tag, payload=None):
+        """Nack the delivery without requeue; log denial when a payload is given."""
+        channel.basic_nack(delivery_tag, requeue=False)
+        if payload is not None:
+            cls.log_consumed_denied(payload)

+ 82 - 0
dj_cqrs/utils.py

@@ -0,0 +1,82 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import logging
+from datetime import date, datetime, timedelta
+from uuid import UUID
+
+from django.conf import settings
+from django.db import transaction
+from django.utils import timezone
+
+from dj_cqrs.constants import DB_VENDOR_PG, SUPPORTED_TIMEOUT_DB_VENDORS
+from dj_cqrs.logger import install_last_query_capturer
+
+
+logger = logging.getLogger('django-cqrs')
+
+
+def get_message_expiration_dt(message_ttl=None):
+    """Calculates when message should expire.
+
+    :param int or None message_ttl:
+    :return: Expiration datetime or None if infinite
+    :rtype: datetime.datetime or None
+    """
+    # Fall back to the master-level setting when no explicit TTL is given.
+    message_ttl = message_ttl or settings.CQRS['master']['CQRS_MESSAGE_TTL']
+    if message_ttl is None:
+        # Infinite
+        return
+
+    return timezone.now() + timedelta(seconds=message_ttl)
+
+
+def get_delay_queue_max_size():
+    """Returns max allowed number of "waiting" messages in the delay queue.
+
+    :return: Positive integer number or None if infinite
+    :rtype: int
+    """
+    if 'replica' not in settings.CQRS:
+        # No replica settings at all: treat as unlimited.
+        return None
+
+    return settings.CQRS['replica']['delay_queue_max_size']
+
+
+def get_messages_prefetch_count_per_worker():
+    """Returns max allowed number of unacked messages, that can be consumed by a single worker.
+
+    :return: Positive integer number or 0 if infinite
+    :rtype: int
+    """
+    delay_queue_max_size = get_delay_queue_max_size()
+    if delay_queue_max_size is None:
+        # Infinite
+        return 0
+
+    # +1 so the worker can hold one in-flight message beyond the delay queue
+    # capacity — presumably to keep consuming while the queue is full; confirm.
+    return delay_queue_max_size + 1
+
+
+def get_json_valid_value(value):
+    """Coerce non-JSON-serializable scalars (date, datetime, UUID) to str; pass others through."""
+    return str(value) if isinstance(value, (date, datetime, UUID)) else value
+
+
+def apply_query_timeouts(model_cls):  # pragma: no cover
+    """Set a session-level query timeout on the model's database connection.
+
+    Reads ``CQRS['replica']['CQRS_QUERY_TIMEOUT']``; no-op when the value is
+    absent/non-positive or the backend vendor is unsupported. Also installs
+    the last-query capturer for the model.
+    """
+    query_timeout = int(settings.CQRS['replica'].get('CQRS_QUERY_TIMEOUT', 0))
+    if query_timeout <= 0:
+        return
+
+    model_db = model_cls._default_manager.db
+    conn = transaction.get_connection(using=model_db)
+    conn_vendor = getattr(conn, 'vendor', '')
+    if conn_vendor not in SUPPORTED_TIMEOUT_DB_VENDORS:
+        return
+
+    if conn_vendor == DB_VENDOR_PG:
+        # PostgreSQL session statement timeout.
+        statement = 'SET statement_timeout TO %s'
+    else:
+        # MySQL-style session limit — confirm the supported-vendor constant.
+        statement = 'SET SESSION MAX_EXECUTION_TIME=%s'
+
+    with conn.cursor() as cursor:
+        cursor.execute(statement, params=(query_timeout,))
+
+    install_last_query_capturer(model_cls)

+ 24 - 0
docs/admin.md

@@ -0,0 +1,24 @@
+Django Admin
+============
+
+# Synchronize items
+
+Add action to synchronize master items from Django Admin page.
+
+``` py3
+from django.db import models
+from django.contrib import admin
+
+from dj_cqrs.admin_mixins import CQRSAdminMasterSyncMixin
+
+
+class AccountAdmin(CQRSAdminMasterSyncMixin, admin.ModelAdmin):
+    pass
+
+
+admin.site.register(models.Account, AccountAdmin)
+```
+
+-   If necessary, override `_cqrs_sync_queryset` from
+    `CQRSAdminMasterSyncMixin` to adjust the QuerySet and use it for
+    synchronization.

+ 20 - 0
docs/css/custom.css

@@ -0,0 +1,20 @@
+/* Material for MkDocs primary color overrides. */
+:root>* {
+    --md-primary-fg-color: #1565c0;
+    --md-primary-fg-color--light: #1565c0;
+    --md-primary-fg-color--dark: #1565c0;
+}
+
+/* Indent generated API docstrings and set them off with a left rule. */
+div.autodoc-docstring {
+    padding-left: 20px;
+    margin-bottom: 30px;
+    border-left: 5px solid rgba(230, 230, 230);
+}
+
+div.autodoc-members {
+    padding-left: 20px;
+    margin-bottom: 15px;
+}
+
+/* NOTE(review): hides a nav immediately following any non-blurred element;
+   presumably targets Material's secondary TOC — confirm selector intent. */
+:not([data-md-state="blur"]) + nav {
+    display: none;
+}

+ 75 - 0
docs/custom_serialization.md

@@ -0,0 +1,75 @@
+Custom serialization
+====================
+
+By default, **django-cqrs** serializes all the fields declared
+for the master model or the subset specified by the `CQRS_FIELDS`
+attribute.
+
+Sometimes you want to customize how the master model will be serialized,
+for example including some other fields from related models.
+
+!!! warning
+
+    When there are master models with related entities in CQRS_SERIALIZER,
+    it's important to have operations within atomic transactions. CQRS sync
+    will happen on transaction commit. Please, avoid saving master model
+    within transaction more then once to reduce syncing and potential racing
+    on replica side. Updating of related model won't trigger CQRS automatic
+    synchronization for master model. This needs to be done manually.
+
+# Master service
+
+In this case you can control how an instance of the master model is
+serialized providing a serializer class to be used for that:
+
+``` py3
+class MyMasterModel(MasterMixin):
+    CQRS_ID = 'my_model'
+    CQRS_SERIALIZER = 'mymodule.serializers.MyMasterModelSerializer'
+
+    @classmethod
+    def relate_cqrs_serialization(cls, queryset):
+        # Optimize related models fetching here
+        return queryset
+```
+
+If you want to serialize fields from related models, you can optimize
+database access overriding the `relate_cqrs_serialization` method using
+the [select_related](https://docs.djangoproject.com/en/3.0/ref/models/querysets/#select-related)
+and [prefetch_related](https://docs.djangoproject.com/en/3.0/ref/models/querysets/#prefetch-related)
+methods of the [QuerySet](https://docs.djangoproject.com/en/3.0/ref/models/querysets/#queryset-api-reference)
+object.
+
+# Replica service
+
+If you provide a serializer to customize serialization, you must handle
+yourself deserialization for the replica model.
+
+``` py3
+class MyReplicaModel(ReplicaMixin):
+    CQRS_ID = 'my_model'
+    CQRS_CUSTOM_SERIALIZATION = True # bypass default deserialization.
+
+    @classmethod
+    def cqrs_create(cls, sync, mapped_data, previous_data=None, meta=None):
+        # Custom deserialization logic here
+        pass
+
+    def cqrs_update(self, sync, mapped_data, previous_data=None, meta=None):
+        # Custom deserialization logic here
+        pass
+```
+
+!!! note
+
+    A serializer class must follow these rules:
+
+    > -   The constructor must accept the model instance as the only
+    >     positional argument
+    > -   Must have a `data` property that returns a python dictionary as
+    >     the instance representation.
+
+    If your service exposes a RESTful API written using [Django REST
+    framework](https://www.django-rest-framework.org/api-guide/serializers/)
+    you can use your model serializers out of the box also for CQRS
+    serialization.

+ 221 - 0
docs/getting_started.md

@@ -0,0 +1,221 @@
+Getting started
+===============
+
+!!! note
+
+    This guide assumes that you have at least a single instance of
+    [RabbitMQ](https://www.rabbitmq.com/) up and running. For other
+    messaging brokers/transports please see [transports](../transports).
+
+# Requirements
+
+**django-cqrs** works with Python 3.8 or later and has the
+following dependencies:
+
+> -   Django \>= 3.2
+> -   pika \>= 1.0.0
+> -   kombu \>= 4.6
+> -   ujson \>= 3.0.0
+> -   django-model-utils \>= 4.0.0
+> -   python-dateutil \>= 2.4
+
+# Install
+
+**django-cqrs** can be installed from pypi.org with pip:
+
+``` shell
+$ pip install django-cqrs
+```
+
+# Master service
+
+## Configure master service
+
+Add dj_cqrs to Django `INSTALLED_APPS`:
+
+``` py3
+INSTALLED_APPS = [
+    ...
+    'dj_cqrs',
+    ...
+]
+```
+
+and add the **django-cqrs** configuration:
+
+``` py3
+CQRS = {
+    'transport': 'dj_cqrs.transport.RabbitMQTransport',
+    'url': 'amqp://guest:guest@rabbit:5672/'
+}
+```
+
+## Setup master models
+
+To setup master models add the `dj_cqrs.mixins.MasterMixin` to your model.
+
+For example:
+
+``` py3
+from django.db import models
+
+from dj_cqrs.mixins import MasterMixin
+
+
+class MyMasterModel(MasterMixin, models.Model):
+
+    CQRS_ID = 'my_model'  # each model must have its unique CQRS_ID
+
+    my_field = models.CharField(max_length=100)
+    ....
+```
+
+## Create and run migrations for master
+
+Since the `MasterMixin` adds the `cqrs_revision` and `cqrs_updated`
+fields to the model, you must create a new migration for it:
+
+``` shell
+$ ./manage.py makemigrations
+$ ./manage.py migrate
+```
+
+## Run your django application
+
+``` shell
+$ ./manage.py runserver
+```
+
+# Replica service
+
+## Configure replica service
+
+Add dj_cqrs to Django `INSTALLED_APPS`:
+
+``` py3
+INSTALLED_APPS = [
+    ...
+    'dj_cqrs',
+    ...
+]
+```
+
+and add the **django-cqrs** configuration:
+
+``` py3
+CQRS = {
+    'transport': 'dj_cqrs.transport.RabbitMQTransport',
+    'url': 'amqp://guest:guest@rabbit:5672/',
+    'queue': 'my_replica', # Each replica service must have a unique queue.
+}
+```
+
+## Setup replica models
+
+To setup replica models add the `dj_cqrs.mixins.ReplicaMixin` to each
+model.
+
+For example:
+
+``` py3
+from django.db import models
+
+from dj_cqrs.mixins import ReplicaMixin
+
+
+class MyReplicaModel(ReplicaMixin, models.Model):
+
+    CQRS_ID = 'my_model' 
+
+    my_field = models.CharField(max_length=100)
+    ....
+```
+
+## Create and run migrations for replica
+
+Since the `ReplicaMixin` adds the `cqrs_revision` and `cqrs_updated`
+fields to the model, you must create a new migration for it:
+
+``` shell
+$ ./manage.py makemigrations
+$ ./manage.py migrate
+```
+
+## Run consumer process
+
+``` shell
+$ ./manage.py cqrs_consume -w 2
+```
+
+And that's all!
+
+Now every time you modify your master model, changes are replicated to
+all services that have a replica model with the same `CQRS_ID`.
+
+# Use of customized meta data
+
+The library allows us to send customized metadata from the Master models
+to the Replica ones.
+
+## Configuring the metadata for Master model
+
+There are two ways to specify what we want to include in this metadata,
+overriding the master function or setting a default generic function
+that will be executed for all masters.
+
+### Override master function
+
+Inside the Master model class you have to add the **get_cqrs_meta**
+function that will replace the default one (that returns an empty dict).
+For instance if you want to return the access of a given model instance
+inside the metadata you could do the following:
+
+``` py3
+def get_cqrs_meta(self, **kwargs):
+    meta = super().get_cqrs_meta(**kwargs)
+    # Note: assign the whole 'access' dict — item assignment into
+    # meta['access'] would raise KeyError, since meta starts out empty.
+    if self.is_owner():
+        meta['access'] = {'owner': True, 'others': False}
+    else:
+        meta['access'] = {'owner': False, 'others': True}
+    return meta
+```
+
+### Setting a default generic function
+
+In the django settings you could configure a function that will be
+executed every time an event is emitted in any Master:
+
+``` py3
+from ... import get_cqrs_meta
+
+CQRS = {
+    ...
+    'master': {
+        ...
+        'meta_function': get_cqrs_meta,
+    },
+}
+```
+
+## Retrieving the metadata from the Replica model
+
+From the replica model you will now receive an additional parameter
+called **meta** that will contain all metadata set in the Master model.
+These data will be present in the following class functions:
+
+* cqrs_update
+* cqrs_create
+* cqrs_delete
+
+For instance replacing the **cqrs_update** we could do something like:
+
+``` py3
+def cqrs_update(self, sync, mapped_data, previous_data=None, meta=None):
+    if meta and not meta['access']['owner']:
+        pass  # Call asynchronously external system to update some resource.
+    else:
+        pass  # Call asynchronously internal system to update some resource.
+    return super().cqrs_update(sync, mapped_data, previous_data, meta)
+```

BIN
docs/images/favicon.ico


BIN
docs/images/lifecycle.png


BIN
docs/images/logo_full.png


+ 26 - 0
docs/index.md

@@ -0,0 +1,26 @@
+# Welcome to Django CQRS's documentation!
+
+**django-cqrs** is a Django application that implements CQRS
+data synchronisation between several Django microservices.
+
+## CQRS
+
+In [CloudBlue Connect](https://connect.cloudblue.com) we have a rather
+complex Domain Model. There are many microservices, that are [decomposed
+by subdomain](https://microservices.io/patterns/decomposition/decompose-by-subdomain.html)
+and which follow [database-per-service](https://microservices.io/patterns/data/database-per-service.html)
+pattern. These microservices have rich and consistent APIs. They are
+deployed in cloud k8s cluster and scale automatically under load. Many
+of these services aggregate data from other ones and usually [API
+Composition](https://microservices.io/patterns/data/api-composition.html)
+is totally enough. But, some services are working too slowly with API
+JOINS, so another pattern needs to be applied.
+
+The pattern, that solves this issue is called [CQRS - Command Query
+Responsibility Segregation](https://microservices.io/patterns/data/cqrs.html). Core
+idea behind this pattern is that view databases (replicas) are defined
+for efficient querying and DB joins. Applications keep their replicas up
+to date by subscribing to [Domain events](https://microservices.io/patterns/data/domain-event.html)
+published by the service that owns the data. Data is [eventually
+consistent](https://en.wikipedia.org/wiki/Eventual_consistency) and
+that's okay for non-critical business transactions.

+ 152 - 0
docs/lifecycle.md

@@ -0,0 +1,152 @@
+Message lifecycle
+=================
+
+!!! warning
+
+    Expiration, retrying and 'dead letters' queueing are supported in
+    `RabbitMQTransport` only (**on** by default).
+
+**django-cqrs** since version 1.11 provides mechanism for reliable message delivery.
+
+![Message lifecycle](images/lifecycle.png)
+
+# Expiration
+
+| Name               | Default  | Description                                                                           |
+| ------------------ | ---------| ------------------------------------------------------------------------------------- |
+|  CQRS_MESSAGE_TTL  |  86400   | Limits message lifetime in **seconds**, then it will be moved to 'dead letters' queue.|
+
+
+``` py3
+# settings.py
+
+CQRS = {
+    ...
+    'master': {
+        'CQRS_MESSAGE_TTL': 86400, # 1 day
+    },
+}
+```
+
+# Fail
+
+Message assumed as failed when a consumer raises an exception or returns
+negative boolean value (*False*, *None*, etc).
+
+``` py3
+# models.py
+
+class Example(ReplicaMixin, models.Model):
+    CQRS_ID = 'example'
+    ...
+
+    @classmethod
+    def cqrs_create(cls, sync, mapped_data, previous_data=None, meta=None):
+        raise Exception("Some issue during create") # exception could be caught at should_retry_cqrs() method
+
+    def cqrs_update(self, sync, mapped_data, previous_data=None, meta=None):
+        return None # returning negative boolean for retrying
+```
+
+# Retrying
+
+| Name                      | Default  | Description                                                                |
+| ------------------------- | ---------| -------------------------------------------------------------------------- |
+| CQRS_MAX_RETRIES          | 30       | Maximum number of retry attempts. Infinite if *None*, 0 to disable retries.|
+| CQRS_RETRY_DELAY          | 2        | Constant delay in **seconds** between message failure and requeueing.      |
+| CQRS_DELAY_QUEUE_MAX_SIZE | 1000     | Maximum number of delayed messages per worker. Infinite if *None*.         |
+
+
+``` py3
+# settings.py
+
+CQRS = {
+    ...
+    'replica': {
+        'CQRS_MAX_RETRIES': 30, # attempts
+        'CQRS_RETRY_DELAY': 2,  # seconds
+        'CQRS_DELAY_QUEUE_MAX_SIZE': 1000,
+    },
+}
+```
+
+## Customization
+
+The `dj_cqrs.mixins.ReplicaMixin` allows to take full control on retrying.
+
+``` py3
+# models.py
+
+class Example(ReplicaMixin, models.Model):
+    CQRS_ID = 'example'
+    ...
+
+    @classmethod
+    def get_cqrs_retry_delay(cls, current_retry=0):
+        # Linear delay growth
+        return (current_retry + 1) * 60
+
+    @classmethod
+    def should_retry_cqrs(cls, current_retry, exception=None):
+        # Retry 10 times or until we have troubles with database
+        return (
+            current_retry < 10
+            or isinstance(exception, django.db.OperationalError)
+        )
+```
+
+# Dead letters
+
+Expired or failed messages which should not be retried are moved to
+'dead letters' queue.
+
+|  Name               | Default                   | Description                                 |
+| ------------------- | ------------------------- | ------------------------------------------- |
+| dead_letter_queue   | \'dead_letter\_\' + queue | Queue name for dead letters.                |
+| dead_message_ttl    | 864000                    | Expiration **seconds**. Infinite if *None*. |
+
+``` py3
+# settings.py
+
+CQRS = {
+    ...
+    'queue': 'example',
+    'replica': {
+        ...
+        'dead_letter_queue': 'dead_letter_example', # generated from CQRS.queue
+        'dead_message_ttl': 864000, # 10 days
+    },
+}
+```
+
+## Commands
+
+### Dump
+
+Dumps all dead letters to stdout.
+
+``` console
+$ python manage.py cqrs_dead_letters dump
+{"signal_type":"SAVE","cqrs_id":"example","instance_data":{"id":1,"cqrs_revision":0,"cqrs_updated":"2021-04-30 11:50:05.164341+00:00"},"previous_data":null,"instance_pk":135,"correlation_id":null,"retries":30,"expires":"2021-05-01T11:50:00+00:00"}
+```
+
+### Retry
+
+Retry all dead letters. Message body retries and expires fields are downgraded.
+
+``` console
+$ python manage.py cqrs_dead_letters retry
+Total dead letters: 1
+Retrying: 1/1
+{"signal_type":"SAVE","cqrs_id":"example","instance_data":{"id":1,"cqrs_revision":0,"cqrs_updated":"2021-04-30 11:50:05.164341+00:00"},"previous_data":null,"instance_pk":135,"correlation_id":null,"retries":0,"expires":"2021-05-02T12:30:00+00:00"}
+```
+
+### Purge
+
+Removes all dead letters.
+
+``` console
+$ python manage.py cqrs_dead_letters purge
+Total dead letters: 1
+Purged
+```

+ 16 - 0
docs/macros.py

@@ -0,0 +1,16 @@
+import django
+from django.conf import settings
+
+
+def define_env(env):
+    """
+    This is the hook for defining variables, macros and filters
+
+    - variables: the dictionary that contains the environment variables
+    - macro: a decorator function, to declare a macro.
+    - filter: a function with one of more arguments,
+        used to perform a transformation
+    """
+    if not settings.configured:
+        settings.configure(DEBUG=True)
+        django.setup()

+ 117 - 0
docs/reference.md

@@ -0,0 +1,117 @@
+## Django Admin
+
+### dj_cqrs.admin.<strong>CQRSAdminMasterSyncMixin</strong>
+
+::: dj_cqrs.admin.CQRSAdminMasterSyncMixin
+    options:
+      members:
+        - sync_items
+        - _cqrs_sync_queryset
+      heading_level: 3
+
+## Mixins
+
+### dj_cqrs.mixins.<strong>MasterMixin</strong>
+
+::: dj_cqrs.mixins.RawMasterMixin
+    options:
+        heading_level: 3
+
+::: dj_cqrs.mixins.MasterMixin
+    options:
+        heading_level: 3
+
+### dj_cqrs.mixins.<strong>ReplicaMixin</strong>
+
+::: dj_cqrs.mixins.RawReplicaMixin
+    options:
+        heading_level: 3
+
+::: dj_cqrs.mixins.ReplicaMixin
+    options:
+        heading_level: 3
+
+## Managers
+
+### dj_cqrs.managers.<strong>MasterManager</strong>
+
+::: dj_cqrs.managers.MasterManager
+    options:
+        heading_level: 3
+
+### dj_cqrs.managers.<strong>ReplicaManager</strong>
+
+::: dj_cqrs.managers.ReplicaManager
+    options:
+        heading_level: 3
+
+## Signals
+
+### dj_cqrs.<strong>signals</strong>
+
+::: dj_cqrs.signals
+    options:
+      members:
+        - post_bulk_create
+        - post_update
+      heading_level: 3
+
+### dj_cqrs.signals.<strong>MasterSignals</strong>
+
+::: dj_cqrs.signals.MasterSignals
+    options:
+        heading_level: 3
+
+## Transports
+
+### dj_cqrs.transport.<strong>RabbitMQTransport</strong>
+
+::: dj_cqrs.transport.RabbitMQTransport
+    options:
+        heading_level: 3
+        members:
+          - clean_connection
+          - consume
+          - produce
+
+### dj_cqrs.transport.<strong>KombuTransport</strong>
+
+::: dj_cqrs.transport.KombuTransport
+    options:
+        heading_level: 3
+        members:
+          - clean_connection
+          - consume
+          - produce
+
+### dj_cqrs.constants.<strong>SignalType</strong>
+
+::: dj_cqrs.constants.SignalType
+    options:
+        heading_level: 3
+
+### dj_cqrs.dataclasses.<strong>TransportPayload</strong>
+
+::: dj_cqrs.dataclasses.TransportPayload
+    options:
+        heading_level: 3
+
+## Registries
+
+### dj_cqrs.registries.<strong>MasterRegistry</strong>
+
+::: dj_cqrs.registries.RegistryMixin
+    options:
+      members:
+        - register_model
+        - get_model_by_cqrs_id
+      heading_level: 3
+
+### dj_cqrs.registries.<strong>ReplicaRegistry</strong>
+
+::: dj_cqrs.registries.RegistryMixin
+    options:
+      members:
+        - register_model
+        - get_model_by_cqrs_id
+      heading_level: 3

+ 51 - 0
docs/track_fields_changes.md

@@ -0,0 +1,51 @@
+Keep track of changes to fields
+===============================
+
+In some circumstances, you want to keep track of changes made on some
+fields of the master model.
+
+**django-cqrs** can send previous values of the tracked fields
+to replicas.
+
+To do so, you can use the `CQRS_TRACKED_FIELDS` attribute to specify
+which fields to track:
+
+``` py3
+class MyMasterModel(MasterMixin):
+
+    CQRS_ID = 'my_model'
+    CQRS_TRACKED_FIELDS = ('char_field', 'parent', 'status')
+
+
+    char_field = models.CharField(max_length=100)
+    status = models.CharField(max_length=15, choices=STATUSES)
+
+    parent = models.ForeignKey(ParentMode, on_delete=models.CASCADE)
+```
+
+This way, you can override the `cqrs_save` method and apply your persistence
+logic based on tracked fields before accessing your database:
+
+``` py3
+class MyReplicaModel(ReplicaMixin):
+
+    CQRS_ID = 'my_model'
+
+    @classmethod
+    def cqrs_save(cls, master_data, previous_data=None, sync=False):
+        # Custom logic based on previous_data here.
+        pass
+```
+
+!!! note
+
+    The fields tracking features honors the `CQRS_MAPPING` attribute.
+
+
+!!! note
+
+    The fields tracking features relies on the
+    [FieldTracker](https://django-model-utils.readthedocs.io/en/latest/utilities.html#field-tracker)
+    utility class from the
+    [django-model-utils](https://github.com/jazzband/django-model-utils)
+    library.

+ 52 - 0
docs/transports.md

@@ -0,0 +1,52 @@
+Transports
+==========
+
+**django-cqrs** ships with two transports that allow users to
+choose the messaging broker that best fits their needs.
+
+# RabbitMQ transport
+
+The `dj_cqrs.transport.RabbitMQTransport` transport is based on the
+[pika](https://pika.readthedocs.io/en/stable/) messaging library.
+
+To configure the `RabbitMQTransport` you must provide the rabbitmq
+connection url:
+
+``` py3
+CQRS = {
+    'transport': 'dj_cqrs.transport.RabbitMQTransport',
+    'url': 'amqp://guest:guest@rabbit:5672/'
+}
+```
+
+!!! warning
+
+    Previous versions of the `RabbitMQTransport` use the attributes `host`,
+    `port`, `user`, `password` to configure the connection with rabbitmq.
+    These attributes are deprecated and will be removed in future versions
+    of **django-cqrs**.
+
+# Kombu transport
+
+The `dj_cqrs.transport.KombuTransport` transport is based on the
+[kombu](https://kombu.readthedocs.io/en/master/index.html) messaging
+library.
+
+Kombu supports different messaging brokers like RabbitMQ, Redis, Amazon
+SQS etc.
+
+To configure the `KombuTransport` you must provide the broker
+connection url:
+
+``` py3
+CQRS = {
+    'transport': 'dj_cqrs.transport.KombuTransport',
+    'url': 'redis://redis:6379/'
+}
+```
+
+Please read [Transport
+Comparison](https://kombu.readthedocs.io/en/master/introduction.html#transport-comparison)
+and [URLs](https://kombu.readthedocs.io/en/master/userguide/connections.html#urls)
+articles for Kombu to get more information on supported brokers and
+configuration urls.

+ 36 - 0
docs/utilities.md

@@ -0,0 +1,36 @@
+Utilities
+=========
+
+# Bulk synchronizer without transport
+
+Usage example: it may be used for initial configuration and/or may be
+used at planned downtime.
+
+On master service:
+
+``` shell
+$ python manage.py cqrs_bulk_dump --cqrs-id=author --output author.dump
+```
+
+On replica service:
+
+``` shell
+$ python manage.py cqrs_bulk_load --input=author.dump
+```
+
+# Filter synchronizer over transport
+
+Usage example: sync some specific records to a given replica. Can be
+used dynamically.
+
+To sync all replicas:
+
+``` shell
+$ python manage.py cqrs_sync --cqrs-id=author --filter="{\"id__in\": [1, 2]}"
+```
+
+To sync all instances only with one replica:
+
+``` shell
+$ python manage.py cqrs_sync --cqrs-id=author --filter="{}" --queue=replica
+```

+ 47 - 0
examples/demo_project/README.md

@@ -0,0 +1,47 @@
+# CQRS demo project
+
+It's a simple demo project that contains 2 services:
+
+- master: source of domain models. Stores models in PostgreSQL.
+- replica: service which gets models from master by CQRS. Stores replicated models in MySQL and Redis
+
+## Start project:
+
+```
+docker-compose up -d db_pgsql db_mysql
+docker-compose run master ./manage.py migrate
+docker-compose run replica ./manage.py migrate
+docker-compose up -d
+docker-compose run master ./manage.py cqrs_sync --cqrs-id=user -f={}
+docker-compose run master ./manage.py cqrs_sync --cqrs-id=product -f={}
+```
+
+It starts master WEB app on [http://127.0.0.1:8000](http://127.0.0.1:8000) and replica on [http://127.0.0.1:8001](http://127.0.0.1:8001)
+
+You can do something with model instances via WEB interface or django shell on master and see how data changes in replica too.
+
+
+## Domain models:
+
+### User:
+
+The most common and simple way for replication is used for this model.
+
+### ProductType:
+
+This model isn't being synchronized separately, only with related Product.
+
+### Product:
+
+This model uses a custom hand-written serializer and relation optimization.
+
+### Purchase:
+
+This model uses a Django REST Framework serializer. The replica service stores this model in Redis.
+
+
+## Monitoring
+
+You can monitor the CQRS queue with the tools provided by the chosen transport backend.
+
+For this demo we use RabbitMQ with management plugin. You can find it on [http://127.0.0.1:15672](http://127.0.0.1:15672) with credentials `rabbitmq / password`.

+ 95 - 0
examples/demo_project/docker-compose.yml

@@ -0,0 +1,95 @@
+version: "3"
+
+
+services:
+  # database for master
+  db_pgsql:
+    image: postgres:12
+    environment:
+      POSTGRES_USER: master_service
+      POSTGRES_PASSWORD: password
+      POSTGRES_DB: master_service
+    volumes:
+      - pgsql_data:/var/lib/postgresql/data
+
+  # database for replica
+  db_mysql:
+    image: mysql:8.0
+    environment:
+      MYSQL_ROOT_PASSWORD: password
+      MYSQL_USER: replica_service
+      MYSQL_PASSWORD: password
+      MYSQL_DATABASE: replica_service
+    volumes:
+      - mysql_data:/var/lib/mysql
+    command: --default-authentication-plugin=mysql_native_password
+
+  # cache storage for replica
+  redis:
+    image: redis
+
+  # CQRS transport backend
+  rabbitmq:
+    image: rabbitmq:3-management-alpine
+    environment:
+      RABBITMQ_DEFAULT_USER: rabbitmq
+      RABBITMQ_DEFAULT_PASS: password
+    ports:
+      - 15672:15672
+
+  # Domain models provider
+  master:
+    build:
+      context: master_service
+    ports:
+      - 8000:8000
+    depends_on:
+      - db_pgsql
+      - rabbitmq
+    volumes:
+      - ./master_service:/app
+    command: >
+      dockerize -wait tcp://rabbitmq:5672 -timeout 30s
+      dockerize -wait tcp://db_pgsql:5432 -timeout 30s
+      ./manage.py runserver 0.0.0.0:8000
+
+  # replica WEB app
+  replica:
+    build:
+      context: replica_service
+    ports:
+      - 8001:8000
+    depends_on:
+      - db_mysql
+      - redis
+      - rabbitmq
+    volumes:
+      - ./replica_service:/app
+    command: >
+      dockerize -wait tcp://rabbitmq:5672 -timeout 30s
+      dockerize -wait tcp://db_mysql:3306 -timeout 30s
+      dockerize -wait tcp://redis:6379 -timeout 30s
+      ./manage.py runserver 0.0.0.0:8000
+
+  # replica CQRS consumer worker
+  replica_cqrs_consumer:
+    build:
+      context: replica_service
+    depends_on:
+      - db_mysql
+      - rabbitmq
+    volumes:
+      - ./replica_service:/app
+    command: >
+      dockerize -wait tcp://rabbitmq:5672 -timeout 30s
+      dockerize -wait tcp://db_mysql:3306 -timeout 30s
+      dockerize -wait tcp://redis:6379 -timeout 30s
+      ./manage.py cqrs_consume -w2
+
+
+volumes:
+  pgsql_data:
+    driver: local
+
+  mysql_data:
+    driver: local

+ 16 - 0
examples/demo_project/master_service/Dockerfile

@@ -0,0 +1,16 @@
+FROM python:3
+
+WORKDIR /app
+
+COPY requirements.txt .
+
+RUN python -mpip install -r requirements.txt
+
+ENV DOCKERIZE_VERSION v0.6.1
+RUN wget https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
+    && tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
+    && rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz
+
+COPY . .
+
+CMD ./manage.py runserver 0.0.0.0:8000

+ 1 - 0
examples/demo_project/master_service/__init__.py

@@ -0,0 +1 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.

+ 0 - 0
examples/demo_project/master_service/app/__init__.py


+ 19 - 0
examples/demo_project/master_service/app/asgi.py

@@ -0,0 +1,19 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+"""
+ASGI config for master_service project.
+
+It exposes the ASGI callable as a module-level variable named ``application``.
+
+For more information on this file, see
+https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
+"""
+
+import os
+
+from django.core.asgi import get_asgi_application
+
+
+os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'master_service.settings')
+
+application = get_asgi_application()

+ 230 - 0
examples/demo_project/master_service/app/migrations/0001_initial.py

@@ -0,0 +1,230 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import django.contrib.auth.validators
+import django.utils.timezone
+from django.conf import settings
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    initial = True
+
+    dependencies = [
+        ('auth', '0001_initial'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='User',
+            fields=[
+                (
+                    'id',
+                    models.BigAutoField(
+                        auto_created=True,
+                        primary_key=True,
+                        serialize=False,
+                        verbose_name='ID',
+                    ),
+                ),
+                ('password', models.CharField(max_length=128, verbose_name='password')),
+                (
+                    'last_login',
+                    models.DateTimeField(blank=True, null=True, verbose_name='last login'),
+                ),
+                (
+                    'is_superuser',
+                    models.BooleanField(
+                        default=False,
+                        help_text='Designates that this user has all permissions without explicitly assigning them.',
+                        verbose_name='superuser status',
+                    ),
+                ),
+                (
+                    'username',
+                    models.CharField(
+                        error_messages={'unique': 'A user with that username already exists.'},
+                        help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.',
+                        max_length=150,
+                        unique=True,
+                        validators=[django.contrib.auth.validators.UnicodeUsernameValidator()],
+                        verbose_name='username',
+                    ),
+                ),
+                (
+                    'first_name',
+                    models.CharField(blank=True, max_length=150, verbose_name='first name'),
+                ),
+                (
+                    'last_name',
+                    models.CharField(blank=True, max_length=150, verbose_name='last name'),
+                ),
+                (
+                    'email',
+                    models.EmailField(blank=True, max_length=254, verbose_name='email address'),
+                ),
+                (
+                    'is_staff',
+                    models.BooleanField(
+                        default=False,
+                        help_text='Designates whether the user can log into this admin site.',
+                        verbose_name='staff status',
+                    ),
+                ),
+                (
+                    'is_active',
+                    models.BooleanField(
+                        default=True,
+                        help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.',
+                        verbose_name='active',
+                    ),
+                ),
+                (
+                    'date_joined',
+                    models.DateTimeField(
+                        default=django.utils.timezone.now,
+                        verbose_name='date joined',
+                    ),
+                ),
+                (
+                    'cqrs_revision',
+                    models.IntegerField(
+                        default=0,
+                        help_text="This field must be incremented on any model update. It's used to for CQRS sync.",
+                    ),
+                ),
+                (
+                    'cqrs_updated',
+                    models.DateTimeField(
+                        auto_now=True,
+                        help_text="This field must be incremented on every model update. It's used to for CQRS sync.",
+                    ),
+                ),
+                (
+                    'groups',
+                    models.ManyToManyField(
+                        blank=True,
+                        help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.',
+                        related_name='user_set',
+                        related_query_name='user',
+                        to='auth.Group',
+                        verbose_name='groups',
+                    ),
+                ),
+                (
+                    'user_permissions',
+                    models.ManyToManyField(
+                        blank=True,
+                        help_text='Specific permissions for this user.',
+                        related_name='user_set',
+                        related_query_name='user',
+                        to='auth.Permission',
+                        verbose_name='user permissions',
+                    ),
+                ),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='Product',
+            fields=[
+                (
+                    'id',
+                    models.BigAutoField(
+                        auto_created=True,
+                        primary_key=True,
+                        serialize=False,
+                        verbose_name='ID',
+                    ),
+                ),
+                (
+                    'cqrs_revision',
+                    models.IntegerField(
+                        default=0,
+                        help_text="This field must be incremented on any model update. It's used to for CQRS sync.",
+                    ),
+                ),
+                (
+                    'cqrs_updated',
+                    models.DateTimeField(
+                        auto_now=True,
+                        help_text="This field must be incremented on every model update. It's used to for CQRS sync.",
+                    ),
+                ),
+                ('name', models.CharField(max_length=50)),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='ProductType',
+            fields=[
+                (
+                    'id',
+                    models.BigAutoField(
+                        auto_created=True,
+                        primary_key=True,
+                        serialize=False,
+                        verbose_name='ID',
+                    ),
+                ),
+                ('name', models.CharField(max_length=50)),
+            ],
+        ),
+        migrations.CreateModel(
+            name='Purchase',
+            fields=[
+                (
+                    'id',
+                    models.BigAutoField(
+                        auto_created=True,
+                        primary_key=True,
+                        serialize=False,
+                        verbose_name='ID',
+                    ),
+                ),
+                (
+                    'cqrs_revision',
+                    models.IntegerField(
+                        default=0,
+                        help_text="This field must be incremented on any model update. It's used to for CQRS sync.",
+                    ),
+                ),
+                (
+                    'cqrs_updated',
+                    models.DateTimeField(
+                        auto_now=True,
+                        help_text="This field must be incremented on every model update. It's used to for CQRS sync.",
+                    ),
+                ),
+                ('action_time', models.DateTimeField(auto_now_add=True)),
+                (
+                    'product',
+                    models.ForeignKey(
+                        on_delete=django.db.models.deletion.CASCADE,
+                        to='app.product',
+                    ),
+                ),
+                (
+                    'user',
+                    models.ForeignKey(
+                        on_delete=django.db.models.deletion.CASCADE,
+                        to=settings.AUTH_USER_MODEL,
+                    ),
+                ),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.AddField(
+            model_name='product',
+            name='product_type',
+            field=models.ForeignKey(
+                on_delete=django.db.models.deletion.CASCADE,
+                to='app.producttype',
+            ),
+        ),
+    ]

+ 39 - 0
examples/demo_project/master_service/app/migrations/0002_fixtures.py

@@ -0,0 +1,39 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+from django.db import migrations
+
+
+def create_users(apps, schema_editor):
+    User = apps.get_model('app', 'User')
+    to_create = []
+    for username in ('Mal', 'Zoe', 'Wash', 'Inara', 'Jayne', 'Kaylee', 'Simon', 'River'):
+        to_create.append(User(username=username))
+    User.objects.bulk_create(to_create)
+
+
+def create_products(apps, schema_editor):
+    ProductType = apps.get_model('app', 'ProductType')
+    Product = apps.get_model('app', 'Product')
+
+    products = {
+        'food': ['apple', 'meat', 'banana'],
+        'weapon': ['blaster', 'gun', 'knife'],
+        'starships': ['Serenity'],
+    }
+    to_create = []
+    for key, items in products.items():
+        product_type = ProductType.objects.create(name=key)
+        for product in items:
+            to_create.append(Product(name=product, product_type=product_type))
+    Product.objects.bulk_create(to_create)
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ('app', '0001_initial'),
+    ]
+
+    operations = [
+        migrations.RunPython(create_users, migrations.RunPython.noop),
+        migrations.RunPython(create_products, migrations.RunPython.noop),
+    ]

+ 0 - 0
examples/demo_project/master_service/app/migrations/__init__.py


+ 39 - 0
examples/demo_project/master_service/app/models.py

@@ -0,0 +1,39 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+from django.contrib.auth.models import AbstractUser
+from django.db import models
+
+from dj_cqrs.mixins import MasterMixin
+
+
+class User(MasterMixin, AbstractUser):
+    CQRS_ID = 'user'
+    CQRS_PRODUCE = True
+
+
+class ProductType(models.Model):
+    name = models.CharField(max_length=50)
+
+
+class Product(MasterMixin, models.Model):
+    CQRS_ID = 'product'
+    CQRS_SERIALIZER = 'app.serializers.ProductSerializer'
+
+    name = models.CharField(max_length=50)
+    product_type = models.ForeignKey(ProductType, on_delete=models.CASCADE)
+
+    @classmethod
+    def relate_cqrs_serialization(cls, queryset):
+        return queryset.select_related('product_type')
+
+
+class Purchase(MasterMixin, models.Model):
+    CQRS_ID = 'purchase'
+    CQRS_SERIALIZER = 'app.serializers.PurchaseSerializer'
+
+    user = models.ForeignKey(User, on_delete=models.CASCADE)
+    product = models.ForeignKey(Product, on_delete=models.CASCADE)
+    action_time = models.DateTimeField(auto_now_add=True)
+
+    @classmethod
+    def relate_cqrs_serialization(cls, queryset):
+        return queryset.select_related('product', 'product__product_type')

+ 36 - 0
examples/demo_project/master_service/app/serializers.py

@@ -0,0 +1,36 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+from rest_framework import serializers
+
+from app.models import Purchase
+
+
+class ProductSerializer:
+    """
+    Simple serializer
+    """
+
+    def __init__(self, instance):
+        self.instance = instance
+
+    @property
+    def data(self):
+        return {
+            'id': self.instance.id,
+            'name': self.instance.name,
+            'product_type': {
+                'id': self.instance.product_type.id,
+                'name': self.instance.product_type.name,
+            },
+        }
+
+
+class PurchaseSerializer(serializers.ModelSerializer):
+    """
+    Django REST Framework serializers are compatible
+    """
+
+    product_name = serializers.CharField(source='product.name')
+
+    class Meta:
+        model = Purchase
+        fields = ('id', 'user_id', 'product_name', 'action_time')

+ 132 - 0
examples/demo_project/master_service/app/settings.py

@@ -0,0 +1,132 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+from pathlib import Path
+
+
+# Build paths inside the project like this: BASE_DIR / 'subdir'.
+BASE_DIR = Path(__file__).resolve().parent.parent
+
+
+# Quick-start development settings - unsuitable for production
+# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
+
+# SECURITY WARNING: keep the secret key used in production secret!
+SECRET_KEY = 'django-insecure-21-94qjpg#ol-3e1a!*6t$i0)e!v%+1cnb^%pe%!l%1f*6$9jq'
+
+# SECURITY WARNING: don't run with debug turned on in production!
+DEBUG = True
+
+ALLOWED_HOSTS = ['*']
+
+AUTH_USER_MODEL = 'app.User'
+
+
+# Application definition
+
+INSTALLED_APPS = [
+    'django.contrib.admin',
+    'django.contrib.auth',
+    'django.contrib.contenttypes',
+    'django.contrib.sessions',
+    'django.contrib.messages',
+    'django.contrib.staticfiles',
+    'dj_cqrs',
+    'app',
+]
+
+MIDDLEWARE = [
+    'django.middleware.security.SecurityMiddleware',
+    'django.contrib.sessions.middleware.SessionMiddleware',
+    'django.middleware.common.CommonMiddleware',
+    'django.middleware.csrf.CsrfViewMiddleware',
+    'django.contrib.auth.middleware.AuthenticationMiddleware',
+    'django.contrib.messages.middleware.MessageMiddleware',
+    'django.middleware.clickjacking.XFrameOptionsMiddleware',
+]
+
+ROOT_URLCONF = 'app.urls'
+
+TEMPLATES = [
+    {
+        'BACKEND': 'django.template.backends.django.DjangoTemplates',
+        'DIRS': ['templates'],
+        'APP_DIRS': True,
+        'OPTIONS': {
+            'context_processors': [
+                'django.template.context_processors.debug',
+                'django.template.context_processors.request',
+                'django.contrib.auth.context_processors.auth',
+                'django.contrib.messages.context_processors.messages',
+            ],
+        },
+    },
+]
+
+WSGI_APPLICATION = 'app.wsgi.application'
+
+
+# Database
+# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
+
+DATABASES = {
+    'default': {
+        'ENGINE': 'django.db.backends.postgresql',
+        'NAME': 'master_service',
+        'USER': 'master_service',
+        'PASSWORD': 'password',
+        'HOST': 'db_pgsql',
+        'PORT': '5432',
+        'CONN_MAX_AGE': 0,
+    },
+}
+
+
+# Password validation
+# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
+
+AUTH_PASSWORD_VALIDATORS = [
+    {
+        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
+    },
+    {
+        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
+    },
+    {
+        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
+    },
+    {
+        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
+    },
+]
+
+
+# Internationalization
+# https://docs.djangoproject.com/en/3.2/topics/i18n/
+
+LANGUAGE_CODE = 'en-us'
+
+TIME_ZONE = 'UTC'
+
+USE_I18N = True
+
+USE_L10N = True
+
+USE_TZ = True
+
+
+# Static files (CSS, JavaScript, Images)
+# https://docs.djangoproject.com/en/3.2/howto/static-files/
+
+STATIC_URL = '/static/'
+
+# Default primary key field type
+# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
+
+DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
+
+CQRS = {
+    'transport': 'dj_cqrs.transport.rabbit_mq.RabbitMQTransport',
+    'host': 'rabbitmq',
+    'port': '5672',
+    'user': 'rabbitmq',
+    'password': 'password',
+}

+ 25 - 0
examples/demo_project/master_service/app/urls.py

@@ -0,0 +1,25 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+from django.urls import path
+
+from app.views import (
+    main_view,
+    product_create_view,
+    product_delete_view,
+    purchase_create_view,
+    purchase_delete_view,
+    user_create_view,
+    user_delete_view,
+    user_update_view,
+)
+
+
+urlpatterns = [
+    path('', main_view),
+    path('users/', user_create_view),
+    path('users/<int:pk>/update/', user_update_view),
+    path('users/<int:pk>/delete/', user_delete_view),
+    path('products/', product_create_view),
+    path('products/<int:pk>/delete/', product_delete_view),
+    path('purchases/', purchase_create_view),
+    path('purchases/<int:pk>/delete/', purchase_delete_view),
+]

+ 95 - 0
examples/demo_project/master_service/app/views.py

@@ -0,0 +1,95 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+from django.http import HttpResponseNotAllowed
+from django.shortcuts import get_object_or_404, redirect, render
+from django.views.decorators.http import require_http_methods
+
+from app.models import (
+    Product,
+    ProductType,
+    Purchase,
+    User,
+)
+
+
+def _render_page(request, **kwargs):
+    return render(
+        request,
+        'main.html',
+        {
+            'users': User.objects.order_by('pk'),
+            'product_types': ProductType.objects.order_by('pk'),
+            'products': Product.objects.order_by('pk'),
+            'purchases': Purchase.objects.order_by('pk'),
+            **kwargs,
+        },
+    )
+
+
+def render_main_page_if_get(f):
+    def wrap(request, *args, **kwargs):
+        if request.method == 'GET':
+            return _render_page(request)
+        if request.method != 'POST':
+            return HttpResponseNotAllowed(['GET', 'POST'])
+        return f(request, *args, **kwargs)
+
+    return wrap
+
+
+@require_http_methods(['GET'])
+def main_view(request):
+    return _render_page(request)
+
+
+@render_main_page_if_get
+def user_create_view(request):
+    username = request.POST.get('username')
+    if User.objects.filter(username=username).exists():
+        return _render_page(request, user_error='Username must be unique')
+    User.objects.create(username=request.POST.get('username'))
+    return redirect('/')
+
+
+@render_main_page_if_get
+def user_update_view(request, pk):
+    user = get_object_or_404(User, pk=pk)
+    user.username += '1'
+    user.save()
+    return redirect('/')
+
+
+@render_main_page_if_get
+def user_delete_view(request, pk):
+    user = get_object_or_404(User, pk=pk)
+    user.delete()
+    return redirect('/')
+
+
+@render_main_page_if_get
+def product_create_view(request):
+    product_type_id = request.POST.get('product_type')
+    name = request.POST.get('name')
+    Product.objects.create(product_type_id=product_type_id, name=name)
+    return redirect('/')
+
+
+@render_main_page_if_get
+def product_delete_view(request, pk):
+    product = get_object_or_404(Product, pk=pk)
+    product.delete()
+    return redirect('/')
+
+
+@render_main_page_if_get
+def purchase_create_view(request):
+    user_id = request.POST.get('user')
+    product_id = request.POST.get('product')
+    Purchase.objects.create(user_id=user_id, product_id=product_id)
+    return redirect('/')
+
+
+@render_main_page_if_get
+def purchase_delete_view(request, pk):
+    purchase = get_object_or_404(Purchase, pk=pk)
+    purchase.delete()
+    return redirect('/')

+ 18 - 0
examples/demo_project/master_service/app/wsgi.py

@@ -0,0 +1,18 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+"""
+WSGI config for master_service project.
+
+It exposes the WSGI callable as a module-level variable named ``application``.
+
+For more information on this file, see
+https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
+"""
+
+import os
+
+from django.core.wsgi import get_wsgi_application
+
+
+os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'master_service.settings')
+
+application = get_wsgi_application()

+ 22 - 0
examples/demo_project/master_service/manage.py

@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+"""Django's command-line utility for administrative tasks."""
+import os
+import sys
+
+
+def main():
+    """Run administrative tasks."""
+    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'app.settings')
+    try:
+        from django.core.management import execute_from_command_line
+    except ImportError as exc:
+        raise ImportError(
+            "Couldn't import Django. Are you sure it's installed and "
+            'available on your PYTHONPATH environment variable? Did you '
+            'forget to activate a virtual environment?',
+        ) from exc
+    execute_from_command_line(sys.argv)
+
+
+if __name__ == '__main__':
+    main()

+ 4 - 0
examples/demo_project/master_service/requirements.txt

@@ -0,0 +1,4 @@
+django==3.2.20
+psycopg2==2.9.1
+djangorestframework==3.12.4
+django-cqrs

+ 193 - 0
examples/demo_project/master_service/templates/main.html

@@ -0,0 +1,193 @@
+<html>
+    <head>
+        <title>Master service</title>
+        <style>
+            html, body {
+                color: #333;
+                padding: 20px;
+            }
+            .block {
+                flex-grow: 1;
+                max-width: 100%;
+                padding-right: 40px;
+            }
+            .container {
+                display: flex;
+                justify-content: space-between;
+                max-width: 1200px;
+                flex-wrap: wrap;
+            }
+            .table {
+                padding: 20px;
+                border: 1px solid #dadada;
+            }
+            .table__header,
+            .table__row {
+                display: flex;
+            }
+            .table__header {
+                padding-bottom: 10px;
+                font-weight: bold;
+            }
+            .form-error {
+                color: #FF3333;
+            }
+            .table__col-narrow {
+                width: 100px;
+            }
+            .table__col-wide {
+                width: 200px;
+            }
+            .table__col-actions {
+                width: 120px;
+                display: flex;
+                justify-content: space-between;
+            }
+            .table__col-narrow:last-child,
+            .table__col-wide:last-child {
+                width: auto;
+            }
+            .form__input {
+                width: 200px;
+                padding: 5px;
+            }
+            .form__submit {
+                padding: 5px;
+            }
+            .form__select {
+                padding: 5px;
+                min-width: 200px;
+            }
+        </style>
+    </head>
+    <body>
+        <h1>CQRS Demo: master service</h1>
+        <div class="container">
+            <div class="block">
+                <h2>Users:</h2>
+                <div class="table">
+                    <div class="table__header">
+                        <div class="table__col-narrow">ID</div>
+                        <div class="table__col-wide">Username</div>
+                        <div class="table__col-wide">Actions</div>
+                    </div>
+                    <div class="table__body">
+                        {% for user in users %}
+                            <div class="table__row">
+                                <div class="table__col-narrow">{{ user.id }}</div>
+                                <div class="table__col-wide">{{ user.username }}</div>
+                                <div class="table__col-actions">
+                                    <form action="/users/{{ user.id }}/update/" method="POST">
+                                        <input type="submit" value="Update" />
+                                        {% csrf_token %}
+                                    </form>
+                                    <form action="/users/{{ user.id }}/delete/" method="POST">
+                                        <input type="submit" value="Delete" />
+                                        {% csrf_token %}
+                                    </form>
+                                </div>
+                            </div>
+                        {% endfor %}
+                    </div>
+                </div>
+                <br />
+                <form method="POST" action="/users/">
+                    <input type="text" name="username" placeholder="username" class="form__input" required />
+                    <input type="submit" value="Create" class="form__submit" />
+                    {% csrf_token %}
+                </form>
+                <div class="form-error">
+                    {{ user_error }}
+                </div>
+            </div>
+            <div class="block">
+                <h2>Products:</h2>
+                <div class="table">
+                    <div class="table__header">
+                        <div class="table__col-narrow">ID</div>
+                        <div class="table__col-wide">Type</div>
+                        <div class="table__col-wide">Name</div>
+                        <div class="table__col-wide">Actions</div>
+                    </div>
+                    <div class="table__body">
+                        {% for product in products %}
+                            <div class="table__row">
+                                <div class="table__col-narrow">{{ product.id }}</div>
+                                <div class="table__col-wide">{{ product.product_type.name }}</div>
+                                <div class="table__col-wide">{{ product.name }}</div>
+                                <div class="table__col-wide">
+                                    <form action="/products/{{ product.id }}/delete/" method="POST">
+                                        <input type="submit" value="Delete" />
+                                        {% csrf_token %}
+                                    </form>
+                                </div>
+                            </div>
+                        {% endfor %}
+                    </div>
+                </div>
+                <br />
+                <form method="POST" action="/products/">
+                    <select class="form__select" name="product_type" required>
+                        <option value="" disabled selected>Select product type</option>
+                        {% for product_type in product_types %}
+                            <option value="{{ product_type.id }}">
+                                {{ product_type.id }} {{ product_type.name }}
+                            </option>
+                        {% endfor %}
+                    </select>
+                    <input type="text" name="name" placeholder="name" class="form__input" required />
+                    <input type="submit" value="Create" class="form__submit" />
+                    {% csrf_token %}
+                </form>
+            </div>
+            <div class="block">
+                <h2>User purchases:</h2>
+                <div class="table">
+                    <div class="table__header">
+                        <div class="table__col-narrow">ID</div>
+                        <div class="table__col-narrow">User ID</div>
+                        <div class="table__col-wide">Product</div>
+                        <div class="table__col-wide">Time</div>
+                        <div class="table__col-wide">Actions</div>
+                    </div>
+                    <div class="table__body">
+                        {% for purchase in purchases %}
+                            <div class="table__row">
+                                <div class="table__col-narrow">{{ purchase.id }}</div>
+                                <div class="table__col-narrow">{{ purchase.user_id }}</div>
+                                <div class="table__col-wide">{{ purchase.product.name }}</div>
+                                <div class="table__col-wide">{{ purchase.action_time }}</div>
+                                <div class="table__col-wide">
+                                    <form action="/purchases/{{ purchase.id }}/delete/" method="POST">
+                                        <input type="submit" value="Delete" />
+                                        {% csrf_token %}
+                                    </form>
+                                </div>
+                            </div>
+                        {% endfor %}
+                    </div>
+                </div>
+                <br />
+                <form method="POST" action="/purchases/">
+                    <select class="form__select" name="product" required>
+                        <option value="" disabled selected>Select product</option>
+                        {% for product in products %}
+                            <option value="{{ product.id }}">{{ product.id }} {{ product.name }}</option>
+                        {% endfor %}
+                    </select>
+                    <select class="form__select" name="user" required>
+                        <option value="" disabled selected>Select user</option>
+                        {% for user in users %}
+                            <option value="{{ user.id }}">{{ user.id }} {{ user.username }}</option>
+                        {% endfor %}
+                    </select>
+                    <input type="submit" value="Buy" class="form__submit" />
+                    {% csrf_token %}
+                </form>
+                <div class="form-error">
+                    {{ purchase_error }}
+                </div>
+            </div>
+        </div>
+    </body>
+</html>

+ 16 - 0
examples/demo_project/replica_service/Dockerfile

@@ -0,0 +1,16 @@
+FROM python:3
+
+WORKDIR /app
+
+COPY requirements.txt .
+
+RUN python -mpip install -r requirements.txt
+
+ENV DOCKERIZE_VERSION v0.6.1
+RUN wget https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
+    && tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
+    && rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz
+
+COPY . .
+
+CMD ./manage.py runserver 0.0.0.0:8000

+ 0 - 0
examples/demo_project/replica_service/__init__.py


+ 0 - 0
examples/demo_project/replica_service/app/__init__.py


+ 17 - 0
examples/demo_project/replica_service/app/asgi.py

@@ -0,0 +1,17 @@
+"""
+ASGI config for replica_service project.
+
+It exposes the ASGI callable as a module-level variable named ``application``.
+
+For more information on this file, see
+https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
+"""
+
+import os
+
+from django.core.asgi import get_asgi_application
+
+
+os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'app.settings')
+
+application = get_asgi_application()

+ 158 - 0
examples/demo_project/replica_service/app/migrations/0001_initial.py

@@ -0,0 +1,158 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+
+import django.contrib.auth.validators
+import django.utils.timezone
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    initial = True
+
+    dependencies = [
+        ('auth', '0001_initial'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='ProductType',
+            fields=[
+                (
+                    'id',
+                    models.BigAutoField(
+                        auto_created=True,
+                        primary_key=True,
+                        serialize=False,
+                        verbose_name='ID',
+                    ),
+                ),
+                ('name', models.CharField(max_length=50)),
+            ],
+        ),
+        migrations.CreateModel(
+            name='Product',
+            fields=[
+                (
+                    'id',
+                    models.BigAutoField(
+                        auto_created=True,
+                        primary_key=True,
+                        serialize=False,
+                        verbose_name='ID',
+                    ),
+                ),
+                ('cqrs_revision', models.IntegerField()),
+                ('cqrs_updated', models.DateTimeField()),
+                ('name', models.CharField(max_length=100)),
+                (
+                    'product_type',
+                    models.ForeignKey(
+                        on_delete=django.db.models.deletion.CASCADE,
+                        to='app.producttype',
+                    ),
+                ),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='User',
+            fields=[
+                (
+                    'id',
+                    models.BigAutoField(
+                        auto_created=True,
+                        primary_key=True,
+                        serialize=False,
+                        verbose_name='ID',
+                    ),
+                ),
+                ('password', models.CharField(max_length=128, verbose_name='password')),
+                (
+                    'last_login',
+                    models.DateTimeField(blank=True, null=True, verbose_name='last login'),
+                ),
+                (
+                    'is_superuser',
+                    models.BooleanField(
+                        default=False,
+                        help_text='Designates that this user has all permissions without explicitly assigning them.',
+                        verbose_name='superuser status',
+                    ),
+                ),
+                (
+                    'username',
+                    models.CharField(
+                        error_messages={'unique': 'A user with that username already exists.'},
+                        help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.',
+                        max_length=150,
+                        unique=True,
+                        validators=[django.contrib.auth.validators.UnicodeUsernameValidator()],
+                        verbose_name='username',
+                    ),
+                ),
+                (
+                    'first_name',
+                    models.CharField(blank=True, max_length=150, verbose_name='first name'),
+                ),
+                (
+                    'last_name',
+                    models.CharField(blank=True, max_length=150, verbose_name='last name'),
+                ),
+                (
+                    'email',
+                    models.EmailField(blank=True, max_length=254, verbose_name='email address'),
+                ),
+                (
+                    'is_staff',
+                    models.BooleanField(
+                        default=False,
+                        help_text='Designates whether the user can log into this admin site.',
+                        verbose_name='staff status',
+                    ),
+                ),
+                (
+                    'is_active',
+                    models.BooleanField(
+                        default=True,
+                        help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.',
+                        verbose_name='active',
+                    ),
+                ),
+                (
+                    'date_joined',
+                    models.DateTimeField(
+                        default=django.utils.timezone.now,
+                        verbose_name='date joined',
+                    ),
+                ),
+                ('cqrs_revision', models.IntegerField()),
+                ('cqrs_updated', models.DateTimeField()),
+                (
+                    'groups',
+                    models.ManyToManyField(
+                        blank=True,
+                        help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.',
+                        related_name='user_set',
+                        related_query_name='user',
+                        to='auth.Group',
+                        verbose_name='groups',
+                    ),
+                ),
+                (
+                    'user_permissions',
+                    models.ManyToManyField(
+                        blank=True,
+                        help_text='Specific permissions for this user.',
+                        related_name='user_set',
+                        related_query_name='user',
+                        to='auth.Permission',
+                        verbose_name='user permissions',
+                    ),
+                ),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+    ]

+ 0 - 0
examples/demo_project/replica_service/app/migrations/__init__.py


+ 81 - 0
examples/demo_project/replica_service/app/models.py

@@ -0,0 +1,81 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+from django.contrib.auth.models import AbstractUser
+from django.core.cache import cache
+from django.db import models
+
+from dj_cqrs.mixins import ReplicaMixin
+
+
+class User(ReplicaMixin, AbstractUser):
+    """
+    Simple replica which sync all fields
+    """
+
+    CQRS_ID = 'user'
+
+
+class ProductType(models.Model):
+    name = models.CharField(max_length=50)
+
+
+class Product(ReplicaMixin, models.Model):
+    """
+    Replica with custom serialization and relation control
+    """
+
+    CQRS_ID = 'product'
+    CQRS_CUSTOM_SERIALIZATION = True
+
+    name = models.CharField(max_length=100)
+    product_type = models.ForeignKey(ProductType, on_delete=models.CASCADE)
+
+    @staticmethod
+    def _handle_product_type(mapped_data):
+        product_type, _ = ProductType.objects.update_or_create(
+            id=mapped_data['id'],
+            defaults=mapped_data,
+        )
+        return product_type
+
+    @classmethod
+    def cqrs_create(cls, sync, mapped_data, previous_data=None, meta=None):
+        product_type = cls._handle_product_type(mapped_data['product_type'])
+        return Product.objects.create(
+            id=mapped_data['id'],
+            product_type_id=product_type.id,
+            name=mapped_data['name'],
+            cqrs_revision=mapped_data['cqrs_revision'],
+            cqrs_updated=mapped_data['cqrs_updated'],
+        )
+
+    def cqrs_update(self, sync, mapped_data, previous_data=None, meta=None):
+        product_type = self._handle_product_type(mapped_data['product_type'])
+        self.name = mapped_data['name']
+        self.product_type_id = product_type.id
+        self.save()
+        return self
+
+
+class Purchase(ReplicaMixin):
+    """
+    Replica model with custom storage mechanism.
+
+    To simplify we use redis cache storage for this demo, but any SQL and NoSQL storage can
+    be used.
+    """
+
+    CQRS_ID = 'purchase'
+    CQRS_CUSTOM_SERIALIZATION = True
+
+    class Meta:
+        abstract = True
+
+    @classmethod
+    def cqrs_save(cls, master_data, previous_data=None, sync=False, meta=None):
+        cache.set('purchase_' + str(master_data['id']), master_data)
+        return True
+
+    @classmethod
+    def cqrs_delete(cls, master_data, meta=None):
+        cache.delete('purchase_' + str(master_data['id']))
+        return True

+ 143 - 0
examples/demo_project/replica_service/app/settings.py

@@ -0,0 +1,143 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+from pathlib import Path
+
+
+# Build paths inside the project like this: BASE_DIR / 'subdir'.
+BASE_DIR = Path(__file__).resolve().parent.parent
+
+
+# Quick-start development settings - unsuitable for production
+# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
+
+# SECURITY WARNING: keep the secret key used in production secret!
+SECRET_KEY = 'django-insecure-4acv=f5e@@hsxe42)c2ldt&!cwej5@&-h+qt_o21+&&6ynr7_a'
+
+# SECURITY WARNING: don't run with debug turned on in production!
+DEBUG = True
+
+ALLOWED_HOSTS = ['*']
+
+AUTH_USER_MODEL = 'app.User'
+
+
+# Application definition
+
+INSTALLED_APPS = [
+    'django.contrib.admin',
+    'django.contrib.auth',
+    'django.contrib.contenttypes',
+    'django.contrib.sessions',
+    'django.contrib.messages',
+    'django.contrib.staticfiles',
+    'dj_cqrs',
+    'app',
+]
+
+MIDDLEWARE = [
+    'django.middleware.security.SecurityMiddleware',
+    'django.contrib.sessions.middleware.SessionMiddleware',
+    'django.middleware.common.CommonMiddleware',
+    'django.middleware.csrf.CsrfViewMiddleware',
+    'django.contrib.auth.middleware.AuthenticationMiddleware',
+    'django.contrib.messages.middleware.MessageMiddleware',
+    'django.middleware.clickjacking.XFrameOptionsMiddleware',
+]
+
+ROOT_URLCONF = 'app.urls'
+
+TEMPLATES = [
+    {
+        'BACKEND': 'django.template.backends.django.DjangoTemplates',
+        'DIRS': ['templates'],
+        'APP_DIRS': True,
+        'OPTIONS': {
+            'context_processors': [
+                'django.template.context_processors.debug',
+                'django.template.context_processors.request',
+                'django.contrib.auth.context_processors.auth',
+                'django.contrib.messages.context_processors.messages',
+            ],
+        },
+    },
+]
+
+WSGI_APPLICATION = 'app.wsgi.application'
+
+
+# Database
+# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
+
+DATABASES = {
+    'default': {
+        'ENGINE': 'django.db.backends.mysql',
+        'NAME': 'replica_service',
+        'USER': 'replica_service',
+        'PASSWORD': 'password',
+        'HOST': 'db_mysql',
+        'PORT': '3306',
+    },
+}
+
+
+CACHES = {
+    'default': {
+        'BACKEND': 'django_redis.cache.RedisCache',
+        'LOCATION': 'redis://redis:6379/0',
+        'OPTIONS': {
+            'CLIENT_CLASS': 'django_redis.client.DefaultClient',
+        },
+    },
+}
+
+
+# Password validation
+# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
+
+AUTH_PASSWORD_VALIDATORS = [
+    {
+        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
+    },
+    {
+        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
+    },
+    {
+        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
+    },
+    {
+        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
+    },
+]
+
+
+# Internationalization
+# https://docs.djangoproject.com/en/3.2/topics/i18n/
+
+LANGUAGE_CODE = 'en-us'
+
+TIME_ZONE = 'UTC'
+
+USE_I18N = True
+
+USE_L10N = True
+
+USE_TZ = True
+
+
+# Static files (CSS, JavaScript, Images)
+# https://docs.djangoproject.com/en/3.2/howto/static-files/
+
+STATIC_URL = '/static/'
+
+# Default primary key field type
+# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
+
+DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
+
+CQRS = {
+    'transport': 'dj_cqrs.transport.rabbit_mq.RabbitMQTransport',
+    'queue': 'blog_replica',
+    'host': 'rabbitmq',
+    'port': '5672',
+    'user': 'rabbitmq',
+    'password': 'password',
+}

+ 9 - 0
examples/demo_project/replica_service/app/urls.py

@@ -0,0 +1,9 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+from django.urls import path
+
+from app.views import main_page_view
+
+
+urlpatterns = [
+    path('', main_page_view),
+]

+ 17 - 0
examples/demo_project/replica_service/app/views.py

@@ -0,0 +1,17 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.
+from django.core.cache import cache
+from django.shortcuts import render
+
+from app.models import Product, User
+
+
+def main_page_view(request):
+    return render(
+        request,
+        'main.html',
+        {
+            'users': User.objects.order_by('pk'),
+            'products': Product.objects.select_related('product_type').order_by('pk'),
+            'purchases': [cache.get(key) for key in cache.keys('purchase_*')],
+        },
+    )

+ 17 - 0
examples/demo_project/replica_service/app/wsgi.py

@@ -0,0 +1,17 @@
+"""
+WSGI config for replica_service project.
+
+It exposes the WSGI callable as a module-level variable named ``application``.
+
+For more information on this file, see
+https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
+"""
+
+import os
+
+from django.core.wsgi import get_wsgi_application
+
+
+os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'app.settings')
+
+application = get_wsgi_application()

+ 22 - 0
examples/demo_project/replica_service/manage.py

@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+"""Django's command-line utility for administrative tasks."""
+import os
+import sys
+
+
+def main():
+    """Run administrative tasks."""
+    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'app.settings')
+    try:
+        from django.core.management import execute_from_command_line
+    except ImportError as exc:
+        raise ImportError(
+            "Couldn't import Django. Are you sure it's installed and "
+            'available on your PYTHONPATH environment variable? Did you '
+            'forget to activate a virtual environment?',
+        ) from exc
+    execute_from_command_line(sys.argv)
+
+
+if __name__ == '__main__':
+    main()

+ 5 - 0
examples/demo_project/replica_service/requirements.txt

@@ -0,0 +1,5 @@
+django==3.2.20
+djangorestframework==3.12.4
+mysqlclient
+django-redis
+django-cqrs

+ 116 - 0
examples/demo_project/replica_service/templates/main.html

@@ -0,0 +1,116 @@
+<html>
+    <head>
+        <title>Replica service</title>
+        <style>
+            html, body {
+                color: #333;
+                padding: 20px;
+            }
+            .table {
+                padding: 20px;
+                border: 1px solid #dadada;
+            }
+            .table__header,
+            .table__row {
+                display: flex;
+                padding: 2px 0;
+            }
+            .table__header {
+                padding-bottom: 10px;
+                font-weight: bold;
+            }
+            .container {
+                display: flex;
+                justify-content: space-between;
+                max-width: 1200px;
+                flex-wrap: wrap;
+            }
+            .block {
+                flex-grow: 1;
+                max-width: 100%;
+                padding-right: 40px;
+            }
+            .table__col-narrow {
+                width: 100px;
+            }
+            .table__col-wide {
+                width: 200px;
+            }
+            .table__col-narrow:last-child,
+            .table__col-wide:last-child {
+                width: auto;
+            }
+        </style>
+    </head>
+    <body>
+        <h1>CQRS Demo: replica service</h1>
+        <div class="container">
+            <div class="block">
+                <h2>Users:</h2>
+                <p>
+                    Model with default synchronization mechanism
+                </p>
+                <div class="table">
+                    <div class="table__header">
+                        <div class="table__col-narrow">ID</div>
+                        <div class="table__col-wide">Username</div>
+                    </div>
+                    <div class="table__body">
+                        {% for user in users %}
+                            <div class="table__row">
+                                <div class="table__col-narrow">{{ user.id }}</div>
+                                <div class="table__col-wide">{{ user.username }}</div>
+                            </div>
+                        {% endfor %}
+                    </div>
+                </div>
+            </div>
+            <div class="block">
+                <h2>Products:</h2>
+                <p>
+                    Model with custom serializer and relation control.
+                </p>
+                <div class="table">
+                    <div class="table__header">
+                        <div class="table__col-narrow">ID</div>
+                        <div class="table__col-wide">Type</div>
+                        <div class="table__col-wide">Name</div>
+                    </div>
+                    <div class="table__body">
+                        {% for product in products %}
+                            <div class="table__row">
+                                <div class="table__col-narrow">{{ product.id }}</div>
+                                <div class="table__col-wide">{{ product.product_type.name }}</div>
+                                <div class="table__col-wide">{{ product.name }}</div>
+                            </div>
+                        {% endfor %}
+                    </div>
+                </div>
+            </div>
+            <div class="block">
+                <h2>Purchases:</h2>
+                <p>
+                    Custom storage (redis cache)
+                </p>
+                <div class="table">
+                    <div class="table__header">
+                        <div class="table__col-narrow">ID</div>
+                        <div class="table__col-narrow">User ID</div>
+                        <div class="table__col-wide">Product</div>
+                        <div class="table__col-wide">Time</div>
+                    </div>
+                    <div class="table__body">
+                        {% for purchase in purchases %}
+                            <div class="table__row">
+                                <div class="table__col-narrow">{{ purchase.id }}</div>
+                                <div class="table__col-narrow">{{ purchase.user_id }}</div>
+                                <div class="table__col-wide">{{ purchase.product_name }}</div>
+                                <div class="table__col-wide">{{ purchase.action_time }}</div>
+                            </div>
+                        {% endfor %}
+                    </div>
+                </div>
+            </div>
+        </div>
+    </body>
+</html>

+ 2 - 0
integration_tests/.dockerignore

@@ -0,0 +1,2 @@
+**/__pycache__
+**/*.pyc

+ 13 - 0
integration_tests/Dockerfile

@@ -0,0 +1,13 @@
+FROM python:3.10
+
+WORKDIR /app
+
+ENV DOCKERIZE_VERSION v0.6.1
+RUN wget https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
+    && tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
+    && rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz
+
+COPY . .
+
+RUN pip install -r requirements/dev.txt -r requirements/test.txt &&\
+    pip install mysqlclient psycopg2

+ 26 - 0
integration_tests/Dockerfile.Master

@@ -0,0 +1,26 @@
+FROM python:3.10
+
+ENV DOCKERIZE_VERSION v0.6.1
+RUN wget https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
+    && tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
+    && rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz
+
+
+ENV PYTHONUNBUFFERED 1
+ENV PYTHONPATH /master
+
+RUN mkdir /master
+COPY ./requirements/ /master/requirements
+#COPY pyproject.toml pyproject.toml
+
+RUN pip install -r /master/requirements/dev.txt -r /master/requirements/test.txt && pip install psycopg2-binary redis
+#RUN pip install poetry
+#RUN poetry config virtualenvs.create false
+#RUN poetry install && pip install psycopg2-binary redis
+
+COPY . /master/
+ADD integration_tests/setup.cfg /master/
+ADD integration_tests/run_integration_tests.sh /master/
+RUN chmod +x /master/run_integration_tests.sh
+
+WORKDIR /master/

+ 26 - 0
integration_tests/Dockerfile.MasterV1

@@ -0,0 +1,26 @@
+FROM python:3.10
+
+ENV DOCKERIZE_VERSION v0.6.1
+RUN wget https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
+    && tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
+    && rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz
+
+
+ENV PYTHONUNBUFFERED 1
+ENV PYTHONPATH /master
+
+RUN mkdir /master
+COPY ./requirements/ /master/requirements
+#COPY pyproject.toml pyproject.toml
+
+RUN pip install -r /master/requirements/dev.txt -r /master/requirements/test.txt && pip install psycopg2-binary redis django-cqrs==1.3.1
+#RUN pip install poetry
+#RUN poetry config virtualenvs.create false
+#RUN poetry install && pip install psycopg2-binary redis django-cqrs==1.3.1
+
+COPY . /master/
+ADD integration_tests/setup.cfg /master/
+ADD integration_tests/run_integration_tests.sh /master/
+RUN chmod +x /master/run_integration_tests.sh
+
+WORKDIR /master/

+ 24 - 0
integration_tests/Dockerfile.Replica

@@ -0,0 +1,24 @@
+FROM python:3.10
+
+ENV DOCKERIZE_VERSION v0.6.1
+RUN wget https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
+    && tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
+    && rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz
+
+
+ENV PYTHONUNBUFFERED 1
+ENV PYTHONPATH /replica
+
+RUN mkdir /replica
+COPY ./requirements/ /replica/requirements
+#COPY pyproject.toml pyproject.toml
+RUN pip install -r /replica/requirements/dev.txt -r /replica/requirements/test.txt && pip install psycopg2-binary redis
+#RUN pip install poetry
+#RUN poetry install
+#RUN poetry config virtualenvs.create false
+#RUN pip install psycopg2-binary redis
+
+COPY . /replica/
+ADD integration_tests/manage.py /replica/
+
+WORKDIR /replica/

+ 23 - 0
integration_tests/Dockerfile.ReplicaV1

@@ -0,0 +1,23 @@
+FROM python:3.10
+
+ENV DOCKERIZE_VERSION v0.6.1
+RUN wget https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
+    && tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
+    && rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz
+
+
+ENV PYTHONUNBUFFERED 1
+ENV PYTHONPATH /replica
+
+RUN mkdir /replica
+COPY ./requirements/ /replica/requirements
+#COPY pyproject.toml pyproject.toml
+RUN pip install -r /replica/requirements/dev.txt -r /replica/requirements/test.txt && pip install psycopg2-binary redis django-cqrs==1.3.1
+#RUN pip install poetry
+#RUN poetry config virtualenvs.create false
+#RUN poetry install && pip install psycopg2-binary redis django-cqrs==1.3.1
+
+COPY . /replica/
+ADD integration_tests/manage.py /replica/
+
+WORKDIR /replica/

+ 42 - 0
integration_tests/Makefile

@@ -0,0 +1,42 @@
+.PHONY: build test
+
+.DEFAULT_GOAL := pika
+
+build: 
+	docker-compose build
+
+build_master_v1:
+	docker-compose -f docker-compose.yml -f masterV1.yml build
+
+build_replica_v1:
+	docker-compose -f docker-compose.yml -f replicaV1.yml build
+
+pika: build
+	@echo "Run PIKA integration tests..."
+	docker-compose run master
+	@echo "Stopping running containers..."
+	docker-compose down --remove-orphans
+	@echo "Done!"
+
+kombu: build
+	@echo "Run KOMBU integration tests..."
+	docker-compose -f docker-compose.yml -f kombu.yml run master
+	@echo "Stopping running containers..."
+	docker-compose -f docker-compose.yml -f kombu.yml down --remove-orphans
+	@echo "Done!"
+
+master_v1: build_master_v1
+	@echo "Run regression tests Master v1.3.1..."
+	docker-compose -f docker-compose.yml -f masterV1.yml run master
+	@echo "Stopping running containers..."
+	docker-compose -f docker-compose.yml -f masterV1.yml down --remove-orphans
+	@echo "Done!"
+
+replica_v1: build_replica_v1
+	@echo "Run regression tests Replica v1.3.1..."
+	docker-compose -f docker-compose.yml -f replicaV1.yml run master
+	@echo "Stopping running containers..."
+	docker-compose -f docker-compose.yml -f replicaV1.yml down --remove-orphans
+	@echo "Done!"
+
+all: pika kombu master_v1 replica_v1

+ 1 - 0
integration_tests/__init__.py

@@ -0,0 +1 @@
+#  Copyright © 2023 Ingram Micro Inc. All rights reserved.

+ 61 - 0
integration_tests/docker-compose.yml

@@ -0,0 +1,61 @@
+version: '3'
+
+services:
+
+  mq:
+    image: rabbitmq:latest
+
+  postgres:
+    image: postgres:latest
+    environment:
+      - POSTGRES_HOST=postgres
+      - POSTGRES_USER=user
+      - POSTGRES_PASSWORD=pswd
+      - POSTGRES_DB=replica
+      - POSTGRES_HOST_AUTH_METHOD=md5
+      - POSTGRES_INITDB_ARGS=--auth-host=md5
+
+  replica:
+    build:
+      context: ..
+      dockerfile: integration_tests/Dockerfile.Replica
+    restart: always
+    command: >
+      bash -c "
+      dockerize -wait tcp://mq:5672 -wait tcp://postgres:5432 -timeout 60s && 
+      python manage.py makemigrations --settings=integration_tests.replica_settings &&
+      python manage.py makemigrations dj_replica --settings=integration_tests.replica_settings &&
+      python manage.py migrate --settings=integration_tests.replica_settings &&
+      python manage.py cqrs_consume -w 2 --settings=integration_tests.replica_settings
+      "
+    depends_on:
+      - mq
+      - postgres
+    volumes:
+      - ../dj_cqrs:/replica/dj_cqrs
+    environment:
+      - POSTGRES_HOST=postgres
+      - POSTGRES_USER=user
+      - POSTGRES_PASSWORD=pswd
+      - POSTGRES_DB=replica
+      - CQRS_REPLICA_TRANSPORT=tests.dj.transport.RabbitMQTransportWithEvents
+      - CQRS_BROKER_URL=amqp://mq:5672/
+
+  master:
+    build:
+      context: ..
+      dockerfile: integration_tests/Dockerfile.Master
+    command: >
+      bash -c "
+      dockerize -wait tcp://mq:5672 -wait tcp://postgres:5432 -timeout 60s && 
+      ./run_integration_tests.sh
+      "
+    depends_on:
+      - mq
+      - replica
+    volumes:
+      - ./tests/:/master/integration_tests/tests
+      - ../dj_cqrs:/master/dj_cqrs
+    environment:
+      - CQRS_MASTER_TRANSPORT=dj_cqrs.transport.RabbitMQTransport
+      - CQRS_BROKER_URL=amqp://mq:5672/

+ 68 - 0
integration_tests/kombu.yml

@@ -0,0 +1,68 @@
+version: '3'
+
+services:
+
+  mq:
+    image: redis:latest
+    expose:
+      - '6379'
+
+  postgres:
+    image: postgres:latest
+    expose:
+      - '5432'
+    environment:
+      - POSTGRES_HOST=postgres
+      - POSTGRES_USER=user
+      - POSTGRES_PASSWORD=pswd
+      - POSTGRES_DB=replica
+      - POSTGRES_HOST_AUTH_METHOD=md5
+      - POSTGRES_INITDB_ARGS=--auth-host=md5
+
+  replica:
+    build:
+      context: ..
+      dockerfile: integration_tests/Dockerfile.Replica
+    restart: always
+    command: >
+      bash -c "
+      dockerize -wait tcp://mq:6379 -wait tcp://postgres:5432 -timeout 60s && 
+      python manage.py makemigrations --settings=integration_tests.replica_settings &&
+      python manage.py makemigrations dj_replica --settings=integration_tests.replica_settings &&
+      python manage.py migrate --settings=integration_tests.replica_settings &&
+      python manage.py cqrs_consume -w 2 --settings=integration_tests.replica_settings
+      "
+    container_name: django_cqrs_test_replica
+    depends_on:
+      - mq
+      - postgres
+    volumes:
+      - ../dj_cqrs:/replica/dj_cqrs
+    environment:
+      - POSTGRES_HOST=postgres
+      - POSTGRES_USER=user
+      - POSTGRES_PASSWORD=pswd
+      - POSTGRES_DB=replica
+      - CQRS_REPLICA_TRANSPORT=tests.dj.transport.KombuTransportWithEvents
+      - CQRS_BROKER_URL=redis://mq:6379/
+
+  master:
+    build:
+      context: ..
+      dockerfile: integration_tests/Dockerfile.Master
+    command: >
+      bash -c "
+      dockerize -wait tcp://mq:6379 -wait tcp://postgres:5432 -timeout 60s && 
+      ./run_integration_tests.sh
+      "
+      
+    container_name: django_cqrs_test_master
+    depends_on:
+      - mq
+      - replica
+    volumes:
+      - ./tests/:/master/integration_tests/tests
+      - ../dj_cqrs:/master/dj_cqrs
+    environment:
+      - CQRS_MASTER_TRANSPORT=dj_cqrs.transport.KombuTransport
+      - CQRS_BROKER_URL=redis://mq:6379/

+ 0 - 0
integration_tests/manage.py


Some files were not shown because too many files changed in this diff