diff --git a/performance_test/create_data.py b/performance_test/create_data.py index d0228e40..6edde525 100644 --- a/performance_test/create_data.py +++ b/performance_test/create_data.py @@ -6,14 +6,21 @@ ObjectRecordFactory as _ObjectRecordFactory, ObjectTypeFactory, ) -from objects.token.tests.factories import TokenAuthFactory +from objects.token.constants import PermissionModes +from objects.token.tests.factories import PermissionFactory, TokenAuthFactory object_type = ObjectTypeFactory.create( service__api_root="http://localhost:8001/api/v2/", uuid="f1220670-8ab7-44f1-a318-bd0782e97662", ) -token = TokenAuthFactory(token="secret", is_superuser=True) +token = TokenAuthFactory(token="secret", is_superuser=False) +PermissionFactory.create( + object_type=object_type, + mode=PermissionModes.read_only, + token_auth=token, + use_fields=False, +) class ObjectRecordFactory(_ObjectRecordFactory): @@ -31,6 +38,7 @@ def add_timestamp(obj, create, extracted, **kwargs): ObjectRecordFactory.create_batch( 5000, object__object_type=object_type, + _object_type=object_type, start_at="2020-01-01", version=1, data={"identifier": "63f473de-a7a6-4000-9421-829e146499e3", "foo": "bar"}, @@ -38,6 +46,7 @@ def add_timestamp(obj, create, extracted, **kwargs): ) ObjectRecordFactory.create( object__object_type=object_type, + _object_type=object_type, start_at="2020-01-01", version=1, data={"identifier": "ec5cde18-40a0-4135-8d97-3500d1730e60", "foo": "bar"}, diff --git a/performance_test/test_objects_list.py b/performance_test/test_objects_list.py index 1920d615..944e74da 100644 --- a/performance_test/test_objects_list.py +++ b/performance_test/test_objects_list.py @@ -98,3 +98,25 @@ def make_request(): assert result.json()["count"] == 1 benchmark_assertions(mean=1, max=1) + + +@pytest.mark.benchmark(max_time=60, min_rounds=5) +def test_objects_api_list_filter_by_object_type(benchmark, benchmark_assertions): + """ + Regression test for maykinmedia/objects-api#677 + """ + params = { + 
"pageSize": 100, + "type": "http://localhost:8001/api/v2/objecttypes/f1220670-8ab7-44f1-a318-bd0782e97662", + "ordering": "-record__data__nested__timestamp", + } + + def make_request(): + return requests.get((BASE_URL / "objects").set(params), headers=AUTH_HEADERS) + + result = benchmark(make_request) + + assert result.status_code == 200 + assert result.json()["count"] == 5001 + + benchmark_assertions(mean=1, max=1) diff --git a/requirements/base.txt b/requirements/base.txt index d36e32d1..033a0bee 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -117,7 +117,7 @@ django-axes==6.5.1 # via open-api-framework django-cors-headers==4.4.0 # via open-api-framework -django-csp==3.8 +django-csp==4.0 # via open-api-framework django-filter==24.2 # via @@ -249,14 +249,16 @@ notifications-api-common==0.7.3 # via # -r requirements/base.in # commonground-api-common -open-api-framework==0.12.0 +open-api-framework==0.13.0 # via -r requirements/base.in orderedmultidict==1.0.1 # via furl oyaml==1.0 # via commonground-api-common packaging==25.0 - # via kombu + # via + # django-csp + # kombu phonenumberslite==8.13.30 # via django-two-factor-auth prometheus-client==0.20.0 diff --git a/requirements/ci.txt b/requirements/ci.txt index 3e2f13b0..847fef3f 100644 --- a/requirements/ci.txt +++ b/requirements/ci.txt @@ -191,7 +191,7 @@ django-cors-headers==4.4.0 # via # -c requirements/base.txt # -r requirements/base.txt -django-csp==3.8 +django-csp==4.0 # via # -c requirements/base.txt # -r requirements/base.txt @@ -460,7 +460,7 @@ notifications-api-common==0.7.3 # -c requirements/base.txt # -r requirements/base.txt # commonground-api-common -open-api-framework==0.12.0 +open-api-framework==0.13.0 # via # -c requirements/base.txt # -r requirements/base.txt @@ -478,6 +478,7 @@ packaging==25.0 # via # -c requirements/base.txt # -r requirements/base.txt + # django-csp # kombu # pytest # sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index 0be9f63f..7571abb8 100644 
--- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -231,7 +231,7 @@ django-cors-headers==4.4.0 # via # -c requirements/ci.txt # -r requirements/ci.txt -django-csp==3.8 +django-csp==4.0 # via # -c requirements/ci.txt # -r requirements/ci.txt @@ -556,7 +556,7 @@ notifications-api-common==0.7.3 # -c requirements/ci.txt # -r requirements/ci.txt # commonground-api-common -open-api-framework==0.12.0 +open-api-framework==0.13.0 # via # -c requirements/ci.txt # -r requirements/ci.txt @@ -575,6 +575,7 @@ packaging==25.0 # -c requirements/ci.txt # -r requirements/ci.txt # build + # django-csp # kombu # pytest # sphinx diff --git a/src/objects/api/kanalen.py b/src/objects/api/kanalen.py index 8c6160d4..86e9b660 100644 --- a/src/objects/api/kanalen.py +++ b/src/objects/api/kanalen.py @@ -32,7 +32,7 @@ def get_kenmerken( data = data or {} return { kenmerk: ( - data.get("type") or obj.object.object_type.url + data.get("type") or obj._object_type.url if kenmerk == "object_type" else data.get(kenmerk, getattr(obj, kenmerk)) ) diff --git a/src/objects/api/serializers.py b/src/objects/api/serializers.py index 74d898a9..e845cf90 100644 --- a/src/objects/api/serializers.py +++ b/src/objects/api/serializers.py @@ -100,7 +100,7 @@ class ObjectSerializer(DynamicFieldsMixin, serializers.HyperlinkedModelSerialize type = ObjectTypeField( min_length=1, max_length=1000, - source="object.object_type", + source="_object_type", queryset=ObjectType.objects.all(), help_text=_("Url reference to OBJECTTYPE in Objecttypes API"), validators=[IsImmutableValidator()], @@ -119,7 +119,9 @@ class Meta: @transaction.atomic def create(self, validated_data): - object_data = validated_data.pop("object") + object_data = validated_data.pop("object", {}) + if object_type := validated_data.pop("_object_type"): + object_data["object_type"] = object_type object = Object.objects.create(**object_data) validated_data["object"] = object @@ -156,7 +158,7 @@ def update(self, instance, validated_data): logger.info( 
"object_updated", object_uuid=str(record.object.uuid), - objecttype_uuid=str(record.object.object_type.uuid), + objecttype_uuid=str(record._object_type.uuid), objecttype_version=record.version, token_identifier=token_auth.identifier, token_application=token_auth.application, diff --git a/src/objects/api/v2/filters.py b/src/objects/api/v2/filters.py index f03be542..25aac9c1 100644 --- a/src/objects/api/v2/filters.py +++ b/src/objects/api/v2/filters.py @@ -132,7 +132,7 @@ def clean(self): class ObjectRecordFilterSet(FilterSet): type = ObjectTypeFilter( - field_name="object__object_type", + field_name="_object_type", help_text=_("Url reference to OBJECTTYPE in Objecttypes API"), queryset=ObjectType.objects.all(), min_length=1, diff --git a/src/objects/api/v2/views.py b/src/objects/api/v2/views.py index 40752397..732b67f8 100644 --- a/src/objects/api/v2/views.py +++ b/src/objects/api/v2/views.py @@ -80,13 +80,16 @@ class ObjectViewSet( ObjectNotificationMixin, SearchMixin, GeoMixin, viewsets.ModelViewSet ): - queryset = ObjectRecord.objects.select_related( - "object", - "object__object_type", - "object__object_type__service", - "correct", - "corrected", - ).order_by("-pk") + queryset = ( + ObjectRecord.objects.select_related( + "_object_type", + "_object_type__service", + "correct", + "corrected", + ) + .prefetch_related("object") + .order_by("-pk") + ) serializer_class = ObjectSerializer filterset_class = ObjectRecordFilterSet filter_backends = [FilterBackend, OrderingBackend] @@ -105,7 +108,7 @@ def get_queryset(self): # prefetch permissions for DB optimization. 
Used in DynamicFieldsMixin base = base.prefetch_related( models.Prefetch( - "object__object_type__permissions", + "_object_type__permissions", queryset=Permission.objects.filter(token_auth=token_auth), to_attr="token_permissions", ), diff --git a/src/objects/api/validators.py b/src/objects/api/validators.py index 523a9136..69f394a1 100644 --- a/src/objects/api/validators.py +++ b/src/objects/api/validators.py @@ -21,16 +21,16 @@ def __call__(self, attrs, serializer): # create if not instance: - object_type = attrs.get("object", {}).get("object_type") + object_type = attrs.get("_object_type") version = attrs.get("version") data = attrs.get("data", {}) # update else: object_type = ( - attrs.get("object", {}).get("object_type") - if "object" in attrs - else instance.object.object_type + attrs.get("_object_type") + if "_object_type" in attrs + else instance._object_type ) version = attrs.get("version") if "version" in attrs else instance.version data = attrs.get("data", {}) if "data" in attrs else instance.data @@ -124,9 +124,7 @@ class GeometryValidator: def __call__(self, attrs, serializer): instance = getattr(serializer, "instance", None) - object_type = ( - attrs.get("object", {}).get("object_type") or instance.object.object_type - ) + object_type = attrs.get("_object_type") or instance._object_type geometry = attrs.get("geometry") if not geometry: diff --git a/src/objects/core/migrations/0032_objectrecord__object_type.py b/src/objects/core/migrations/0032_objectrecord__object_type.py new file mode 100644 index 00000000..9c9c607b --- /dev/null +++ b/src/objects/core/migrations/0032_objectrecord__object_type.py @@ -0,0 +1,24 @@ +# Generated by Django 5.2.3 on 2025-09-29 09:54 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0031_object_created_on_object_modified_on_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="objectrecord", + 
name="_object_type", + field=models.ForeignKey( + blank=True, + help_text="OBJECTTYPE in Objecttypes API", + null=True, + on_delete=django.db.models.deletion.PROTECT, + to="core.objecttype", + ), + ), + ] diff --git a/src/objects/core/migrations/0033_objectrecord__backfill_denormalized_fields.py b/src/objects/core/migrations/0033_objectrecord__backfill_denormalized_fields.py new file mode 100644 index 00000000..74b4c66f --- /dev/null +++ b/src/objects/core/migrations/0033_objectrecord__backfill_denormalized_fields.py @@ -0,0 +1,50 @@ +# Generated by Django 5.2.3 on 2025-09-29 08:19 + +import os + +from django.db import connection, migrations + +from structlog import get_logger + +logger = get_logger(__name__) + + +BATCH_SIZE = int(os.getenv("OBJECTRECORD_BATCH_SIZE", 200_000)) + + +def backfill_object_type_batch(apps, cursor, batch_size): + cursor.execute( + """ + WITH batch AS ( + SELECT r.id + FROM core_objectrecord r + WHERE r._object_type_id IS NULL + LIMIT %s + ) + UPDATE core_objectrecord r + SET _object_type_id = o.object_type_id + FROM core_object o, batch + WHERE r.id = batch.id + AND r.object_id = o.id; + """, + [batch_size], + ) + return cursor.rowcount + + +def forward(apps, schema_editor): + with connection.cursor() as cursor: + while True: + num_updated = backfill_object_type_batch(apps, cursor, BATCH_SIZE) + if num_updated == 0: + break + + logger.info("backfilled_object_type_for_records", num_records=num_updated) + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0032_objectrecord__object_type"), + ] + + operations = [migrations.RunPython(forward, migrations.RunPython.noop)] diff --git a/src/objects/core/migrations/0034_alter_objectrecord__object_type_and_more.py b/src/objects/core/migrations/0034_alter_objectrecord__object_type_and_more.py new file mode 100644 index 00000000..d69a6691 --- /dev/null +++ b/src/objects/core/migrations/0034_alter_objectrecord__object_type_and_more.py @@ -0,0 +1,43 @@ +# Generated by Django 5.2.6 
on 2025-10-02 09:37 + +import django.db.models.deletion +from django.contrib.postgres.operations import AddIndexConcurrently +from django.db import migrations, models + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("core", "0033_objectrecord__backfill_denormalized_fields"), + ] + + operations = [ + migrations.AlterField( + model_name="objectrecord", + name="_object_type", + field=models.ForeignKey( + help_text="OBJECTTYPE in Objecttypes API", + on_delete=django.db.models.deletion.PROTECT, + to="core.objecttype", + ), + ), + AddIndexConcurrently( + model_name="objectrecord", + index=models.Index( + fields=["_object_type_id", "-index"], name="idx_objectrecord_type_index" + ), + ), + AddIndexConcurrently( + model_name="objectrecord", + index=models.Index( + fields=["_object_type_id", "id"], name="idx_objectrecord_type_id" + ), + ), + AddIndexConcurrently( + model_name="objectrecord", + index=models.Index( + fields=["_object_type_id", "start_at", "end_at", "object", "-index"], + name="idx_type_start_end_object_idx", + ), + ), + ] diff --git a/src/objects/core/models.py b/src/objects/core/models.py index be553d4c..5edd16cf 100644 --- a/src/objects/core/models.py +++ b/src/objects/core/models.py @@ -154,11 +154,35 @@ class ObjectRecord(models.Model): auto_now=True, help_text=_("Last modification date") ) + # Denormalized field to avoid unnecessary joins on `Object` + _object_type = models.ForeignKey( + ObjectType, + on_delete=models.PROTECT, + help_text=_("OBJECTTYPE in Objecttypes API"), + null=False, + blank=False, + db_index=True, + ) + objects = ObjectRecordQuerySet.as_manager() class Meta: unique_together = ("object", "index") - indexes = [GinIndex(fields=["data"], name="idx_objectrecord_data_gin")] + indexes = [ + GinIndex(fields=["data"], name="idx_objectrecord_data_gin"), + models.Index( + fields=["_object_type_id", "-index"], + name="idx_objectrecord_type_index", + ), + models.Index( + fields=["_object_type_id", "id"], + 
name="idx_objectrecord_type_id", + ), + models.Index( + fields=["_object_type_id", "start_at", "end_at", "object", "-index"], + name="idx_type_start_end_object_idx", + ), + ] def __str__(self): return f"{self.version} ({self.start_at})" @@ -167,7 +191,7 @@ def clean(self): super().clean() if hasattr(self.object, "object_type") and self.version and self.data: - check_objecttype_cached(self.object.object_type, self.version, self.data) + check_objecttype_cached(self._object_type, self.version, self.data) def save(self, *args, **kwargs): if not self.id and self.object.last_record: @@ -178,4 +202,6 @@ def save(self, *args, **kwargs): previous_record.end_at = self.start_at previous_record.save() + self._object_type = self.object.object_type + super().save(*args, **kwargs) diff --git a/src/objects/core/query.py b/src/objects/core/query.py index c6ee0d58..2a5005e5 100644 --- a/src/objects/core/query.py +++ b/src/objects/core/query.py @@ -1,6 +1,4 @@ from django.db import models -from django.db.models import F, Window -from django.db.models.functions import RowNumber from vng_api_common.utils import get_uuid_from_path from zgw_consumers.models import Service @@ -37,9 +35,7 @@ def filter_for_token(self, token): return self.all() allowed_object_types = token.permissions.values("object_type") - return self.filter( - object__object_type__in=models.Subquery(allowed_object_types) - ) + return self.filter(_object_type__in=models.Subquery(allowed_object_types)) def keep_max_record_per_object(self): """ @@ -47,18 +43,10 @@ def keep_max_record_per_object(self): """ filtered_records = ( self.filter(object=models.OuterRef("object")) - .annotate( - row_number=Window( - expression=RowNumber(), - partition_by=[F("object")], - order_by=F("index").desc(), - ) - ) - .filter(row_number=1) - .values("index") + .order_by("-index") + .values("index")[:1] ) - - return self.filter(index__in=filtered_records) + return self.filter(index__in=models.Subquery(filtered_records)) def 
filter_for_date(self, date): """ diff --git a/src/objects/core/tests/test_migrations.py b/src/objects/core/tests/test_migrations.py new file mode 100644 index 00000000..eec6d6fc --- /dev/null +++ b/src/objects/core/tests/test_migrations.py @@ -0,0 +1,171 @@ +import importlib +import threading +import time +from unittest.mock import patch + +from django.core.management import call_command +from django.db import connection +from django.db.migrations.executor import MigrationExecutor +from django.db.migrations.state import StateApps +from django.test import TransactionTestCase + + +# TODO move this to maykin common? +class BaseMigrationTest(TransactionTestCase): + app: str + migrate_from: str # The migration before the one we want to test + migrate_to: str # The migration we want to test + + setting_overrides: dict = {} + + old_app_state: StateApps + app_state: StateApps + + def setUp(self) -> None: + """ + Setup the migration test by reversing to `migrate_from` state, + then applying the `migrate_to` state. 
+ """ + assert self.app is not None, "You must define the `app` attribute" + assert self.migrate_from is not None, "You must define `migrate_from`" + assert self.migrate_to is not None, "You must define `migrate_to`" + + # Step 1: Set up the MigrationExecutor + executor = MigrationExecutor(connection) + + # Step 2: Reverse to the starting migration state + migrate_from = [(self.app, self.migrate_from)] + old_migrate_state = executor.migrate(migrate_from) + + self.old_app_state = old_migrate_state.apps + + def _perform_migration(self) -> None: + migrate_to = [(self.app, self.migrate_to)] + + executor = MigrationExecutor(connection) + executor.loader.build_graph() # reload the graph in case of dependency changes + executor.migrate(migrate_to) + + self.apps = executor.loader.project_state(migrate_to).apps + + @classmethod + def tearDownClass(cls) -> None: + super().tearDownClass() + + # reset to latest migration + call_command("migrate", verbosity=0, database=connection._alias) + + +class TestBackfillDenormalizedObjectType(BaseMigrationTest): + app = "core" + migrate_from = "0032_objectrecord__object_type" + migrate_to = "0033_objectrecord__backfill_denormalized_fields" + + def test_denormalize_object_type_to_object_record(self): + ObjectType = self.old_app_state.get_model("core", "ObjectType") + Object = self.old_app_state.get_model("core", "Object") + ObjectRecord = self.old_app_state.get_model("core", "ObjectRecord") + Service = self.old_app_state.get_model("zgw_consumers", "Service") + + service = Service.objects.create(api_root="http://example.local:8001/api/v2/") + + object_type1 = ObjectType.objects.create( + uuid="5741f306-0b6d-4597-9bab-c7d5dafe6d75", service=service + ) + object_type2 = ObjectType.objects.create( + uuid="89a30410-5d80-4007-a660-50dd94994464", service=service + ) + object1 = Object.objects.create(object_type=object_type1) + object2 = Object.objects.create(object_type=object_type2) + ObjectRecord.objects.create( + object=object1, index=1, 
version=1, start_at="2025-01-01" + ) + ObjectRecord.objects.create( + object=object1, index=2, version=1, start_at="2025-01-01" + ) + ObjectRecord.objects.create( + object=object2, index=1, version=1, start_at="2025-01-01" + ) + + self._perform_migration() + + ObjectRecord = self.apps.get_model("core", "ObjectRecord") + + records = ObjectRecord.objects.order_by("pk") + + self.assertEqual(records.count(), 3) + + record1, record2, record3 = records + + self.assertEqual(record1._object_type, record1.object.object_type) + self.assertEqual(record2._object_type, record2.object.object_type) + self.assertEqual(record3._object_type, record3.object.object_type) + + def test_concurrently_inserted_records_are_normalized(self): + ObjectType = self.old_app_state.get_model("core", "ObjectType") + Object = self.old_app_state.get_model("core", "Object") + ObjectRecord = self.old_app_state.get_model("core", "ObjectRecord") + Service = self.old_app_state.get_model("zgw_consumers", "Service") + + service = Service.objects.create(api_root="http://example.local:8001/api/v2/") + + object_type1 = ObjectType.objects.create( + uuid="5741f306-0b6d-4597-9bab-c7d5dafe6d75", service=service + ) + object_type2 = ObjectType.objects.create( + uuid="89a30410-5d80-4007-a660-50dd94994464", service=service + ) + object1 = Object.objects.create(object_type=object_type1) + object2 = Object.objects.create(object_type=object_type2) + ObjectRecord.objects.create( + object=object1, index=1, version=1, start_at="2025-01-01" + ) + ObjectRecord.objects.create( + object=object1, index=2, version=1, start_at="2025-01-01" + ) + ObjectRecord.objects.create( + object=object2, index=1, version=1, start_at="2025-01-01" + ) + + migration_module = importlib.import_module( + "objects.core.migrations.0033_objectrecord__backfill_denormalized_fields" + ) + + original_batch = migration_module.backfill_object_type_batch + + def delayed_batch(apps, cursor, batch_size): +
time.sleep(0.1)  # simulate long-running batch + return original_batch(apps, cursor, 1) + + with patch.object( + migration_module, + "backfill_object_type_batch", + side_effect=delayed_batch, + ): + thread = threading.Thread(target=self._perform_migration) + thread.start() + + # Simultaneously insert a new record + ObjectRecord.objects.create( + object=object2, + index=2, + version=1, + start_at="2025-01-01", + _object_type=None, + ) + + thread.join() + + ObjectRecord = self.apps.get_model("core", "ObjectRecord") + + records = ObjectRecord.objects.order_by("pk") + + self.assertEqual(records.count(), 4) + + record1, record2, record3, record4 = records + + self.assertEqual(record1._object_type, record1.object.object_type) + self.assertEqual(record2._object_type, record2.object.object_type) + self.assertEqual(record3._object_type, record3.object.object_type) + # Assert that the inserted row was also backfilled + self.assertEqual(record4._object_type, record4.object.object_type) diff --git a/src/objects/core/tests/test_models.py b/src/objects/core/tests/test_models.py new file mode 100644 index 00000000..35c58e90 --- /dev/null +++ b/src/objects/core/tests/test_models.py @@ -0,0 +1,16 @@ +from django.test import TestCase + +from ..models import ObjectRecord +from .factories import ObjectFactory, ObjectTypeFactory + + +class ObjectRecordTestCase(TestCase): + def test_object_type_is_denormalized_on_object_record(self): + object_type1 = ObjectTypeFactory.create() + object = ObjectFactory.create(object_type=object_type1) + + record = ObjectRecord.objects.create( + object=object, version=1, start_at="2025-01-01" + ) + + self.assertEqual(record._object_type, object_type1) diff --git a/src/objects/fixtures/demodata.json b/src/objects/fixtures/demodata.json index b06ee946..4ebd82c8 100644 --- a/src/objects/fixtures/demodata.json +++ b/src/objects/fixtures/demodata.json @@ -1,31 +1,4 @@ [ -{ - "model": "core.objecttype", -
"pk": 1, - "fields": { - "service": 1, - "uuid": "feeaa795-d212-4fa2-bb38-2c34996e5702", - "_name": "Boom" - } -}, -{ - "model": "core.objecttype", - "pk": 2, - "fields": { - "service": 1, - "uuid": "3a82fb7f-fc9b-4104-9804-993f639d6d0d", - "_name": "Straatverlichting" - } -}, -{ - "model": "core.objecttype", - "pk": 3, - "fields": { - "service": 1, - "uuid": "ca754b52-3f37-4c49-837c-130e8149e337", - "_name": "Melding" - } -}, { "model": "core.object", "pk": 1, @@ -203,7 +176,8 @@ "correct": null, "geometry": "SRID=4326;POINT (4.896787 52.37359492959213)", "created_on": "2025-08-29T12:16:12.578Z", - "modified_on": "2025-08-29T12:16:12.582Z" + "modified_on": "2025-08-29T12:16:12.582Z", + "_object_type": 1 } }, { @@ -223,7 +197,8 @@ "correct": null, "geometry": "SRID=4326;POINT (4.896787 52.37359492959213)", "created_on": "2025-08-29T12:16:12.578Z", - "modified_on": "2025-08-29T12:16:12.582Z" + "modified_on": "2025-08-29T12:16:12.582Z", + "_object_type": 1 } }, { @@ -243,7 +218,8 @@ "correct": null, "geometry": "SRID=4326;POINT (4.896787 52.37359492959213)", "created_on": "2025-08-29T12:16:12.578Z", - "modified_on": "2025-08-29T12:16:12.582Z" + "modified_on": "2025-08-29T12:16:12.582Z", + "_object_type": 1 } }, { @@ -263,7 +239,8 @@ "correct": null, "geometry": "SRID=4326;POINT (4.896787 52.37359492959213)", "created_on": "2025-08-29T12:16:12.578Z", - "modified_on": "2025-08-29T12:16:12.582Z" + "modified_on": "2025-08-29T12:16:12.582Z", + "_object_type": 1 } }, { @@ -283,7 +260,8 @@ "correct": 11, "geometry": "SRID=4326;POINT (4.896787 52.37359492959213)", "created_on": "2025-08-29T12:16:12.578Z", - "modified_on": "2025-08-29T12:16:12.582Z" + "modified_on": "2025-08-29T12:16:12.582Z", + "_object_type": 1 } }, { @@ -340,7 +318,8 @@ "correct": null, "geometry": "SRID=4326;POINT (4.905289 52.369918)", "created_on": "2025-08-29T12:16:12.578Z", - "modified_on": "2025-08-29T12:16:12.582Z" + "modified_on": "2025-08-29T12:16:12.582Z", + "_object_type": 1 } }, { @@ -368,7 
+347,8 @@ "correct": null, "geometry": "SRID=4326;POINT (4.897787 52.37659492959213)", "created_on": "2025-08-29T12:16:12.578Z", - "modified_on": "2025-08-29T12:16:12.582Z" + "modified_on": "2025-08-29T12:16:12.582Z", + "_object_type": 2 } }, { @@ -417,7 +397,8 @@ "correct": null, "geometry": "SRID=4326;POINT (4.901166811673011 52.37626808920047)", "created_on": "2025-08-29T12:16:12.578Z", - "modified_on": "2025-08-29T12:16:12.582Z" + "modified_on": "2025-08-29T12:16:12.582Z", + "_object_type": 1 } }, { @@ -462,7 +443,8 @@ "correct": null, "geometry": "SRID=4326;POINT (4.90663497584874 52.37384227041968)", "created_on": "2025-08-29T12:16:12.578Z", - "modified_on": "2025-08-29T12:16:12.582Z" + "modified_on": "2025-08-29T12:16:12.582Z", + "_object_type": 1 } }, { @@ -513,7 +495,8 @@ "correct": null, "geometry": "SRID=4326;POINT (4.908187981083221 52.3719808282391)", "created_on": "2025-08-29T12:16:12.578Z", - "modified_on": "2025-08-29T12:16:12.582Z" + "modified_on": "2025-08-29T12:16:12.582Z", + "_object_type": 1 } }, { @@ -569,7 +552,8 @@ "correct": null, "geometry": "SRID=4326;POINT (4.908722727852763 52.36991749536178)", "created_on": "2025-08-29T12:16:12.578Z", - "modified_on": "2025-08-29T12:16:12.582Z" + "modified_on": "2025-08-29T12:16:12.582Z", + "_object_type": 1 } }, { @@ -622,7 +606,8 @@ "correct": null, "geometry": "SRID=4326;POINT (4.908187009455887 52.36785425886177)", "created_on": "2025-08-29T12:16:12.578Z", - "modified_on": "2025-08-29T12:16:12.582Z" + "modified_on": "2025-08-29T12:16:12.582Z", + "_object_type": 1 } }, { @@ -667,7 +652,8 @@ "correct": null, "geometry": "SRID=4326;POINT (4.906633403722665 52.3659930690003)", "created_on": "2025-08-29T12:16:12.578Z", - "modified_on": "2025-08-29T12:16:12.582Z" + "modified_on": "2025-08-29T12:16:12.582Z", + "_object_type": 1 } }, { @@ -695,7 +681,8 @@ "correct": null, "geometry": "SRID=4326;POINT (4.897787 52.38340911858636)", "created_on": "2025-08-29T12:16:12.578Z", - "modified_on": 
"2025-08-29T12:16:12.582Z" + "modified_on": "2025-08-29T12:16:12.582Z", + "_object_type": 1 } }, { @@ -744,7 +731,8 @@ "correct": null, "geometry": "SRID=4326;POINT (4.90461710532547 52.38274861943765)", "created_on": "2025-08-29T12:16:12.578Z", - "modified_on": "2025-08-29T12:16:12.582Z" + "modified_on": "2025-08-29T12:16:12.582Z", + "_object_type": 1 } }, { @@ -763,7 +751,8 @@ "correct": null, "geometry": "SRID=4326;POINT (4.886851272147237 52.36991749536178)", "created_on": "2025-08-29T12:16:12.578Z", - "modified_on": "2025-08-29T12:16:12.582Z" + "modified_on": "2025-08-29T12:16:12.582Z", + "_object_type": 2 } }, { @@ -782,7 +771,8 @@ "correct": null, "geometry": "SRID=4326;POINT (4.88738601891678 52.3719808282391)", "created_on": "2025-08-29T12:16:12.578Z", - "modified_on": "2025-08-29T12:16:12.582Z" + "modified_on": "2025-08-29T12:16:12.582Z", + "_object_type": 2 } }, { @@ -801,7 +791,8 @@ "correct": null, "geometry": "SRID=4326;POINT (4.88893902415126 52.37384227041968)", "created_on": "2025-08-29T12:16:12.578Z", - "modified_on": "2025-08-29T12:16:12.582Z" + "modified_on": "2025-08-29T12:16:12.582Z", + "_object_type": 2 } }, { @@ -820,7 +811,8 @@ "correct": null, "geometry": "SRID=4326;POINT (4.891358354234796 52.3753195751365)", "created_on": "2025-08-29T12:16:12.578Z", - "modified_on": "2025-08-29T12:16:12.582Z" + "modified_on": "2025-08-29T12:16:12.582Z", + "_object_type": 2 } }, { @@ -839,20 +831,24 @@ "correct": null, "geometry": "SRID=4326;POINT (4.894407188326989 52.37626808920047)", "created_on": "2025-08-29T12:16:12.578Z", - "modified_on": "2025-08-29T12:16:12.582Z" + "modified_on": "2025-08-29T12:16:12.582Z", + "_object_type": 2 } }, { "model": "token.tokenauth", "pk": 1, "fields": { + "identifier": "token-1", + "token": "cd63e158f3aca276ef284e3033d020a22899c728", "contact_person": "test", "email": "let@me.test", "organization": "", "last_modified": "2020-12-23T11:43:16.820Z", "created": "2020-12-22T16:27:00.751Z", - "token": 
"cd63e158f3aca276ef284e3033d020a22899c728", - "identifier": "token-1" + "application": "", + "administration": "", + "is_superuser": false } }, { @@ -861,7 +857,9 @@ "fields": { "token_auth": 1, "object_type": 2, - "mode": "read_and_write" + "mode": "read_and_write", + "use_fields": false, + "fields": {} } }, { @@ -870,25 +868,65 @@ "fields": { "token_auth": 1, "object_type": 1, - "mode": "read_and_write" + "mode": "read_and_write", + "use_fields": false, + "fields": {} } }, { "model": "zgw_consumers.service", - "pk": 1, "fields": { "label": "Objecttypen API", + "uuid": "2de269e1-112c-4c1d-b386-ee7becbf83a0", "slug": "objecttypen-api", "api_type": "orc", "api_root": "http://localhost:8001/api/v1/", + "api_connection_check_path": "", + "auth_type": "api_key", "client_id": "", "secret": "", - "auth_type": "api_key", + "jwt_valid_for": 43200, "header_key": "Authorization", "header_value": "Token 4271c4a3bb0726c55d44b1e2e68de29b19dfc643", "nlx": "", "user_id": "", - "user_representation": "" + "user_representation": "", + "client_certificate": null, + "server_certificate": null, + "timeout": 10 + } +}, +{ + "model": "core.objecttype", + "pk": 1, + "fields": { + "service": [ + "objecttypen-api" + ], + "uuid": "feeaa795-d212-4fa2-bb38-2c34996e5702", + "_name": "Boom" + } +}, +{ + "model": "core.objecttype", + "pk": 2, + "fields": { + "service": [ + "objecttypen-api" + ], + "uuid": "3a82fb7f-fc9b-4104-9804-993f639d6d0d", + "_name": "Straatverlichting" + } +}, +{ + "model": "core.objecttype", + "pk": 3, + "fields": { + "service": [ + "objecttypen-api" + ], + "uuid": "ca754b52-3f37-4c49-837c-130e8149e337", + "_name": "Melding" } } ] diff --git a/src/objects/token/permissions.py b/src/objects/token/permissions.py index ab2d553d..aa2e318f 100644 --- a/src/objects/token/permissions.py +++ b/src/objects/token/permissions.py @@ -46,7 +46,7 @@ def has_object_permission(self, request, view, obj): return True object_permission = request.auth.get_permission_for_object_type( - 
obj.object.object_type + obj._object_type ) if not object_permission: return False diff --git a/src/objects/utils/serializers.py b/src/objects/utils/serializers.py index dc4e51c4..4fdc2cca 100644 --- a/src/objects/utils/serializers.py +++ b/src/objects/utils/serializers.py @@ -106,7 +106,7 @@ def to_representation(self, instance): not_allowed = set(get_field_names(data)) - set(get_field_names(result_data)) if not_allowed: self.not_allowed[ - f"{instance.object.object_type.url}({instance.version})" + f"{instance._object_type.url}({instance.version})" ] |= not_allowed else: spec_query = build_spec(query_fields) @@ -121,7 +121,7 @@ def to_representation(self, instance): ) if not_allowed: self.not_allowed[ - f"{instance.object.object_type.url}({instance.version})" + f"{instance._object_type.url}({instance.version})" ] |= not_allowed return result_data @@ -148,11 +148,11 @@ def get_allowed_fields(self, instance) -> list: return ALL_FIELDS # use prefetch_related for DB optimization - if getattr(instance.object.object_type, "token_permissions", None): - permission = instance.object.object_type.token_permissions[0] + if getattr(instance._object_type, "token_permissions", None): + permission = instance._object_type.token_permissions[0] else: permission = request.auth.get_permission_for_object_type( - instance.object.object_type + instance._object_type ) if permission.mode == PermissionModes.read_only and permission.use_fields: return permission.fields.get(str(instance.version), [])