From 9da4dab33681781c444db5153f3d8d4799773c04 Mon Sep 17 00:00:00 2001
From: Shahzad Lone
Date: Thu, 4 Jan 2024 18:03:00 -0500
Subject: [PATCH] refactor: Rename key,id,dockey to docID terminology (#1749)

## Relevant issue(s)

- Subtask of #1750 [EPIC]
- [x] Resolves #1752
- [x] Resolves #1272

## BREAKING CHANGE:

- Use of `_key` to access a document's unique ID is now deprecated; use `_docID` instead.
- Use of `dockey`/`id` is now deprecated; use `docID` instead.
- Use of `dockeys`/`ids` is now deprecated; use `docIDs` instead.

## Description

- [x] Rename `_key` to `_docID` everywhere.
- [x] Rename `_keys` to `docIDs` in the explain response.
- [x] Rename `_newKey` to `_docIDNew` in the backup/recover functionality.
- [x] Fix `_docID` tests.
- [x] Fix explain and backup/recover functionality tests.
- [x] Fix the collectionID order for a P2P test (leaving a note, as the order was reverted).
- [x] Update all CIDs.
- [x] Rename all files with `key(s)|dockey(s)` in the name to `doc_id(s)`.
- [x] Document the breaking change to pass the change detector.

## For Reviewers:

- The main commits to review are the `PR(MAIN)` commits.
- If you have more time, the `PR(MINOR)` and `PR(*TEST)` commits are also good to go over.

## Disclaimer / Discussion:

I do not like these non-underscored `docID`/`docIDs` and would be in favor of underscoring them:

```
query {
  User(docIDs: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d"]) {
    _docID
  }
}
```

```
query {
  Users {
    Name
    _docID
    _version {
      docID
    }
  }
}
```

```
Request: `{
  commits(groupBy: [docID], order: {docID: DESC}) {
    docID
  }
}`,

Results: []map[string]any{
  {
    "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7",
  },
  {
    "docID": "bae-72f3dc53-1846-55d5-915c-28c4e83cc891",
  },
},
```

EDIT: The above was resolved with #2162, to be handled outside of this PR.

## Limitations (out of scope of this PR):

- #1467
- #1751
- #1550
- #2162

---
 Makefile                                      |  20 +-
 README.md                                     |  14 +-
 cli/cli.go                                    |   2 +-
 cli/collection_delete.go                      |  34 +-
 cli/collection_get.go                         |   6 +-
 ...ion_keys.go => collection_list_doc_ids.go} |  22 +-
 cli/collection_update.go                      |  42 +--
 cli/errors.go                                 |   2 +-
 cli/version_test.go                           |  18 +-
 client/collection.go                          |  99 +++---
 client/descriptions.go                        |   8 +-
 client/{dockey.go => doc_id.go}               |  64 ++--
 client/document.go                            |  71 ++--
 client/document_test.go                       |  12 +-
 client/errors.go                              |  27 +-
 client/mocks/collection.go                    | 322 +++++++++---------
 client/mocks/db.go                            | 150 +++++---
 client/request/commit.go                      |   2 +-
 client/request/consts.go                      |  24 +-
 client/request/mutation.go                    |   6 +-
 client/request/select.go                      |   8 +-
 client/request/subscription.go                |  10 +-
 core/crdt/base_test.go                        |   8 +-
 core/crdt/composite.go                        |   8 +-
 core/crdt/lwwreg.go                           |   8 +-
 core/crdt/lwwreg_test.go                      |   2 +-
 core/doc.go                                   |  18 +-
 core/key.go                                   |  52 +--
 core/key_test.go                              |  26 +-
 db/backup.go                                  |  48 +--
 db/backup_test.go                             |  28 +-
 db/base/collection_keys.go                    |  17 +-
 db/collection.go                              | 147 ++++----
 db/collection_delete.go                       |  86 ++---
 db/collection_get.go                          |  14 +-
 db/collection_index.go                        |   4 +-
 db/collection_update.go                       |  76 ++---
 db/errors.go                                  | 115 ++-----
 db/fetcher/encoded_doc.go                     |  26 +-
 db/fetcher/fetcher.go                         |  22 +-
 db/fetcher/indexer.go                         |   6 +-
 db/fetcher/mocks/encoded_document.go          |  20 +-
 db/fetcher/versioned.go                       |  28 +-
 db/index.go                                   |   6 +-
 db/indexed_docs_test.go                       |  22 +-
 db/subscriptions.go                           |   2 +-
 docs/cli/defradb_client.md                    |   1 +
 docs/cli/defradb_client_collection.md         |   6 +-
 docs/cli/defradb_client_collection_delete.md  |  12 +-
 ...md => defradb_client_collection_docIDs.md} |  12 +-
 docs/cli/defradb_client_collection_get.md     |   2 +-
 docs/cli/defradb_client_collection_update.md  |  14 +-
docs/cli/defradb_client_document.md | 38 --- docs/cli/defradb_client_document_create.md | 44 --- docs/cli/defradb_client_document_delete.md | 46 --- docs/cli/defradb_client_document_save.md | 42 --- docs/cli/defradb_client_document_update.md | 52 --- docs/cli/defradb_client_index_create.md | 4 +- ...document_get.md => defradb_client_view.md} | 21 +- ...ent_keys.md => defradb_client_view_add.md} | 22 +- .../i1749-rename-key-to-doc-id-terminology.md | 7 + events/db_update.go | 2 +- examples/request/user_creation.graphql | 4 +- examples/request/user_query.graphql | 4 +- http/client_collection.go | 92 ++--- http/errors.go | 10 - http/handler_collection.go | 98 +++--- lens/fetcher.go | 10 +- merkle/clock/clock_test.go | 8 +- merkle/clock/heads_test.go | 2 +- net/client.go | 8 +- net/client_test.go | 6 +- net/dag.go | 10 +- net/dag_test.go | 8 +- net/doc.go | 2 +- net/errors.go | 22 +- net/pb/net.pb.go | 120 +++---- net/pb/net.proto | 6 +- net/pb/net_grpc.pb.go | 2 +- net/pb/net_vtproto.pb.go | 36 +- net/peer.go | 50 +-- net/peer_collection.go | 12 +- net/peer_replicator.go | 4 +- net/peer_test.go | 46 +-- net/process.go | 8 +- net/server.go | 60 ++-- net/server_test.go | 26 +- planner/commit.go | 28 +- planner/create.go | 6 +- planner/delete.go | 10 +- planner/errors.go | 7 +- planner/explain.go | 4 +- planner/group.go | 6 +- planner/mapper/commitSelect.go | 4 +- planner/mapper/mapper.go | 10 +- planner/mapper/targetable.go | 6 +- planner/multi.go | 18 +- planner/scan.go | 2 +- planner/select.go | 44 +-- planner/type_join.go | 16 +- planner/type_join.md | 18 +- planner/update.go | 11 +- request/graphql/parser/commit.go | 4 +- request/graphql/parser/mutation.go | 4 +- request/graphql/parser/query.go | 18 +- request/graphql/schema/collection.go | 16 +- request/graphql/schema/descriptions.go | 40 +-- request/graphql/schema/descriptions_test.go | 66 ++-- request/graphql/schema/generate.go | 40 +-- request/graphql/schema/types/commits.go | 28 +- request/graphql/schema/types/descriptions.go | 10 +- tests/bench/bench_util.go | 14 +- tests/bench/collection/utils.go | 18 +- tests/bench/query/index/simple_test.go | 2 +- tests/bench/query/planner/simple_test.go | 2 +- tests/bench/query/simple/simple_test.go | 2 +- tests/bench/query/simple/utils.go | 34 +- tests/bench/query/simple/with_filter_test.go | 2 +- .../query/simple/with_limit_offset_test.go | 2 +- .../query/simple/with_multi_lookup_test.go | 6 +- tests/bench/query/simple/with_order_test.go | 2 +- .../query/simple/with_single_lookup_test.go | 6 +- tests/clients/cli/wrapper_collection.go | 94 ++--- tests/gen/gen_auto.go | 18 +- tests/gen/gen_auto_configurator.go | 34 +- tests/gen/gen_auto_test.go | 42 +-- .../backup/one_to_many/export_test.go | 6 +- .../backup/one_to_many/import_test.go | 22 +- .../backup/one_to_one/export_test.go | 8 +- .../backup/one_to_one/import_test.go | 34 +- .../backup/self_reference/export_test.go | 4 +- .../backup/self_reference/import_test.go | 16 +- .../integration/backup/simple/export_test.go | 6 +- .../integration/backup/simple/import_test.go | 8 +- .../{with_key_test.go => with_doc_id_test.go} | 22 +- ...with_keys_test.go => with_doc_ids_test.go} | 26 +- .../update/simple/with_filter_test.go | 4 +- .../events/simple/with_create_test.go | 8 +- .../events/simple/with_create_txn_test.go | 6 +- .../events/simple/with_delete_test.go | 8 +- .../events/simple/with_update_test.go | 14 +- tests/integration/events/utils.go | 4 +- .../integration/explain/debug/dagscan_test.go | 20 +- .../integration/explain/debug/delete_test.go | 
22 +- .../explain/debug/delete_with_error_test.go | 2 +- ...est.go => group_with_doc_id_child_test.go} | 6 +- ...ckey_test.go => group_with_doc_id_test.go} | 12 +- ...o => type_join_with_filter_doc_id_test.go} | 12 +- .../integration/explain/debug/update_test.go | 16 +- ...key_test.go => with_filter_doc_id_test.go} | 30 +- .../explain/debug/with_sum_join_test.go | 2 +- .../integration/explain/default/basic_test.go | 2 +- .../explain/default/dagscan_test.go | 20 +- .../explain/default/delete_test.go | 34 +- .../explain/default/delete_with_error_test.go | 2 +- tests/integration/explain/default/fixture.go | 2 +- .../default/group_with_average_test.go | 8 +- ...est.go => group_with_doc_id_child_test.go} | 8 +- ...ckey_test.go => group_with_doc_id_test.go} | 12 +- .../default/group_with_filter_child_test.go | 4 +- .../default/group_with_limit_child_test.go | 6 +- .../explain/default/group_with_limit_test.go | 2 +- .../default/group_with_order_child_test.go | 6 +- .../explain/default/group_with_order_test.go | 2 +- .../explain/default/invalid_type_arg_test.go | 2 +- .../explain/default/type_join_many_test.go | 2 +- .../explain/default/type_join_one_test.go | 4 +- .../explain/default/type_join_test.go | 4 +- ...o => type_join_with_filter_doc_id_test.go} | 16 +- .../default/type_join_with_filter_test.go | 4 +- .../explain/default/update_test.go | 24 +- ...key_test.go => with_filter_doc_id_test.go} | 42 +-- .../explain/default/with_sum_join_test.go | 2 +- .../explain/execute/dagscan_test.go | 4 +- .../explain/execute/delete_test.go | 4 +- tests/integration/explain/execute/fixture.go | 12 +- .../execute/query_deleted_docs_test.go | 6 +- .../explain/execute/update_test.go | 4 +- .../integration/explain/simple/basic_test.go | 4 +- tests/integration/index/create_unique_test.go | 6 +- .../one_to_many/with_alias_test.go | 24 +- .../field_kinds/one_to_one/with_alias_test.go | 10 +- .../one_to_one/with_simple_test.go | 22 +- .../one_to_one_to_one/with_txn_test.go | 116 +++---- .../mutation/create/simple_test.go | 12 +- .../mutation/create/with_version_test.go | 2 +- .../one_to_many/with_show_deleted_test.go | 16 +- .../one_to_one_to_one/with_id_test.go | 18 +- .../one_to_one_to_one/with_txn_test.go | 114 +++---- .../mutation/delete/simple_test.go | 4 +- .../delete/with_deleted_field_test.go | 8 +- .../mutation/delete/with_id_alias_test.go | 6 +- .../mutation/delete/with_id_test.go | 12 +- .../mutation/delete/with_id_txn_test.go | 8 +- .../mutation/delete/with_ids_alias_test.go | 10 +- .../mutation/delete/with_ids_filter_test.go | 8 +- .../mutation/delete/with_ids_test.go | 28 +- .../mutation/delete/with_ids_txn_test.go | 12 +- .../delete/with_ids_update_alias_test.go | 10 +- .../integration/mutation/mix/with_txn_test.go | 80 ++--- .../special/invalid_operation_test.go | 2 +- .../field_kinds/one_to_many/simple_test.go | 32 +- .../one_to_many/with_alias_test.go | 48 +-- .../field_kinds/one_to_one/with_alias_test.go | 46 +-- .../one_to_one/with_simple_test.go | 58 ++-- .../update/underscored_schema_test.go | 1 + .../mutation/update/with_delete_test.go | 2 +- .../mutation/update/with_filter_test.go | 2 +- .../mutation/update/with_id_test.go | 10 +- .../mutation/update/with_ids_test.go | 2 +- tests/integration/net/order/tcp_test.go | 2 +- tests/integration/net/order/utils.go | 42 +-- .../peer/subscribe/with_add_get_test.go | 2 + .../simple/replicator/with_create_test.go | 20 +- .../integration/query/commits/simple_test.go | 32 +- .../query/commits/with_cid_test.go | 10 +- .../query/commits/with_depth_test.go | 
34 +- ...ey_cid_test.go => with_doc_id_cid_test.go} | 22 +- ...ount_test.go => with_doc_id_count_test.go} | 12 +- ...ield_test.go => with_doc_id_field_test.go} | 36 +- ...est.go => with_doc_id_group_order_test.go} | 12 +- ...st.go => with_doc_id_limit_offset_test.go} | 10 +- ...imit_test.go => with_doc_id_limit_test.go} | 10 +- ...=> with_doc_id_order_limit_offset_test.go} | 10 +- ...rder_test.go => with_doc_id_order_test.go} | 96 +++--- ..._prop_test.go => with_doc_id_prop_test.go} | 12 +- ...ith_dockey_test.go => with_doc_id_test.go} | 76 ++--- ...e_test.go => with_doc_id_typename_test.go} | 12 +- .../query/commits/with_field_test.go | 14 +- .../query/commits/with_group_test.go | 28 +- .../query/latest_commits/simple_test.go | 2 +- .../with_collectionid_prop_test.go | 4 +- ...ield_test.go => with_doc_id_field_test.go} | 26 +- ..._prop_test.go => with_doc_id_prop_test.go} | 10 +- ...ith_dockey_test.go => with_doc_id_test.go} | 22 +- .../query/latest_commits/with_field_test.go | 4 +- ...dockey_test.go => with_cid_doc_id_test.go} | 53 +-- ...ith_dockey_test.go => with_doc_id_test.go} | 6 +- ...h_dockeys_test.go => with_doc_ids_test.go} | 6 +- .../with_filter_related_id_test.go | 6 +- .../with_group_related_id_alias_test.go | 28 +- .../query/one_to_many_to_many/joins_test.go | 66 ++-- .../query/one_to_many_to_one/joins_test.go | 62 ++-- tests/integration/query/simple/simple_test.go | 8 +- ...dockey_test.go => with_cid_doc_id_test.go} | 73 ++-- ...key_test.go => with_doc_id_filter_test.go} | 6 +- ...ith_dockey_test.go => with_doc_id_test.go} | 14 +- ...h_dockeys_test.go => with_doc_ids_test.go} | 24 +- ...ckey_test.go => with_group_doc_id_test.go} | 6 +- ...eys_test.go => with_group_doc_ids_test.go} | 6 +- .../query/simple/with_version_test.go | 30 +- .../schema/aggregates/inline_array_test.go | 2 +- tests/integration/schema/default_fields.go | 20 +- tests/integration/schema/filter_test.go | 12 +- tests/integration/schema/get_schema_test.go | 96 +++--- tests/integration/schema/group_test.go | 4 +- tests/integration/schema/input_type_test.go | 12 +- .../schema/migrations/query/simple_test.go | 48 +-- ...ith_dockey_test.go => with_doc_id_test.go} | 32 +- .../schema/migrations/query/with_p2p_test.go | 10 +- .../migrations/query/with_restart_test.go | 4 +- .../migrations/query/with_set_default_test.go | 12 +- .../schema/migrations/query/with_txn_test.go | 8 +- .../migrations/query/with_update_test.go | 8 +- .../schema/migrations/simple_test.go | 8 +- tests/integration/schema/simple_test.go | 6 +- .../schema/updates/add/field/create_test.go | 22 +- .../updates/add/field/create_update_test.go | 8 +- .../kind/{dockey_test.go => doc_id_test.go} | 12 +- .../field/kind/foreign_object_array_test.go | 4 +- .../add/field/kind/foreign_object_test.go | 4 +- .../schema/updates/add/field/simple_test.go | 16 +- .../schema/updates/move/simple_test.go | 2 +- .../schema/with_update_set_default_test.go | 4 +- .../subscription/subscription_test.go | 42 +-- tests/integration/utils2.go | 20 +- tests/predefined/gen_predefined.go | 26 +- tests/predefined/gen_predefined_test.go | 88 ++--- tests/predefined/util_test.go | 12 +- version/version.go | 20 +- version/version_test.go | 8 +- 281 files changed, 3026 insertions(+), 3212 deletions(-) rename cli/{collection_keys.go => collection_list_doc_ids.go} (65%) rename client/{dockey.go => doc_id.go} (50%) rename docs/cli/{defradb_client_collection_keys.md => defradb_client_collection_docIDs.md} (83%) delete mode 100644 docs/cli/defradb_client_document.md delete mode 100644 
docs/cli/defradb_client_document_create.md delete mode 100644 docs/cli/defradb_client_document_delete.md delete mode 100644 docs/cli/defradb_client_document_save.md delete mode 100644 docs/cli/defradb_client_document_update.md rename docs/cli/{defradb_client_document_get.md => defradb_client_view.md} (63%) rename docs/cli/{defradb_client_document_keys.md => defradb_client_view_add.md} (66%) create mode 100644 docs/data_format_changes/i1749-rename-key-to-doc-id-terminology.md rename tests/integration/collection/update/simple/{with_key_test.go => with_doc_id_test.go} (81%) rename tests/integration/collection/update/simple/{with_keys_test.go => with_doc_ids_test.go} (79%) rename tests/integration/explain/debug/{group_with_dockey_child_test.go => group_with_doc_id_child_test.go} (80%) rename tests/integration/explain/debug/{group_with_dockey_test.go => group_with_doc_id_test.go} (78%) rename tests/integration/explain/debug/{type_join_with_filter_and_key_test.go => type_join_with_filter_doc_id_test.go} (90%) rename tests/integration/explain/debug/{with_filter_key_test.go => with_filter_doc_id_test.go} (72%) rename tests/integration/explain/default/{group_with_dockey_child_test.go => group_with_doc_id_child_test.go} (86%) rename tests/integration/explain/default/{group_with_dockey_test.go => group_with_doc_id_test.go} (89%) rename tests/integration/explain/default/{type_join_with_filter_and_key_test.go => type_join_with_filter_doc_id_test.go} (92%) rename tests/integration/explain/default/{with_filter_key_test.go => with_filter_doc_id_test.go} (85%) rename tests/integration/query/commits/{with_dockey_cid_test.go => with_doc_id_cid_test.go} (72%) rename tests/integration/query/commits/{with_dockey_count_test.go => with_doc_id_count_test.go} (68%) rename tests/integration/query/commits/{with_dockey_field_test.go => with_doc_id_field_test.go} (66%) rename tests/integration/query/commits/{with_dockey_group_order_test.go => with_doc_id_group_order_test.go} (79%) rename tests/integration/query/commits/{with_dockey_limit_offset_test.go => with_doc_id_limit_offset_test.go} (75%) rename tests/integration/query/commits/{with_dockey_limit_test.go => with_doc_id_limit_test.go} (75%) rename tests/integration/query/commits/{with_dockey_order_limit_offset_test.go => with_doc_id_order_limit_offset_test.go} (74%) rename tests/integration/query/commits/{with_dockey_order_test.go => with_doc_id_order_test.go} (55%) rename tests/integration/query/commits/{with_dockey_prop_test.go => with_doc_id_prop_test.go} (74%) rename tests/integration/query/commits/{with_dockey_test.go => with_doc_id_test.go} (58%) rename tests/integration/query/commits/{with_dockey_typename_test.go => with_doc_id_typename_test.go} (69%) rename tests/integration/query/latest_commits/{with_dockey_field_test.go => with_doc_id_field_test.go} (65%) rename tests/integration/query/latest_commits/{with_dockey_prop_test.go => with_doc_id_prop_test.go} (75%) rename tests/integration/query/latest_commits/{with_dockey_test.go => with_doc_id_test.go} (58%) rename tests/integration/query/one_to_many/{with_cid_dockey_test.go => with_cid_doc_id_test.go} (81%) rename tests/integration/query/one_to_many/{with_dockey_test.go => with_doc_id_test.go} (91%) rename tests/integration/query/one_to_many/{with_dockeys_test.go => with_doc_ids_test.go} (90%) rename tests/integration/query/simple/{with_cid_dockey_test.go => with_cid_doc_id_test.go} (67%) rename tests/integration/query/simple/{with_key_test.go => with_doc_id_filter_test.go} (79%) rename 
tests/integration/query/simple/{with_dockey_test.go => with_doc_id_test.go} (73%) rename tests/integration/query/simple/{with_dockeys_test.go => with_doc_ids_test.go} (68%) rename tests/integration/query/simple/{with_group_dockey_test.go => with_group_doc_id_test.go} (85%) rename tests/integration/query/simple/{with_group_dockeys_test.go => with_group_doc_ids_test.go} (83%) rename tests/integration/schema/migrations/query/{with_dockey_test.go => with_doc_id_test.go} (81%) rename tests/integration/schema/updates/add/field/kind/{dockey_test.go => doc_id_test.go} (83%) diff --git a/Makefile b/Makefile index 5dddc2872e..0ddde9790f 100644 --- a/Makefile +++ b/Makefile @@ -217,11 +217,26 @@ test\:cli: test\:names: gotestsum --format testname -- $(DEFAULT_TEST_DIRECTORIES) $(TEST_FLAGS) +.PHONY: test\:lens +test\:lens: + @$(MAKE) deps:lens + gotestsum --format testname -- ./$(LENS_TEST_DIRECTORY)/... $(TEST_FLAGS) + +.PHONY: test\:lens-quick +test\:lens-quick: + @$(MAKE) deps:lens + gotestsum --format testname -- ./$(LENS_TEST_DIRECTORY)/... + .PHONY: test\:all test\:all: @$(MAKE) test:names @$(MAKE) test:lens +.PHONY: test\:all-quick +test\:all-quick: + @$(MAKE) test:quick + @$(MAKE) test:lens-quick + .PHONY: test\:verbose test\:verbose: gotestsum --format standard-verbose -- $(DEFAULT_TEST_DIRECTORIES) $(TEST_FLAGS) @@ -246,11 +261,6 @@ test\:bench-short: test\:scripts: @$(MAKE) -C ./tools/scripts/ test -.PHONY: test\:lens -test\:lens: - @$(MAKE) deps:lens - gotestsum --format testname -- ./$(LENS_TEST_DIRECTORY)/... $(TEST_FLAGS) - .PHONY: test\:coverage test\:coverage: @$(MAKE) deps:lens diff --git a/README.md b/README.md index 8ae2ebfb44..acc438273b 100644 --- a/README.md +++ b/README.md @@ -101,7 +101,7 @@ Submit a `mutation` request to create a document of the `User` type: defradb client query ' mutation { create_User(data: "{\"age\": 31, \"verified\": true, \"points\": 90, \"name\": \"Bob\"}") { - _key + _docID } } ' @@ -113,13 +113,13 @@ Expected response: { "data": [ { - "_key": "bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab", + "_docID": "bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab", } ] } ``` -`_key` is the document's key, a unique identifier of the document, determined by its schema and initial data. +`_docID` is the document's unique identifier determined by its schema and initial data. ## Query documents @@ -129,7 +129,7 @@ Once you have populated your node with data, you can query it: defradb client query ' query { User { - _key + _docID age name points @@ -138,7 +138,7 @@ defradb client query ' ' ``` -This query obtains *all* users and returns their fields `_key, age, name, points`. GraphQL queries only return the exact fields requested. +This query obtains *all* users and returns their fields `_docID, age, name, points`. GraphQL queries only return the exact fields requested. You can further filter results with the `filter` argument. @@ -146,7 +146,7 @@ You can further filter results with the `filter` argument. 
defradb client query ' query { User(filter: {points: {_ge: 50}}) { - _key + _docID age name points @@ -166,7 +166,7 @@ To get the most recent commit in the MerkleDAG for the document identified as `b ```shell defradb client query ' query { - latestCommits(dockey: "bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab") { + latestCommits(docID: "bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab") { cid delta height diff --git a/cli/cli.go b/cli/cli.go index 8827424334..2ee882afce 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -91,7 +91,7 @@ func NewDefraCommand(cfg *config.Config) *cobra.Command { collection := MakeCollectionCommand(cfg) collection.AddCommand( MakeCollectionGetCommand(), - MakeCollectionKeysCommand(), + MakeCollectionListDocIDsCommand(), MakeCollectionDeleteCommand(), MakeCollectionUpdateCommand(), MakeCollectionCreateCommand(), diff --git a/cli/collection_delete.go b/cli/collection_delete.go index 85539d5eb3..dcd7c9d872 100644 --- a/cli/collection_delete.go +++ b/cli/collection_delete.go @@ -17,15 +17,15 @@ import ( ) func MakeCollectionDeleteCommand() *cobra.Command { - var keys []string + var argDocIDs []string var filter string var cmd = &cobra.Command{ - Use: "delete [--filter --key ]", - Short: "Delete documents by key or filter.", - Long: `Delete documents by key or filter and lists the number of documents deleted. + Use: "delete [--filter --docID ]", + Short: "Delete documents by docID or filter.", + Long: `Delete documents by docID or filter and lists the number of documents deleted. -Example: delete by key(s) - defradb client collection delete --name User --key bae-123,bae-456 +Example: delete by docID(s) + defradb client collection delete --name User --docID bae-123,bae-456 Example: delete by filter defradb client collection delete --name User --filter '{ "_gte": { "points": 100 } }' @@ -37,26 +37,26 @@ Example: delete by filter } switch { - case len(keys) == 1: - docKey, err := client.NewDocKeyFromString(keys[0]) + case len(argDocIDs) == 1: + docID, err := client.NewDocIDFromString(argDocIDs[0]) if err != nil { return err } - res, err := col.DeleteWithKey(cmd.Context(), docKey) + res, err := col.DeleteWithDocID(cmd.Context(), docID) if err != nil { return err } return writeJSON(cmd, res) - case len(keys) > 1: - docKeys := make([]client.DocKey, len(keys)) - for i, v := range keys { - docKey, err := client.NewDocKeyFromString(v) + case len(argDocIDs) > 1: + docIDs := make([]client.DocID, len(argDocIDs)) + for i, v := range argDocIDs { + docID, err := client.NewDocIDFromString(v) if err != nil { return err } - docKeys[i] = docKey + docIDs[i] = docID } - res, err := col.DeleteWithKeys(cmd.Context(), docKeys) + res, err := col.DeleteWithDocIDs(cmd.Context(), docIDs) if err != nil { return err } @@ -68,11 +68,11 @@ Example: delete by filter } return writeJSON(cmd, res) default: - return ErrNoDocKeyOrFilter + return ErrNoDocIDOrFilter } }, } - cmd.Flags().StringSliceVar(&keys, "key", nil, "Document key") + cmd.Flags().StringSliceVar(&argDocIDs, "docID", nil, "Document ID") cmd.Flags().StringVar(&filter, "filter", "", "Document filter") return cmd } diff --git a/cli/collection_get.go b/cli/collection_get.go index d908bbdb7a..d753e0a8db 100644 --- a/cli/collection_get.go +++ b/cli/collection_get.go @@ -19,7 +19,7 @@ import ( func MakeCollectionGetCommand() *cobra.Command { var showDeleted bool var cmd = &cobra.Command{ - Use: "get [--show-deleted]", + Use: "get [--show-deleted]", Short: "View document fields.", Long: `View document fields. 
@@ -33,11 +33,11 @@ Example: return cmd.Usage() } - docKey, err := client.NewDocKeyFromString(args[0]) + docID, err := client.NewDocIDFromString(args[0]) if err != nil { return err } - doc, err := col.Get(cmd.Context(), docKey, showDeleted) + doc, err := col.Get(cmd.Context(), docID, showDeleted) if err != nil { return err } diff --git a/cli/collection_keys.go b/cli/collection_list_doc_ids.go similarity index 65% rename from cli/collection_keys.go rename to cli/collection_list_doc_ids.go index a453c16a86..d7009cb300 100644 --- a/cli/collection_keys.go +++ b/cli/collection_list_doc_ids.go @@ -16,14 +16,14 @@ import ( "github.com/sourcenetwork/defradb/http" ) -func MakeCollectionKeysCommand() *cobra.Command { +func MakeCollectionListDocIDsCommand() *cobra.Command { var cmd = &cobra.Command{ - Use: "keys", - Short: "List all document keys.", - Long: `List all document keys. + Use: "docIDs", + Short: "List all document IDs (docIDs).", + Long: `List all document IDs (docIDs). Example: - defradb client collection keys --name User + defradb client collection docIDs --name User `, RunE: func(cmd *cobra.Command, args []string) error { col, ok := tryGetCollectionContext(cmd) @@ -31,16 +31,16 @@ Example: return cmd.Usage() } - docCh, err := col.GetAllDocKeys(cmd.Context()) + docCh, err := col.GetAllDocIDs(cmd.Context()) if err != nil { return err } - for docKey := range docCh { - results := &http.DocKeyResult{ - Key: docKey.Key.String(), + for docIDResult := range docCh { + results := &http.DocIDResult{ + DocID: docIDResult.ID.String(), } - if docKey.Err != nil { - results.Error = docKey.Err.Error() + if docIDResult.Err != nil { + results.Error = docIDResult.Err.Error() } if err := writeJSON(cmd, results); err != nil { return err diff --git a/cli/collection_update.go b/cli/collection_update.go index 317a2e8119..9fd2deed3f 100644 --- a/cli/collection_update.go +++ b/cli/collection_update.go @@ -17,24 +17,24 @@ import ( ) func MakeCollectionUpdateCommand() *cobra.Command { - var keys []string + var argDocIDs []string var filter string var updater string var cmd = &cobra.Command{ - Use: "update [--filter --key --updater ] ", - Short: "Update documents by key or filter.", - Long: `Update documents by key or filter. + Use: "update [--filter --docID --updater ] ", + Short: "Update documents by docID or filter.", + Long: `Update documents by docID or filter. 
Example: update from string - defradb client collection update --name User --key bae-123 '{ "name": "Bob" }' + defradb client collection update --name User --docID bae-123 '{ "name": "Bob" }' Example: update by filter defradb client collection update --name User \ --filter '{ "_gte": { "points": 100 } }' --updater '{ "verified": true }' -Example: update by keys +Example: update by docIDs defradb client collection update --name User \ - --key bae-123,bae-456 --updater '{ "verified": true }' + --docID bae-123,bae-456 --updater '{ "verified": true }' `, Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { @@ -44,26 +44,26 @@ Example: update by keys } switch { - case len(keys) == 1 && updater != "": - docKey, err := client.NewDocKeyFromString(keys[0]) + case len(argDocIDs) == 1 && updater != "": + docID, err := client.NewDocIDFromString(argDocIDs[0]) if err != nil { return err } - res, err := col.UpdateWithKey(cmd.Context(), docKey, updater) + res, err := col.UpdateWithDocID(cmd.Context(), docID, updater) if err != nil { return err } return writeJSON(cmd, res) - case len(keys) > 1 && updater != "": - docKeys := make([]client.DocKey, len(keys)) - for i, v := range keys { - docKey, err := client.NewDocKeyFromString(v) + case len(argDocIDs) > 1 && updater != "": + docIDs := make([]client.DocID, len(argDocIDs)) + for i, v := range argDocIDs { + docID, err := client.NewDocIDFromString(v) if err != nil { return err } - docKeys[i] = docKey + docIDs[i] = docID } - res, err := col.UpdateWithKeys(cmd.Context(), docKeys, updater) + res, err := col.UpdateWithDocIDs(cmd.Context(), docIDs, updater) if err != nil { return err } @@ -74,12 +74,12 @@ Example: update by keys return err } return writeJSON(cmd, res) - case len(keys) == 1 && len(args) == 1: - docKey, err := client.NewDocKeyFromString(keys[0]) + case len(argDocIDs) == 1 && len(args) == 1: + docID, err := client.NewDocIDFromString(argDocIDs[0]) if err != nil { return err } - doc, err := col.Get(cmd.Context(), docKey, true) + doc, err := col.Get(cmd.Context(), docID, true) if err != nil { return err } @@ -88,11 +88,11 @@ Example: update by keys } return col.Update(cmd.Context(), doc) default: - return ErrNoDocKeyOrFilter + return ErrNoDocIDOrFilter } }, } - cmd.Flags().StringSliceVar(&keys, "key", nil, "Document key") + cmd.Flags().StringSliceVar(&argDocIDs, "docID", nil, "Document ID") cmd.Flags().StringVar(&filter, "filter", "", "Document filter") cmd.Flags().StringVar(&updater, "updater", "", "Document updater") return cmd diff --git a/cli/errors.go b/cli/errors.go index ee89a63249..bb124bc7f9 100644 --- a/cli/errors.go +++ b/cli/errors.go @@ -22,7 +22,7 @@ const ( var ( ErrNoDocOrFile = errors.New("document or file must be defined") ErrInvalidDocument = errors.New("invalid document") - ErrNoDocKeyOrFilter = errors.New("document key or filter must be defined") + ErrNoDocIDOrFilter = errors.New("docID or filter must be defined") ErrInvalidExportFormat = errors.New("invalid export format") ErrNoLensConfig = errors.New("lens config cannot be empty") ErrInvalidLensConfig = errors.New("invalid lens configuration") diff --git a/cli/version_test.go b/cli/version_test.go index 4f62f3659b..fdc6aba6e6 100644 --- a/cli/version_test.go +++ b/cli/version_test.go @@ -42,7 +42,7 @@ func TestVersionFull(t *testing.T) { assert.NoError(t, err) t.Log(buf.String()) assert.Contains(t, buf.String(), "* HTTP API") - assert.Contains(t, buf.String(), "* DocKey versions") + assert.Contains(t, buf.String(), "* DocID versions") assert.Contains(t, 
buf.String(), "* P2P multicodec") } @@ -59,11 +59,11 @@ func TestVersionJSON(t *testing.T) { { "release": "", "commit": "", - "commitdate": "", + "commitDate": "", "go": "", - "httpapi": "v0", - "dockeyversions": "1", - "netprotocol": "/defra/0.0.1" + "httpAPI": "v0", + "docIDVersions": "1", + "netProtocol": "/defra/0.0.1" }`) } @@ -80,10 +80,10 @@ func TestVersionJSONFull(t *testing.T) { { "release": "", "commit": "", - "commitdate": "", + "commitDate": "", "go": "", - "httpapi": "v0", - "dockeyversions": "1", - "netprotocol": "/defra/0.0.1" + "httpAPI": "v0", + "docIDVersions": "1", + "netProtocol": "/defra/0.0.1" }`) } diff --git a/client/collection.go b/client/collection.go index 3e6bb64cc4..3a42871c62 100644 --- a/client/collection.go +++ b/client/collection.go @@ -33,26 +33,32 @@ type CollectionDefinition struct { type Collection interface { // Name returns the name of this collection. Name() string + // ID returns the ID of this Collection. ID() uint32 + // SchemaRoot returns the Root of the Schema used to define this Collection. SchemaRoot() string // Definition contains the metadata defining what a Collection is. Definition() CollectionDefinition + // Schema returns the SchemaDescription used to define this Collection. Schema() SchemaDescription + // Description returns the CollectionDescription of this Collection. Description() CollectionDescription // Create a new document. // - // Will verify the DocKey/CID to ensure that the new document is correctly formatted. + // Will verify the DocID/CID to ensure that the new document is correctly formatted. Create(context.Context, *Document) error + // CreateMany new documents. // - // Will verify the DocKeys/CIDs to ensure that the new documents are correctly formatted. + // Will verify the DocIDs/CIDs to ensure that the new documents are correctly formatted. CreateMany(context.Context, []*Document) error + // Update an existing document with the new values. // // Any field that needs to be removed or cleared should call doc.Clear(field) before. @@ -60,93 +66,102 @@ type Collection interface { // // Will return a ErrDocumentNotFound error if the given document is not found. Update(context.Context, *Document) error + // Save the given document in the database. // - // If a document exists with the given DocKey it will update it. Otherwise a new document + // If a document exists with the given DocID it will update it. Otherwise a new document // will be created. Save(context.Context, *Document) error - // Delete will attempt to delete a document by key. + + // Delete will attempt to delete a document by DocID. // // Will return true if a deletion is successful, and return false along with an error // if it cannot. If the document doesn't exist, then it will return false and a ErrDocumentNotFound error. - // This operation will hard-delete all state relating to the given DocKey. This includes data, block, and head storage. - Delete(context.Context, DocKey) (bool, error) - // Exists checks if a given document exists with supplied DocKey. + // This operation will hard-delete all state relating to the given DocID. This includes data, block, and head storage. + Delete(context.Context, DocID) (bool, error) + + // Exists checks if a given document exists with supplied DocID. // // Will return true if a matching document exists, otherwise will return false. - Exists(context.Context, DocKey) (bool, error) + Exists(context.Context, DocID) (bool, error) // UpdateWith updates a target document using the given updater type. 
// - // Target can be a Filter statement, a single docKey, a single document, - // an array of docKeys, or an array of documents. + // Target can be a Filter statement, a single DocID, a single document, + // an array of DocIDs, or an array of documents. // It is recommended to use the respective typed versions of Update - // (e.g. UpdateWithFilter or UpdateWithKey) over this function if you can. + // (e.g. UpdateWithFilter or UpdateWithDocID) over this function if you can. // // Returns an ErrInvalidUpdateTarget error if the target type is not supported. // Returns an ErrInvalidUpdater error if the updater type is not supported. UpdateWith(ctx context.Context, target any, updater string) (*UpdateResult, error) + // UpdateWithFilter updates using a filter to target documents for update. // // The provided updater must be a string Patch, string Merge Patch, a parsed Patch, or parsed Merge Patch // else an ErrInvalidUpdater will be returned. UpdateWithFilter(ctx context.Context, filter any, updater string) (*UpdateResult, error) - // UpdateWithKey updates using a DocKey to target a single document for update. + + // UpdateWithDocID updates using a DocID to target a single document for update. // // The provided updater must be a string Patch, string Merge Patch, a parsed Patch, or parsed Merge Patch // else an ErrInvalidUpdater will be returned. // - // Returns an ErrDocumentNotFound if a document matching the given DocKey is not found. - UpdateWithKey(ctx context.Context, key DocKey, updater string) (*UpdateResult, error) - // UpdateWithKeys updates documents matching the given DocKeys. + // Returns an ErrDocumentNotFound if a document matching the given DocID is not found. + UpdateWithDocID(ctx context.Context, docID DocID, updater string) (*UpdateResult, error) + + // UpdateWithDocIDs updates documents matching the given DocIDs. // // The provided updater must be a string Patch, string Merge Patch, a parsed Patch, or parsed Merge Patch // else an ErrInvalidUpdater will be returned. // - // Returns an ErrDocumentNotFound if a document is not found for any given DocKey. - UpdateWithKeys(context.Context, []DocKey, string) (*UpdateResult, error) + // Returns an ErrDocumentNotFound if a document is not found for any given DocID. + UpdateWithDocIDs(context.Context, []DocID, string) (*UpdateResult, error) // DeleteWith deletes a target document. // - // Target can be a Filter statement, a single docKey, a single document, an array of docKeys, + // Target can be a Filter statement, a single DocID, a single document, an array of DocIDs, // or an array of documents. It is recommended to use the respective typed versions of Delete - // (e.g. DeleteWithFilter or DeleteWithKey) over this function if you can. - // This operation will soft-delete documents related to the given DocKey and update the composite block + // (e.g. DeleteWithFilter or DeleteWithDocID) over this function if you can. + // This operation will soft-delete documents related to the given DocID and update the composite block // with a status of `Deleted`. // // Returns an ErrInvalidDeleteTarget if the target type is not supported. DeleteWith(ctx context.Context, target any) (*DeleteResult, error) + // DeleteWithFilter deletes documents matching the given filter. // // This operation will soft-delete documents related to the given filter and update the composite block // with a status of `Deleted`. 
DeleteWithFilter(ctx context.Context, filter any) (*DeleteResult, error) - // DeleteWithKey deletes using a DocKey to target a single document for delete. + + // DeleteWithDocID deletes using a DocID to target a single document for delete. // - // This operation will soft-delete documents related to the given DocKey and update the composite block + // This operation will soft-delete documents related to the given DocID and update the composite block // with a status of `Deleted`. // - // Returns an ErrDocumentNotFound if a document matching the given DocKey is not found. - DeleteWithKey(context.Context, DocKey) (*DeleteResult, error) - // DeleteWithKeys deletes documents matching the given DocKeys. + // Returns an ErrDocumentNotFound if a document matching the given DocID is not found. + DeleteWithDocID(context.Context, DocID) (*DeleteResult, error) + + // DeleteWithDocIDs deletes documents matching the given DocIDs. // - // This operation will soft-delete documents related to the given DocKeys and update the composite block + // This operation will soft-delete documents related to the given DocIDs and update the composite block // with a status of `Deleted`. // - // Returns an ErrDocumentNotFound if a document is not found for any given DocKey. - DeleteWithKeys(context.Context, []DocKey) (*DeleteResult, error) + // Returns an ErrDocumentNotFound if a document is not found for any given DocID. + DeleteWithDocIDs(context.Context, []DocID) (*DeleteResult, error) - // Get returns the document with the given DocKey. + // Get returns the document with the given DocID. // - // Returns an ErrDocumentNotFound if a document matching the given DocKey is not found. - Get(ctx context.Context, key DocKey, showDeleted bool) (*Document, error) + // Returns an ErrDocumentNotFound if a document matching the given DocID is not found. + Get(ctx context.Context, docID DocID, showDeleted bool) (*Document, error) // WithTxn returns a new instance of the collection, with a transaction // handle instead of a raw DB handle. WithTxn(datastore.Txn) Collection - // GetAllDocKeys returns all the document keys that exist in the collection. - GetAllDocKeys(ctx context.Context) (<-chan DocKeysResult, error) + // GetAllDocIDs returns all the document IDs that exist in the collection. + GetAllDocIDs(ctx context.Context) (<-chan DocIDResult, error) // CreateIndex creates a new index on the collection. // `IndexDescription` contains the description of the index to be created. @@ -162,11 +177,11 @@ type Collection interface { GetIndexes(ctx context.Context) ([]IndexDescription, error) } -// DocKeysResult wraps the result of an attempt at a DocKey retrieval operation. -type DocKeysResult struct { - // If a DocKey was successfully retrieved, this will be that key. - Key DocKey - // If an error was generated whilst attempting to retrieve the DocKey, this will be the error. +// DocIDResult wraps the result of an attempt at a DocID retrieval operation. +type DocIDResult struct { + // If a DocID was successfully retrieved, this will be that DocID. + ID DocID + // If an error was generated whilst attempting to retrieve the DocID, this will be the error. Err error } @@ -174,16 +189,16 @@ type DocKeysResult struct { type UpdateResult struct { // Count contains the number of documents updated by the update call. Count int64 - // DocKeys contains the DocKeys of all the documents updated by the update call. - DocKeys []string + // DocIDs contains the DocIDs of all the documents updated by the update call. 
+ DocIDs []string } // DeleteResult wraps the result of an delete call. type DeleteResult struct { // Count contains the number of documents deleted by the delete call. Count int64 - // DocKeys contains the DocKeys of all the documents deleted by the delete call. - DocKeys []string + // DocIDs contains the DocIDs of all the documents deleted by the delete call. + DocIDs []string } // P2PCollection is the gRPC response representation of a P2P collection topic diff --git a/client/descriptions.go b/client/descriptions.go index 7ab7cc0982..7a4ec0ba7e 100644 --- a/client/descriptions.go +++ b/client/descriptions.go @@ -127,7 +127,7 @@ type FieldKind uint8 func (f FieldKind) String() string { switch f { - case FieldKind_DocKey: + case FieldKind_DocID: return "ID" case FieldKind_BOOL: return "Boolean" @@ -165,7 +165,7 @@ func (f FieldKind) String() string { // Note: These values are serialized and persisted in the database, avoid modifying existing values. const ( FieldKind_None FieldKind = 0 - FieldKind_DocKey FieldKind = 1 + FieldKind_DocID FieldKind = 1 FieldKind_BOOL FieldKind = 2 FieldKind_BOOL_ARRAY FieldKind = 3 FieldKind_INT FieldKind = 4 @@ -201,7 +201,7 @@ const ( // in the future. They currently roughly correspond to the GQL field types, but this // equality is not guaranteed. var FieldKindStringToEnumMapping = map[string]FieldKind{ - "ID": FieldKind_DocKey, + "ID": FieldKind_DocID, "Boolean": FieldKind_BOOL, "[Boolean]": FieldKind_NILLABLE_BOOL_ARRAY, "[Boolean!]": FieldKind_BOOL_ARRAY, @@ -280,7 +280,7 @@ type FieldDescription struct { // IsInternal returns true if this field is internally generated. func (f FieldDescription) IsInternal() bool { - return (f.Name == "_key") || f.RelationType&Relation_Type_INTERNAL_ID != 0 + return (f.Name == request.DocIDFieldName) || f.RelationType&Relation_Type_INTERNAL_ID != 0 } // IsObject returns true if this field is an object type. diff --git a/client/dockey.go b/client/doc_id.go similarity index 50% rename from client/dockey.go rename to client/doc_id.go index 421820d341..601a6ed791 100644 --- a/client/dockey.go +++ b/client/doc_id.go @@ -20,14 +20,14 @@ import ( mbase "github.com/multiformats/go-multibase" ) -// DocKey versions. +// DocID versions. const ( - DocKeyV0 = 0x01 + DocIDV0 = 0x01 ) -// ValidDocKeyVersions is a map of DocKey versions and their current validity. -var ValidDocKeyVersions = map[uint16]bool{ - DocKeyV0: true, +// ValidDocIDVersions is a map of DocID versions and their current validity. +var ValidDocIDVersions = map[uint16]bool{ + DocIDV0: true, } var ( @@ -35,69 +35,69 @@ var ( SDNNamespaceV0 = uuid.Must(uuid.FromString("c94acbfa-dd53-40d0-97f3-29ce16c333fc")) ) -// DocKey is the root key identifier for documents in DefraDB. -type DocKey struct { +// DocID is the root identifier for documents in DefraDB. +type DocID struct { version uint16 uuid uuid.UUID cid cid.Cid } -// NewDocKeyV0 creates a new dockey identified by the root data CID,peerID, and namespaced by the versionNS. -func NewDocKeyV0(dataCID cid.Cid) DocKey { - return DocKey{ - version: DocKeyV0, +// NewDocIDV0 creates a new DocID identified by the root data CID, peerID, and namespaced by the versionNS. +func NewDocIDV0(dataCID cid.Cid) DocID { + return DocID{ + version: DocIDV0, uuid: uuid.NewV5(SDNNamespaceV0, dataCID.String()), cid: dataCID, } } -// NewDocKeyFromString creates a new DocKey from a string. -func NewDocKeyFromString(key string) (DocKey, error) { - parts := strings.SplitN(key, "-", 2) +// NewDocIDFromString creates a new DocID from a string. 
+func NewDocIDFromString(docID string) (DocID, error) { + parts := strings.SplitN(docID, "-", 2) if len(parts) != 2 { - return DocKey{}, ErrMalformedDocKey + return DocID{}, ErrMalformedDocID } versionStr := parts[0] _, data, err := mbase.Decode(versionStr) if err != nil { - return DocKey{}, err + return DocID{}, err } buf := bytes.NewBuffer(data) version, err := binary.ReadUvarint(buf) if err != nil { - return DocKey{}, err + return DocID{}, err } - if _, ok := ValidDocKeyVersions[uint16(version)]; !ok { - return DocKey{}, ErrInvalidDocKeyVersion + if _, ok := ValidDocIDVersions[uint16(version)]; !ok { + return DocID{}, ErrInvalidDocIDVersion } uuid, err := uuid.FromString(parts[1]) if err != nil { - return DocKey{}, err + return DocID{}, err } - return DocKey{ + return DocID{ version: uint16(version), uuid: uuid, }, nil } -// UUID returns the doc key in UUID form. -func (key DocKey) UUID() uuid.UUID { - return key.uuid +// UUID returns the underlying document identifier in UUID form. +func (docID DocID) UUID() uuid.UUID { + return docID.uuid } -// String returns the doc key in string form. -func (key DocKey) String() string { +// String returns the underlying document identifier in string form. +func (docID DocID) String() string { buf := make([]byte, 1) - binary.PutUvarint(buf, uint64(key.version)) + binary.PutUvarint(buf, uint64(docID.version)) versionStr, _ := mbase.Encode(mbase.Base32, buf) - return versionStr + "-" + key.uuid.String() + return versionStr + "-" + docID.uuid.String() } -// Bytes returns the DocKey in Byte format. -func (key DocKey) Bytes() []byte { +// Bytes returns the underlying document identifier in Byte format. +func (docID DocID) Bytes() []byte { buf := make([]byte, binary.MaxVarintLen16) - binary.PutUvarint(buf, uint64(key.version)) - return append(buf, key.uuid.Bytes()...) + binary.PutUvarint(buf, uint64(docID.version)) + return append(buf, docID.uuid.Bytes()...) } diff --git a/client/document.go b/client/document.go index bcb8ae6070..6713f48dd0 100644 --- a/client/document.go +++ b/client/document.go @@ -53,7 +53,7 @@ import ( // @body: A document interface can be implemented by both a TypedDocument and a // UnTypedDocument, which use a schema and schemaless approach respectively. type Document struct { - key DocKey + id DocID // SchemaVersionID holds the id of the schema version that this document is // currently at. // @@ -68,10 +68,10 @@ type Document struct { isDirty bool } -// NewDocWithKey creates a new Document with a specified key. -func NewDocWithKey(key DocKey) *Document { +// NewDocWithID creates a new Document with a specified DocID. 
+func NewDocWithID(docID DocID) *Document { doc := newEmptyDoc() - doc.key = key + doc.id = docID return doc } @@ -90,15 +90,15 @@ func NewDocFromMap(data map[string]any) (*Document, error) { values: make(map[Field]Value), } - // check if document contains special _key field - k, hasKey := data["_key"] - if hasKey { - delete(data, "_key") // remove the key so it isn't parsed further + // check if document contains special _docID field + k, hasDocID := data[request.DocIDFieldName] + if hasDocID { + delete(data, request.DocIDFieldName) // remove the DocID so it isn't parsed further kstr, ok := k.(string) if !ok { - return nil, NewErrUnexpectedType[string]("data[_key]", k) + return nil, NewErrUnexpectedType[string]("data["+request.DocIDFieldName+"]", k) } - if doc.key, err = NewDocKeyFromString(kstr); err != nil { + if doc.id, err = NewDocIDFromString(kstr); err != nil { return nil, err } } @@ -108,9 +108,9 @@ func NewDocFromMap(data map[string]any) (*Document, error) { return nil, err } - // if no key was specified, then we assume it doesn't exist and we generate, and set it. - if !hasKey { - err = doc.generateAndSetDocKey() + // if no DocID was specified, then we assume it doesn't exist and we generate, and set it. + if !hasDocID { + err = doc.generateAndSetDocID() if err != nil { return nil, err } @@ -144,10 +144,10 @@ func (doc *Document) SetHead(head cid.Cid) { doc.head = head } -// Key returns the generated DocKey for this document. -func (doc *Document) Key() DocKey { - // Reading without a read-lock as we assume the DocKey is immutable - return doc.key +// ID returns the generated DocID for this document. +func (doc *Document) ID() DocID { + // Reading without a read-lock as we assume the DocID is immutable + return doc.id } // Get returns the raw value for a given field. @@ -392,8 +392,7 @@ func (doc *Document) String() (string, error) { return string(j), nil } -// ToMap returns the document as a map[string]any -// object. +// ToMap returns the document as a map[string]any object. func (doc *Document) ToMap() (map[string]any, error) { return doc.toMapWithKey() } @@ -479,42 +478,42 @@ func (doc *Document) toMapWithKey() (map[string]any, error) { docMap[k] = value.Value() } - docMap["_key"] = doc.Key().String() + docMap[request.DocIDFieldName] = doc.ID().String() return docMap, nil } -// GenerateDocKey generates docKey/docID corresponding to the document. -func (doc *Document) GenerateDocKey() (DocKey, error) { +// GenerateDocID generates the DocID corresponding to the document. +func (doc *Document) GenerateDocID() (DocID, error) { bytes, err := doc.Bytes() if err != nil { - return DocKey{}, err + return DocID{}, err } cid, err := ccid.NewSHA256CidV1(bytes) if err != nil { - return DocKey{}, err + return DocID{}, err } - return NewDocKeyV0(cid), nil + return NewDocIDV0(cid), nil } -// setDocKey sets the `doc.key` (should NOT be public). -func (doc *Document) setDocKey(docID DocKey) { +// setDocID sets the `doc.id` (should NOT be public). +func (doc *Document) setDocID(docID DocID) { doc.mu.Lock() defer doc.mu.Unlock() - doc.key = docID + doc.id = docID } -// generateAndSetDocKey generates the docKey/docID and then (re)sets `doc.key`. -func (doc *Document) generateAndSetDocKey() error { - docKey, err := doc.GenerateDocKey() +// generateAndSetDocID generates the DocID and then (re)sets `doc.id`. 
+func (doc *Document) generateAndSetDocID() error { + docID, err := doc.GenerateDocID() if err != nil { return err } - doc.setDocKey(docKey) + doc.setDocID(docID) return nil } @@ -537,8 +536,8 @@ func (doc *Document) remapAliasFields(fieldDescriptions []FieldDescription) (boo return foundAlias, nil } -// RemapAliasFieldsAndDockey remaps the alias fields and fixes (overwrites) the dockey. -func (doc *Document) RemapAliasFieldsAndDockey(fieldDescriptions []FieldDescription) error { +// RemapAliasFieldsAndDocID remaps the alias fields and fixes (overwrites) the DocID. +func (doc *Document) RemapAliasFieldsAndDocID(fieldDescriptions []FieldDescription) error { foundAlias, err := doc.remapAliasFields(fieldDescriptions) if err != nil { return err @@ -548,8 +547,8 @@ func (doc *Document) RemapAliasFieldsAndDockey(fieldDescriptions []FieldDescript return nil } - // Update the dockey so dockey isn't based on an aliased name of a field. - return doc.generateAndSetDocKey() + // Update the DocID so DocID isn't based on an aliased name of a field. + return doc.generateAndSetDocID() } // DocumentStatus represent the state of the document in the DAG store. diff --git a/client/document_test.go b/client/document_test.go index 9073373cd3..ee15dc5673 100644 --- a/client/document_test.go +++ b/client/document_test.go @@ -48,10 +48,10 @@ func TestNewFromJSON(t *testing.T) { if err != nil { t.Error(err) } - objKey := NewDocKeyV0(c) + objKey := NewDocIDV0(c) - if objKey.String() != doc.Key().String() { - t.Errorf("Incorrect doc key. Want %v, have %v", objKey.String(), doc.Key().String()) + if objKey.String() != doc.ID().String() { + t.Errorf("Incorrect document ID. Want %v, have %v", objKey.String(), doc.ID().String()) return } @@ -101,10 +101,10 @@ func TestSetWithJSON(t *testing.T) { if err != nil { t.Error(err) } - objKey := NewDocKeyV0(c) + objKey := NewDocIDV0(c) - if objKey.String() != doc.Key().String() { - t.Errorf("Incorrect doc key. Want %v, have %v", objKey.String(), doc.Key().String()) + if objKey.String() != doc.ID().String() { + t.Errorf("Incorrect document ID. Want %v, have %v", objKey.String(), doc.ID().String()) return } diff --git a/client/errors.go b/client/errors.go index 28161c502d..3d1de52a3d 100644 --- a/client/errors.go +++ b/client/errors.go @@ -32,23 +32,16 @@ const ( // This list is incomplete and undefined errors may also be returned. // Errors returned from this package may be tested against these errors with errors.Is. 
var ( - ErrFieldNotExist = errors.New(errFieldNotExist) - ErrUnexpectedType = errors.New(errUnexpectedType) - ErrParsingFailed = errors.New(errParsingFailed) - ErrUninitializeProperty = errors.New(errUninitializeProperty) - ErrFieldNotObject = errors.New("trying to access field on a non object type") - ErrValueTypeMismatch = errors.New("value does not match indicated type") - ErrIndexNotFound = errors.New("no index found for given ID") - ErrDocumentNotFound = errors.New("no document for the given key exists") - ErrInvalidUpdateTarget = errors.New("the target document to update is of invalid type") - ErrInvalidUpdater = errors.New("the updater of a document is of invalid type") - ErrInvalidDeleteTarget = errors.New("the target document to delete is of invalid type") - ErrMalformedDocKey = errors.New("malformed DocKey, missing either version or cid") - ErrInvalidDocKeyVersion = errors.New("invalid DocKey version") - ErrMaxTxnRetries = errors.New(errMaxTxnRetries) - ErrRelationOneSided = errors.New(errRelationOneSided) - ErrCollectionNotFound = errors.New(errCollectionNotFound) - ErrUnknownCRDT = errors.New(errUnknownCRDT) + ErrFieldNotExist = errors.New(errFieldNotExist) + ErrUnexpectedType = errors.New(errUnexpectedType) + ErrFieldNotObject = errors.New("trying to access field on a non object type") + ErrValueTypeMismatch = errors.New("value does not match indicated type") + ErrDocumentNotFound = errors.New("no document for the given ID exists") + ErrInvalidUpdateTarget = errors.New("the target document to update is of invalid type") + ErrInvalidUpdater = errors.New("the updater of a document is of invalid type") + ErrInvalidDeleteTarget = errors.New("the target document to delete is of invalid type") + ErrMalformedDocID = errors.New("malformed document ID, missing either version or cid") + ErrInvalidDocIDVersion = errors.New("invalid document ID version") ) // NewErrFieldNotExist returns an error indicating that the given field does not exist. 
diff --git a/client/mocks/collection.go b/client/mocks/collection.go index f3d7f58354..b1fac9c243 100644 --- a/client/mocks/collection.go +++ b/client/mocks/collection.go @@ -206,21 +206,21 @@ func (_c *Collection_Definition_Call) RunAndReturn(run func() client.CollectionD } // Delete provides a mock function with given fields: _a0, _a1 -func (_m *Collection) Delete(_a0 context.Context, _a1 client.DocKey) (bool, error) { +func (_m *Collection) Delete(_a0 context.Context, _a1 client.DocID) (bool, error) { ret := _m.Called(_a0, _a1) var r0 bool var r1 error - if rf, ok := ret.Get(0).(func(context.Context, client.DocKey) (bool, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, client.DocID) (bool, error)); ok { return rf(_a0, _a1) } - if rf, ok := ret.Get(0).(func(context.Context, client.DocKey) bool); ok { + if rf, ok := ret.Get(0).(func(context.Context, client.DocID) bool); ok { r0 = rf(_a0, _a1) } else { r0 = ret.Get(0).(bool) } - if rf, ok := ret.Get(1).(func(context.Context, client.DocKey) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, client.DocID) error); ok { r1 = rf(_a0, _a1) } else { r1 = ret.Error(1) @@ -236,14 +236,14 @@ type Collection_Delete_Call struct { // Delete is a helper method to define mock.On call // - _a0 context.Context -// - _a1 client.DocKey +// - _a1 client.DocID func (_e *Collection_Expecter) Delete(_a0 interface{}, _a1 interface{}) *Collection_Delete_Call { return &Collection_Delete_Call{Call: _e.mock.On("Delete", _a0, _a1)} } -func (_c *Collection_Delete_Call) Run(run func(_a0 context.Context, _a1 client.DocKey)) *Collection_Delete_Call { +func (_c *Collection_Delete_Call) Run(run func(_a0 context.Context, _a1 client.DocID)) *Collection_Delete_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(client.DocKey)) + run(args[0].(context.Context), args[1].(client.DocID)) }) return _c } @@ -253,7 +253,7 @@ func (_c *Collection_Delete_Call) Return(_a0 bool, _a1 error) *Collection_Delete return _c } -func (_c *Collection_Delete_Call) RunAndReturn(run func(context.Context, client.DocKey) (bool, error)) *Collection_Delete_Call { +func (_c *Collection_Delete_Call) RunAndReturn(run func(context.Context, client.DocID) (bool, error)) *Collection_Delete_Call { _c.Call.Return(run) return _c } @@ -313,25 +313,25 @@ func (_c *Collection_DeleteWith_Call) RunAndReturn(run func(context.Context, int return _c } -// DeleteWithFilter provides a mock function with given fields: ctx, filter -func (_m *Collection) DeleteWithFilter(ctx context.Context, filter interface{}) (*client.DeleteResult, error) { - ret := _m.Called(ctx, filter) +// DeleteWithDocID provides a mock function with given fields: _a0, _a1 +func (_m *Collection) DeleteWithDocID(_a0 context.Context, _a1 client.DocID) (*client.DeleteResult, error) { + ret := _m.Called(_a0, _a1) var r0 *client.DeleteResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, interface{}) (*client.DeleteResult, error)); ok { - return rf(ctx, filter) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID) (*client.DeleteResult, error)); ok { + return rf(_a0, _a1) } - if rf, ok := ret.Get(0).(func(context.Context, interface{}) *client.DeleteResult); ok { - r0 = rf(ctx, filter) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID) *client.DeleteResult); ok { + r0 = rf(_a0, _a1) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.DeleteResult) } } - if rf, ok := ret.Get(1).(func(context.Context, interface{}) error); ok { - r1 = rf(ctx, filter) + if rf, 
ok := ret.Get(1).(func(context.Context, client.DocID) error); ok { + r1 = rf(_a0, _a1) } else { r1 = ret.Error(1) } @@ -339,45 +339,45 @@ func (_m *Collection) DeleteWithFilter(ctx context.Context, filter interface{}) return r0, r1 } -// Collection_DeleteWithFilter_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteWithFilter' -type Collection_DeleteWithFilter_Call struct { +// Collection_DeleteWithDocID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteWithDocID' +type Collection_DeleteWithDocID_Call struct { *mock.Call } -// DeleteWithFilter is a helper method to define mock.On call -// - ctx context.Context -// - filter interface{} -func (_e *Collection_Expecter) DeleteWithFilter(ctx interface{}, filter interface{}) *Collection_DeleteWithFilter_Call { - return &Collection_DeleteWithFilter_Call{Call: _e.mock.On("DeleteWithFilter", ctx, filter)} +// DeleteWithDocID is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 client.DocID +func (_e *Collection_Expecter) DeleteWithDocID(_a0 interface{}, _a1 interface{}) *Collection_DeleteWithDocID_Call { + return &Collection_DeleteWithDocID_Call{Call: _e.mock.On("DeleteWithDocID", _a0, _a1)} } -func (_c *Collection_DeleteWithFilter_Call) Run(run func(ctx context.Context, filter interface{})) *Collection_DeleteWithFilter_Call { +func (_c *Collection_DeleteWithDocID_Call) Run(run func(_a0 context.Context, _a1 client.DocID)) *Collection_DeleteWithDocID_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(interface{})) + run(args[0].(context.Context), args[1].(client.DocID)) }) return _c } -func (_c *Collection_DeleteWithFilter_Call) Return(_a0 *client.DeleteResult, _a1 error) *Collection_DeleteWithFilter_Call { +func (_c *Collection_DeleteWithDocID_Call) Return(_a0 *client.DeleteResult, _a1 error) *Collection_DeleteWithDocID_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *Collection_DeleteWithFilter_Call) RunAndReturn(run func(context.Context, interface{}) (*client.DeleteResult, error)) *Collection_DeleteWithFilter_Call { +func (_c *Collection_DeleteWithDocID_Call) RunAndReturn(run func(context.Context, client.DocID) (*client.DeleteResult, error)) *Collection_DeleteWithDocID_Call { _c.Call.Return(run) return _c } -// DeleteWithKey provides a mock function with given fields: _a0, _a1 -func (_m *Collection) DeleteWithKey(_a0 context.Context, _a1 client.DocKey) (*client.DeleteResult, error) { +// DeleteWithDocIDs provides a mock function with given fields: _a0, _a1 +func (_m *Collection) DeleteWithDocIDs(_a0 context.Context, _a1 []client.DocID) (*client.DeleteResult, error) { ret := _m.Called(_a0, _a1) var r0 *client.DeleteResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, client.DocKey) (*client.DeleteResult, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, []client.DocID) (*client.DeleteResult, error)); ok { return rf(_a0, _a1) } - if rf, ok := ret.Get(0).(func(context.Context, client.DocKey) *client.DeleteResult); ok { + if rf, ok := ret.Get(0).(func(context.Context, []client.DocID) *client.DeleteResult); ok { r0 = rf(_a0, _a1) } else { if ret.Get(0) != nil { @@ -385,7 +385,7 @@ func (_m *Collection) DeleteWithKey(_a0 context.Context, _a1 client.DocKey) (*cl } } - if rf, ok := ret.Get(1).(func(context.Context, client.DocKey) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, []client.DocID) error); ok { r1 = rf(_a0, _a1) } else { r1 = ret.Error(1) 
@@ -394,54 +394,54 @@ func (_m *Collection) DeleteWithKey(_a0 context.Context, _a1 client.DocKey) (*cl return r0, r1 } -// Collection_DeleteWithKey_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteWithKey' -type Collection_DeleteWithKey_Call struct { +// Collection_DeleteWithDocIDs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteWithDocIDs' +type Collection_DeleteWithDocIDs_Call struct { *mock.Call } -// DeleteWithKey is a helper method to define mock.On call +// DeleteWithDocIDs is a helper method to define mock.On call // - _a0 context.Context -// - _a1 client.DocKey -func (_e *Collection_Expecter) DeleteWithKey(_a0 interface{}, _a1 interface{}) *Collection_DeleteWithKey_Call { - return &Collection_DeleteWithKey_Call{Call: _e.mock.On("DeleteWithKey", _a0, _a1)} +// - _a1 []client.DocID +func (_e *Collection_Expecter) DeleteWithDocIDs(_a0 interface{}, _a1 interface{}) *Collection_DeleteWithDocIDs_Call { + return &Collection_DeleteWithDocIDs_Call{Call: _e.mock.On("DeleteWithDocIDs", _a0, _a1)} } -func (_c *Collection_DeleteWithKey_Call) Run(run func(_a0 context.Context, _a1 client.DocKey)) *Collection_DeleteWithKey_Call { +func (_c *Collection_DeleteWithDocIDs_Call) Run(run func(_a0 context.Context, _a1 []client.DocID)) *Collection_DeleteWithDocIDs_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(client.DocKey)) + run(args[0].(context.Context), args[1].([]client.DocID)) }) return _c } -func (_c *Collection_DeleteWithKey_Call) Return(_a0 *client.DeleteResult, _a1 error) *Collection_DeleteWithKey_Call { +func (_c *Collection_DeleteWithDocIDs_Call) Return(_a0 *client.DeleteResult, _a1 error) *Collection_DeleteWithDocIDs_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *Collection_DeleteWithKey_Call) RunAndReturn(run func(context.Context, client.DocKey) (*client.DeleteResult, error)) *Collection_DeleteWithKey_Call { +func (_c *Collection_DeleteWithDocIDs_Call) RunAndReturn(run func(context.Context, []client.DocID) (*client.DeleteResult, error)) *Collection_DeleteWithDocIDs_Call { _c.Call.Return(run) return _c } -// DeleteWithKeys provides a mock function with given fields: _a0, _a1 -func (_m *Collection) DeleteWithKeys(_a0 context.Context, _a1 []client.DocKey) (*client.DeleteResult, error) { - ret := _m.Called(_a0, _a1) +// DeleteWithFilter provides a mock function with given fields: ctx, filter +func (_m *Collection) DeleteWithFilter(ctx context.Context, filter interface{}) (*client.DeleteResult, error) { + ret := _m.Called(ctx, filter) var r0 *client.DeleteResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []client.DocKey) (*client.DeleteResult, error)); ok { - return rf(_a0, _a1) + if rf, ok := ret.Get(0).(func(context.Context, interface{}) (*client.DeleteResult, error)); ok { + return rf(ctx, filter) } - if rf, ok := ret.Get(0).(func(context.Context, []client.DocKey) *client.DeleteResult); ok { - r0 = rf(_a0, _a1) + if rf, ok := ret.Get(0).(func(context.Context, interface{}) *client.DeleteResult); ok { + r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.DeleteResult) } } - if rf, ok := ret.Get(1).(func(context.Context, []client.DocKey) error); ok { - r1 = rf(_a0, _a1) + if rf, ok := ret.Get(1).(func(context.Context, interface{}) error); ok { + r1 = rf(ctx, filter) } else { r1 = ret.Error(1) } @@ -449,31 +449,31 @@ func (_m *Collection) DeleteWithKeys(_a0 context.Context, _a1 []client.DocKey) ( return 
r0, r1 } -// Collection_DeleteWithKeys_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteWithKeys' -type Collection_DeleteWithKeys_Call struct { +// Collection_DeleteWithFilter_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteWithFilter' +type Collection_DeleteWithFilter_Call struct { *mock.Call } -// DeleteWithKeys is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 []client.DocKey -func (_e *Collection_Expecter) DeleteWithKeys(_a0 interface{}, _a1 interface{}) *Collection_DeleteWithKeys_Call { - return &Collection_DeleteWithKeys_Call{Call: _e.mock.On("DeleteWithKeys", _a0, _a1)} +// DeleteWithFilter is a helper method to define mock.On call +// - ctx context.Context +// - filter interface{} +func (_e *Collection_Expecter) DeleteWithFilter(ctx interface{}, filter interface{}) *Collection_DeleteWithFilter_Call { + return &Collection_DeleteWithFilter_Call{Call: _e.mock.On("DeleteWithFilter", ctx, filter)} } -func (_c *Collection_DeleteWithKeys_Call) Run(run func(_a0 context.Context, _a1 []client.DocKey)) *Collection_DeleteWithKeys_Call { +func (_c *Collection_DeleteWithFilter_Call) Run(run func(ctx context.Context, filter interface{})) *Collection_DeleteWithFilter_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]client.DocKey)) + run(args[0].(context.Context), args[1].(interface{})) }) return _c } -func (_c *Collection_DeleteWithKeys_Call) Return(_a0 *client.DeleteResult, _a1 error) *Collection_DeleteWithKeys_Call { +func (_c *Collection_DeleteWithFilter_Call) Return(_a0 *client.DeleteResult, _a1 error) *Collection_DeleteWithFilter_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *Collection_DeleteWithKeys_Call) RunAndReturn(run func(context.Context, []client.DocKey) (*client.DeleteResult, error)) *Collection_DeleteWithKeys_Call { +func (_c *Collection_DeleteWithFilter_Call) RunAndReturn(run func(context.Context, interface{}) (*client.DeleteResult, error)) *Collection_DeleteWithFilter_Call { _c.Call.Return(run) return _c } @@ -563,21 +563,21 @@ func (_c *Collection_DropIndex_Call) RunAndReturn(run func(context.Context, stri } // Exists provides a mock function with given fields: _a0, _a1 -func (_m *Collection) Exists(_a0 context.Context, _a1 client.DocKey) (bool, error) { +func (_m *Collection) Exists(_a0 context.Context, _a1 client.DocID) (bool, error) { ret := _m.Called(_a0, _a1) var r0 bool var r1 error - if rf, ok := ret.Get(0).(func(context.Context, client.DocKey) (bool, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, client.DocID) (bool, error)); ok { return rf(_a0, _a1) } - if rf, ok := ret.Get(0).(func(context.Context, client.DocKey) bool); ok { + if rf, ok := ret.Get(0).(func(context.Context, client.DocID) bool); ok { r0 = rf(_a0, _a1) } else { r0 = ret.Get(0).(bool) } - if rf, ok := ret.Get(1).(func(context.Context, client.DocKey) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, client.DocID) error); ok { r1 = rf(_a0, _a1) } else { r1 = ret.Error(1) @@ -593,14 +593,14 @@ type Collection_Exists_Call struct { // Exists is a helper method to define mock.On call // - _a0 context.Context -// - _a1 client.DocKey +// - _a1 client.DocID func (_e *Collection_Expecter) Exists(_a0 interface{}, _a1 interface{}) *Collection_Exists_Call { return &Collection_Exists_Call{Call: _e.mock.On("Exists", _a0, _a1)} } -func (_c *Collection_Exists_Call) Run(run func(_a0 context.Context, _a1 client.DocKey)) 
*Collection_Exists_Call { +func (_c *Collection_Exists_Call) Run(run func(_a0 context.Context, _a1 client.DocID)) *Collection_Exists_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(client.DocKey)) + run(args[0].(context.Context), args[1].(client.DocID)) }) return _c } @@ -610,30 +610,30 @@ func (_c *Collection_Exists_Call) Return(_a0 bool, _a1 error) *Collection_Exists return _c } -func (_c *Collection_Exists_Call) RunAndReturn(run func(context.Context, client.DocKey) (bool, error)) *Collection_Exists_Call { +func (_c *Collection_Exists_Call) RunAndReturn(run func(context.Context, client.DocID) (bool, error)) *Collection_Exists_Call { _c.Call.Return(run) return _c } -// Get provides a mock function with given fields: ctx, key, showDeleted -func (_m *Collection) Get(ctx context.Context, key client.DocKey, showDeleted bool) (*client.Document, error) { - ret := _m.Called(ctx, key, showDeleted) +// Get provides a mock function with given fields: ctx, docID, showDeleted +func (_m *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bool) (*client.Document, error) { + ret := _m.Called(ctx, docID, showDeleted) var r0 *client.Document var r1 error - if rf, ok := ret.Get(0).(func(context.Context, client.DocKey, bool) (*client.Document, error)); ok { - return rf(ctx, key, showDeleted) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID, bool) (*client.Document, error)); ok { + return rf(ctx, docID, showDeleted) } - if rf, ok := ret.Get(0).(func(context.Context, client.DocKey, bool) *client.Document); ok { - r0 = rf(ctx, key, showDeleted) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID, bool) *client.Document); ok { + r0 = rf(ctx, docID, showDeleted) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.Document) } } - if rf, ok := ret.Get(1).(func(context.Context, client.DocKey, bool) error); ok { - r1 = rf(ctx, key, showDeleted) + if rf, ok := ret.Get(1).(func(context.Context, client.DocID, bool) error); ok { + r1 = rf(ctx, docID, showDeleted) } else { r1 = ret.Error(1) } @@ -648,15 +648,15 @@ type Collection_Get_Call struct { // Get is a helper method to define mock.On call // - ctx context.Context -// - key client.DocKey +// - docID client.DocID // - showDeleted bool -func (_e *Collection_Expecter) Get(ctx interface{}, key interface{}, showDeleted interface{}) *Collection_Get_Call { - return &Collection_Get_Call{Call: _e.mock.On("Get", ctx, key, showDeleted)} +func (_e *Collection_Expecter) Get(ctx interface{}, docID interface{}, showDeleted interface{}) *Collection_Get_Call { + return &Collection_Get_Call{Call: _e.mock.On("Get", ctx, docID, showDeleted)} } -func (_c *Collection_Get_Call) Run(run func(ctx context.Context, key client.DocKey, showDeleted bool)) *Collection_Get_Call { +func (_c *Collection_Get_Call) Run(run func(ctx context.Context, docID client.DocID, showDeleted bool)) *Collection_Get_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(client.DocKey), args[2].(bool)) + run(args[0].(context.Context), args[1].(client.DocID), args[2].(bool)) }) return _c } @@ -666,25 +666,25 @@ func (_c *Collection_Get_Call) Return(_a0 *client.Document, _a1 error) *Collecti return _c } -func (_c *Collection_Get_Call) RunAndReturn(run func(context.Context, client.DocKey, bool) (*client.Document, error)) *Collection_Get_Call { +func (_c *Collection_Get_Call) RunAndReturn(run func(context.Context, client.DocID, bool) (*client.Document, error)) *Collection_Get_Call { 
_c.Call.Return(run) return _c } -// GetAllDocKeys provides a mock function with given fields: ctx -func (_m *Collection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) { +// GetAllDocIDs provides a mock function with given fields: ctx +func (_m *Collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) { ret := _m.Called(ctx) - var r0 <-chan client.DocKeysResult + var r0 <-chan client.DocIDResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context) (<-chan client.DocKeysResult, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context) (<-chan client.DocIDResult, error)); ok { return rf(ctx) } - if rf, ok := ret.Get(0).(func(context.Context) <-chan client.DocKeysResult); ok { + if rf, ok := ret.Get(0).(func(context.Context) <-chan client.DocIDResult); ok { r0 = rf(ctx) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(<-chan client.DocKeysResult) + r0 = ret.Get(0).(<-chan client.DocIDResult) } } @@ -697,30 +697,30 @@ func (_m *Collection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysR return r0, r1 } -// Collection_GetAllDocKeys_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetAllDocKeys' -type Collection_GetAllDocKeys_Call struct { +// Collection_GetAllDocIDs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetAllDocIDs' +type Collection_GetAllDocIDs_Call struct { *mock.Call } -// GetAllDocKeys is a helper method to define mock.On call +// GetAllDocIDs is a helper method to define mock.On call // - ctx context.Context -func (_e *Collection_Expecter) GetAllDocKeys(ctx interface{}) *Collection_GetAllDocKeys_Call { - return &Collection_GetAllDocKeys_Call{Call: _e.mock.On("GetAllDocKeys", ctx)} +func (_e *Collection_Expecter) GetAllDocIDs(ctx interface{}) *Collection_GetAllDocIDs_Call { + return &Collection_GetAllDocIDs_Call{Call: _e.mock.On("GetAllDocIDs", ctx)} } -func (_c *Collection_GetAllDocKeys_Call) Run(run func(ctx context.Context)) *Collection_GetAllDocKeys_Call { +func (_c *Collection_GetAllDocIDs_Call) Run(run func(ctx context.Context)) *Collection_GetAllDocIDs_Call { _c.Call.Run(func(args mock.Arguments) { run(args[0].(context.Context)) }) return _c } -func (_c *Collection_GetAllDocKeys_Call) Return(_a0 <-chan client.DocKeysResult, _a1 error) *Collection_GetAllDocKeys_Call { +func (_c *Collection_GetAllDocIDs_Call) Return(_a0 <-chan client.DocIDResult, _a1 error) *Collection_GetAllDocIDs_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *Collection_GetAllDocKeys_Call) RunAndReturn(run func(context.Context) (<-chan client.DocKeysResult, error)) *Collection_GetAllDocKeys_Call { +func (_c *Collection_GetAllDocIDs_Call) RunAndReturn(run func(context.Context) (<-chan client.DocIDResult, error)) *Collection_GetAllDocIDs_Call { _c.Call.Return(run) return _c } @@ -1085,25 +1085,25 @@ func (_c *Collection_UpdateWith_Call) RunAndReturn(run func(context.Context, int return _c } -// UpdateWithFilter provides a mock function with given fields: ctx, filter, updater -func (_m *Collection) UpdateWithFilter(ctx context.Context, filter interface{}, updater string) (*client.UpdateResult, error) { - ret := _m.Called(ctx, filter, updater) +// UpdateWithDocID provides a mock function with given fields: ctx, docID, updater +func (_m *Collection) UpdateWithDocID(ctx context.Context, docID client.DocID, updater string) (*client.UpdateResult, error) { + ret := _m.Called(ctx, docID, updater) var r0 *client.UpdateResult var r1 error - if rf, ok 
:= ret.Get(0).(func(context.Context, interface{}, string) (*client.UpdateResult, error)); ok { - return rf(ctx, filter, updater) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID, string) (*client.UpdateResult, error)); ok { + return rf(ctx, docID, updater) } - if rf, ok := ret.Get(0).(func(context.Context, interface{}, string) *client.UpdateResult); ok { - r0 = rf(ctx, filter, updater) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID, string) *client.UpdateResult); ok { + r0 = rf(ctx, docID, updater) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.UpdateResult) } } - if rf, ok := ret.Get(1).(func(context.Context, interface{}, string) error); ok { - r1 = rf(ctx, filter, updater) + if rf, ok := ret.Get(1).(func(context.Context, client.DocID, string) error); ok { + r1 = rf(ctx, docID, updater) } else { r1 = ret.Error(1) } @@ -1111,55 +1111,55 @@ func (_m *Collection) UpdateWithFilter(ctx context.Context, filter interface{}, return r0, r1 } -// Collection_UpdateWithFilter_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateWithFilter' -type Collection_UpdateWithFilter_Call struct { +// Collection_UpdateWithDocID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateWithDocID' +type Collection_UpdateWithDocID_Call struct { *mock.Call } -// UpdateWithFilter is a helper method to define mock.On call +// UpdateWithDocID is a helper method to define mock.On call // - ctx context.Context -// - filter interface{} +// - docID client.DocID // - updater string -func (_e *Collection_Expecter) UpdateWithFilter(ctx interface{}, filter interface{}, updater interface{}) *Collection_UpdateWithFilter_Call { - return &Collection_UpdateWithFilter_Call{Call: _e.mock.On("UpdateWithFilter", ctx, filter, updater)} +func (_e *Collection_Expecter) UpdateWithDocID(ctx interface{}, docID interface{}, updater interface{}) *Collection_UpdateWithDocID_Call { + return &Collection_UpdateWithDocID_Call{Call: _e.mock.On("UpdateWithDocID", ctx, docID, updater)} } -func (_c *Collection_UpdateWithFilter_Call) Run(run func(ctx context.Context, filter interface{}, updater string)) *Collection_UpdateWithFilter_Call { +func (_c *Collection_UpdateWithDocID_Call) Run(run func(ctx context.Context, docID client.DocID, updater string)) *Collection_UpdateWithDocID_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(interface{}), args[2].(string)) + run(args[0].(context.Context), args[1].(client.DocID), args[2].(string)) }) return _c } -func (_c *Collection_UpdateWithFilter_Call) Return(_a0 *client.UpdateResult, _a1 error) *Collection_UpdateWithFilter_Call { +func (_c *Collection_UpdateWithDocID_Call) Return(_a0 *client.UpdateResult, _a1 error) *Collection_UpdateWithDocID_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *Collection_UpdateWithFilter_Call) RunAndReturn(run func(context.Context, interface{}, string) (*client.UpdateResult, error)) *Collection_UpdateWithFilter_Call { +func (_c *Collection_UpdateWithDocID_Call) RunAndReturn(run func(context.Context, client.DocID, string) (*client.UpdateResult, error)) *Collection_UpdateWithDocID_Call { _c.Call.Return(run) return _c } -// UpdateWithKey provides a mock function with given fields: ctx, key, updater -func (_m *Collection) UpdateWithKey(ctx context.Context, key client.DocKey, updater string) (*client.UpdateResult, error) { - ret := _m.Called(ctx, key, updater) +// UpdateWithDocIDs provides a mock function with given 
fields: _a0, _a1, _a2 +func (_m *Collection) UpdateWithDocIDs(_a0 context.Context, _a1 []client.DocID, _a2 string) (*client.UpdateResult, error) { + ret := _m.Called(_a0, _a1, _a2) var r0 *client.UpdateResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, client.DocKey, string) (*client.UpdateResult, error)); ok { - return rf(ctx, key, updater) + if rf, ok := ret.Get(0).(func(context.Context, []client.DocID, string) (*client.UpdateResult, error)); ok { + return rf(_a0, _a1, _a2) } - if rf, ok := ret.Get(0).(func(context.Context, client.DocKey, string) *client.UpdateResult); ok { - r0 = rf(ctx, key, updater) + if rf, ok := ret.Get(0).(func(context.Context, []client.DocID, string) *client.UpdateResult); ok { + r0 = rf(_a0, _a1, _a2) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.UpdateResult) } } - if rf, ok := ret.Get(1).(func(context.Context, client.DocKey, string) error); ok { - r1 = rf(ctx, key, updater) + if rf, ok := ret.Get(1).(func(context.Context, []client.DocID, string) error); ok { + r1 = rf(_a0, _a1, _a2) } else { r1 = ret.Error(1) } @@ -1167,55 +1167,55 @@ func (_m *Collection) UpdateWithKey(ctx context.Context, key client.DocKey, upda return r0, r1 } -// Collection_UpdateWithKey_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateWithKey' -type Collection_UpdateWithKey_Call struct { +// Collection_UpdateWithDocIDs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateWithDocIDs' +type Collection_UpdateWithDocIDs_Call struct { *mock.Call } -// UpdateWithKey is a helper method to define mock.On call -// - ctx context.Context -// - key client.DocKey -// - updater string -func (_e *Collection_Expecter) UpdateWithKey(ctx interface{}, key interface{}, updater interface{}) *Collection_UpdateWithKey_Call { - return &Collection_UpdateWithKey_Call{Call: _e.mock.On("UpdateWithKey", ctx, key, updater)} +// UpdateWithDocIDs is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 []client.DocID +// - _a2 string +func (_e *Collection_Expecter) UpdateWithDocIDs(_a0 interface{}, _a1 interface{}, _a2 interface{}) *Collection_UpdateWithDocIDs_Call { + return &Collection_UpdateWithDocIDs_Call{Call: _e.mock.On("UpdateWithDocIDs", _a0, _a1, _a2)} } -func (_c *Collection_UpdateWithKey_Call) Run(run func(ctx context.Context, key client.DocKey, updater string)) *Collection_UpdateWithKey_Call { +func (_c *Collection_UpdateWithDocIDs_Call) Run(run func(_a0 context.Context, _a1 []client.DocID, _a2 string)) *Collection_UpdateWithDocIDs_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(client.DocKey), args[2].(string)) + run(args[0].(context.Context), args[1].([]client.DocID), args[2].(string)) }) return _c } -func (_c *Collection_UpdateWithKey_Call) Return(_a0 *client.UpdateResult, _a1 error) *Collection_UpdateWithKey_Call { +func (_c *Collection_UpdateWithDocIDs_Call) Return(_a0 *client.UpdateResult, _a1 error) *Collection_UpdateWithDocIDs_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *Collection_UpdateWithKey_Call) RunAndReturn(run func(context.Context, client.DocKey, string) (*client.UpdateResult, error)) *Collection_UpdateWithKey_Call { +func (_c *Collection_UpdateWithDocIDs_Call) RunAndReturn(run func(context.Context, []client.DocID, string) (*client.UpdateResult, error)) *Collection_UpdateWithDocIDs_Call { _c.Call.Return(run) return _c } -// UpdateWithKeys provides a mock function with given fields: _a0, _a1, _a2 -func 
(_m *Collection) UpdateWithKeys(_a0 context.Context, _a1 []client.DocKey, _a2 string) (*client.UpdateResult, error) { - ret := _m.Called(_a0, _a1, _a2) +// UpdateWithFilter provides a mock function with given fields: ctx, filter, updater +func (_m *Collection) UpdateWithFilter(ctx context.Context, filter interface{}, updater string) (*client.UpdateResult, error) { + ret := _m.Called(ctx, filter, updater) var r0 *client.UpdateResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []client.DocKey, string) (*client.UpdateResult, error)); ok { - return rf(_a0, _a1, _a2) + if rf, ok := ret.Get(0).(func(context.Context, interface{}, string) (*client.UpdateResult, error)); ok { + return rf(ctx, filter, updater) } - if rf, ok := ret.Get(0).(func(context.Context, []client.DocKey, string) *client.UpdateResult); ok { - r0 = rf(_a0, _a1, _a2) + if rf, ok := ret.Get(0).(func(context.Context, interface{}, string) *client.UpdateResult); ok { + r0 = rf(ctx, filter, updater) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.UpdateResult) } } - if rf, ok := ret.Get(1).(func(context.Context, []client.DocKey, string) error); ok { - r1 = rf(_a0, _a1, _a2) + if rf, ok := ret.Get(1).(func(context.Context, interface{}, string) error); ok { + r1 = rf(ctx, filter, updater) } else { r1 = ret.Error(1) } @@ -1223,32 +1223,32 @@ func (_m *Collection) UpdateWithKeys(_a0 context.Context, _a1 []client.DocKey, _ return r0, r1 } -// Collection_UpdateWithKeys_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateWithKeys' -type Collection_UpdateWithKeys_Call struct { +// Collection_UpdateWithFilter_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateWithFilter' +type Collection_UpdateWithFilter_Call struct { *mock.Call } -// UpdateWithKeys is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 []client.DocKey -// - _a2 string -func (_e *Collection_Expecter) UpdateWithKeys(_a0 interface{}, _a1 interface{}, _a2 interface{}) *Collection_UpdateWithKeys_Call { - return &Collection_UpdateWithKeys_Call{Call: _e.mock.On("UpdateWithKeys", _a0, _a1, _a2)} +// UpdateWithFilter is a helper method to define mock.On call +// - ctx context.Context +// - filter interface{} +// - updater string +func (_e *Collection_Expecter) UpdateWithFilter(ctx interface{}, filter interface{}, updater interface{}) *Collection_UpdateWithFilter_Call { + return &Collection_UpdateWithFilter_Call{Call: _e.mock.On("UpdateWithFilter", ctx, filter, updater)} } -func (_c *Collection_UpdateWithKeys_Call) Run(run func(_a0 context.Context, _a1 []client.DocKey, _a2 string)) *Collection_UpdateWithKeys_Call { +func (_c *Collection_UpdateWithFilter_Call) Run(run func(ctx context.Context, filter interface{}, updater string)) *Collection_UpdateWithFilter_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]client.DocKey), args[2].(string)) + run(args[0].(context.Context), args[1].(interface{}), args[2].(string)) }) return _c } -func (_c *Collection_UpdateWithKeys_Call) Return(_a0 *client.UpdateResult, _a1 error) *Collection_UpdateWithKeys_Call { +func (_c *Collection_UpdateWithFilter_Call) Return(_a0 *client.UpdateResult, _a1 error) *Collection_UpdateWithFilter_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *Collection_UpdateWithKeys_Call) RunAndReturn(run func(context.Context, []client.DocKey, string) (*client.UpdateResult, error)) *Collection_UpdateWithKeys_Call { +func (_c 
*Collection_UpdateWithFilter_Call) RunAndReturn(run func(context.Context, interface{}, string) (*client.UpdateResult, error)) *Collection_UpdateWithFilter_Call { _c.Call.Return(run) return _c } diff --git a/client/mocks/db.go b/client/mocks/db.go index df7b53fb5a..90dc8986d0 100644 --- a/client/mocks/db.go +++ b/client/mocks/db.go @@ -83,6 +83,62 @@ func (_c *DB_AddSchema_Call) RunAndReturn(run func(context.Context, string) ([]c return _c } +// AddView provides a mock function with given fields: ctx, gqlQuery, sdl +func (_m *DB) AddView(ctx context.Context, gqlQuery string, sdl string) ([]client.CollectionDefinition, error) { + ret := _m.Called(ctx, gqlQuery, sdl) + + var r0 []client.CollectionDefinition + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, string) ([]client.CollectionDefinition, error)); ok { + return rf(ctx, gqlQuery, sdl) + } + if rf, ok := ret.Get(0).(func(context.Context, string, string) []client.CollectionDefinition); ok { + r0 = rf(ctx, gqlQuery, sdl) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]client.CollectionDefinition) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { + r1 = rf(ctx, gqlQuery, sdl) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DB_AddView_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddView' +type DB_AddView_Call struct { + *mock.Call +} + +// AddView is a helper method to define mock.On call +// - ctx context.Context +// - gqlQuery string +// - sdl string +func (_e *DB_Expecter) AddView(ctx interface{}, gqlQuery interface{}, sdl interface{}) *DB_AddView_Call { + return &DB_AddView_Call{Call: _e.mock.On("AddView", ctx, gqlQuery, sdl)} +} + +func (_c *DB_AddView_Call) Run(run func(ctx context.Context, gqlQuery string, sdl string)) *DB_AddView_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(string)) + }) + return _c +} + +func (_c *DB_AddView_Call) Return(_a0 []client.CollectionDefinition, _a1 error) *DB_AddView_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *DB_AddView_Call) RunAndReturn(run func(context.Context, string, string) ([]client.CollectionDefinition, error)) *DB_AddView_Call { + _c.Call.Return(run) + return _c +} + // BasicExport provides a mock function with given fields: ctx, config func (_m *DB) BasicExport(ctx context.Context, config *client.BackupConfig) error { ret := _m.Called(ctx, config) @@ -464,30 +520,30 @@ func (_m *DB) GetAllSchemas(_a0 context.Context) ([]client.SchemaDescription, er return r0, r1 } -// DB_GetAllSchema_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetAllSchemas' -type DB_GetAllSchema_Call struct { +// DB_GetAllSchemas_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetAllSchemas' +type DB_GetAllSchemas_Call struct { *mock.Call } // GetAllSchemas is a helper method to define mock.On call // - _a0 context.Context -func (_e *DB_Expecter) GetAllSchemas(_a0 interface{}) *DB_GetAllSchema_Call { - return &DB_GetAllSchema_Call{Call: _e.mock.On("GetAllSchemas", _a0)} +func (_e *DB_Expecter) GetAllSchemas(_a0 interface{}) *DB_GetAllSchemas_Call { + return &DB_GetAllSchemas_Call{Call: _e.mock.On("GetAllSchemas", _a0)} } -func (_c *DB_GetAllSchema_Call) Run(run func(_a0 context.Context)) *DB_GetAllSchema_Call { +func (_c *DB_GetAllSchemas_Call) Run(run func(_a0 context.Context)) *DB_GetAllSchemas_Call { _c.Call.Run(func(args 
mock.Arguments) { run(args[0].(context.Context)) }) return _c } -func (_c *DB_GetAllSchema_Call) Return(_a0 []client.SchemaDescription, _a1 error) *DB_GetAllSchema_Call { +func (_c *DB_GetAllSchemas_Call) Return(_a0 []client.SchemaDescription, _a1 error) *DB_GetAllSchemas_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *DB_GetAllSchema_Call) RunAndReturn(run func(context.Context) ([]client.SchemaDescription, error)) *DB_GetAllSchema_Call { +func (_c *DB_GetAllSchemas_Call) RunAndReturn(run func(context.Context) ([]client.SchemaDescription, error)) *DB_GetAllSchemas_Call { _c.Call.Return(run) return _c } @@ -657,21 +713,19 @@ func (_c *DB_GetCollectionsByVersionID_Call) RunAndReturn(run func(context.Conte return _c } -// GetSchemasByName provides a mock function with given fields: _a0, _a1 -func (_m *DB) GetSchemasByName(_a0 context.Context, _a1 string) ([]client.SchemaDescription, error) { +// GetSchemaByVersionID provides a mock function with given fields: _a0, _a1 +func (_m *DB) GetSchemaByVersionID(_a0 context.Context, _a1 string) (client.SchemaDescription, error) { ret := _m.Called(_a0, _a1) - var r0 []client.SchemaDescription + var r0 client.SchemaDescription var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) ([]client.SchemaDescription, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, string) (client.SchemaDescription, error)); ok { return rf(_a0, _a1) } - if rf, ok := ret.Get(0).(func(context.Context, string) []client.SchemaDescription); ok { + if rf, ok := ret.Get(0).(func(context.Context, string) client.SchemaDescription); ok { r0 = rf(_a0, _a1) } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]client.SchemaDescription) - } + r0 = ret.Get(0).(client.SchemaDescription) } if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { @@ -683,37 +737,37 @@ func (_m *DB) GetSchemasByName(_a0 context.Context, _a1 string) ([]client.Schema return r0, r1 } -// DB_GetSchemaByName_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetSchemasByName' -type DB_GetSchemaByName_Call struct { +// DB_GetSchemaByVersionID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetSchemaByVersionID' +type DB_GetSchemaByVersionID_Call struct { *mock.Call } -// GetSchemasByName is a helper method to define mock.On call +// GetSchemaByVersionID is a helper method to define mock.On call // - _a0 context.Context // - _a1 string -func (_e *DB_Expecter) GetSchemasByName(_a0 interface{}, _a1 interface{}) *DB_GetSchemaByName_Call { - return &DB_GetSchemaByName_Call{Call: _e.mock.On("GetSchemasByName", _a0, _a1)} +func (_e *DB_Expecter) GetSchemaByVersionID(_a0 interface{}, _a1 interface{}) *DB_GetSchemaByVersionID_Call { + return &DB_GetSchemaByVersionID_Call{Call: _e.mock.On("GetSchemaByVersionID", _a0, _a1)} } -func (_c *DB_GetSchemaByName_Call) Run(run func(_a0 context.Context, _a1 string)) *DB_GetSchemaByName_Call { +func (_c *DB_GetSchemaByVersionID_Call) Run(run func(_a0 context.Context, _a1 string)) *DB_GetSchemaByVersionID_Call { _c.Call.Run(func(args mock.Arguments) { run(args[0].(context.Context), args[1].(string)) }) return _c } -func (_c *DB_GetSchemaByName_Call) Return(_a0 []client.SchemaDescription, _a1 error) *DB_GetSchemaByName_Call { +func (_c *DB_GetSchemaByVersionID_Call) Return(_a0 client.SchemaDescription, _a1 error) *DB_GetSchemaByVersionID_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *DB_GetSchemaByName_Call) RunAndReturn(run func(context.Context, 
string) ([]client.SchemaDescription, error)) *DB_GetSchemaByName_Call { +func (_c *DB_GetSchemaByVersionID_Call) RunAndReturn(run func(context.Context, string) (client.SchemaDescription, error)) *DB_GetSchemaByVersionID_Call { _c.Call.Return(run) return _c } -// GetSchemasByRoot provides a mock function with given fields: _a0, _a1 -func (_m *DB) GetSchemasByRoot(_a0 context.Context, _a1 string) ([]client.SchemaDescription, error) { +// GetSchemasByName provides a mock function with given fields: _a0, _a1 +func (_m *DB) GetSchemasByName(_a0 context.Context, _a1 string) ([]client.SchemaDescription, error) { ret := _m.Called(_a0, _a1) var r0 []client.SchemaDescription @@ -738,48 +792,50 @@ func (_m *DB) GetSchemasByRoot(_a0 context.Context, _a1 string) ([]client.Schema return r0, r1 } -// DB_GetSchemaByRoot_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetSchemasByRoot' -type DB_GetSchemaByRoot_Call struct { +// DB_GetSchemasByName_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetSchemasByName' +type DB_GetSchemasByName_Call struct { *mock.Call } -// GetSchemasByRoot is a helper method to define mock.On call +// GetSchemasByName is a helper method to define mock.On call // - _a0 context.Context // - _a1 string -func (_e *DB_Expecter) GetSchemasByRoot(_a0 interface{}, _a1 interface{}) *DB_GetSchemaByRoot_Call { - return &DB_GetSchemaByRoot_Call{Call: _e.mock.On("GetSchemasByRoot", _a0, _a1)} +func (_e *DB_Expecter) GetSchemasByName(_a0 interface{}, _a1 interface{}) *DB_GetSchemasByName_Call { + return &DB_GetSchemasByName_Call{Call: _e.mock.On("GetSchemasByName", _a0, _a1)} } -func (_c *DB_GetSchemaByRoot_Call) Run(run func(_a0 context.Context, _a1 string)) *DB_GetSchemaByRoot_Call { +func (_c *DB_GetSchemasByName_Call) Run(run func(_a0 context.Context, _a1 string)) *DB_GetSchemasByName_Call { _c.Call.Run(func(args mock.Arguments) { run(args[0].(context.Context), args[1].(string)) }) return _c } -func (_c *DB_GetSchemaByRoot_Call) Return(_a0 []client.SchemaDescription, _a1 error) *DB_GetSchemaByRoot_Call { +func (_c *DB_GetSchemasByName_Call) Return(_a0 []client.SchemaDescription, _a1 error) *DB_GetSchemasByName_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *DB_GetSchemaByRoot_Call) RunAndReturn(run func(context.Context, string) ([]client.SchemaDescription, error)) *DB_GetSchemaByRoot_Call { +func (_c *DB_GetSchemasByName_Call) RunAndReturn(run func(context.Context, string) ([]client.SchemaDescription, error)) *DB_GetSchemasByName_Call { _c.Call.Return(run) return _c } -// GetSchemaByVersionID provides a mock function with given fields: _a0, _a1 -func (_m *DB) GetSchemaByVersionID(_a0 context.Context, _a1 string) (client.SchemaDescription, error) { +// GetSchemasByRoot provides a mock function with given fields: _a0, _a1 +func (_m *DB) GetSchemasByRoot(_a0 context.Context, _a1 string) ([]client.SchemaDescription, error) { ret := _m.Called(_a0, _a1) - var r0 client.SchemaDescription + var r0 []client.SchemaDescription var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (client.SchemaDescription, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, string) ([]client.SchemaDescription, error)); ok { return rf(_a0, _a1) } - if rf, ok := ret.Get(0).(func(context.Context, string) client.SchemaDescription); ok { + if rf, ok := ret.Get(0).(func(context.Context, string) []client.SchemaDescription); ok { r0 = rf(_a0, _a1) } else { - r0 = ret.Get(0).(client.SchemaDescription) + 
if ret.Get(0) != nil { + r0 = ret.Get(0).([]client.SchemaDescription) + } } if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { @@ -791,31 +847,31 @@ func (_m *DB) GetSchemaByVersionID(_a0 context.Context, _a1 string) (client.Sche return r0, r1 } -// DB_GetSchemaByVersionID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetSchemaByVersionID' -type DB_GetSchemaByVersionID_Call struct { +// DB_GetSchemasByRoot_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetSchemasByRoot' +type DB_GetSchemasByRoot_Call struct { *mock.Call } -// GetSchemaByVersionID is a helper method to define mock.On call +// GetSchemasByRoot is a helper method to define mock.On call // - _a0 context.Context // - _a1 string -func (_e *DB_Expecter) GetSchemaByVersionID(_a0 interface{}, _a1 interface{}) *DB_GetSchemaByVersionID_Call { - return &DB_GetSchemaByVersionID_Call{Call: _e.mock.On("GetSchemaByVersionID", _a0, _a1)} +func (_e *DB_Expecter) GetSchemasByRoot(_a0 interface{}, _a1 interface{}) *DB_GetSchemasByRoot_Call { + return &DB_GetSchemasByRoot_Call{Call: _e.mock.On("GetSchemasByRoot", _a0, _a1)} } -func (_c *DB_GetSchemaByVersionID_Call) Run(run func(_a0 context.Context, _a1 string)) *DB_GetSchemaByVersionID_Call { +func (_c *DB_GetSchemasByRoot_Call) Run(run func(_a0 context.Context, _a1 string)) *DB_GetSchemasByRoot_Call { _c.Call.Run(func(args mock.Arguments) { run(args[0].(context.Context), args[1].(string)) }) return _c } -func (_c *DB_GetSchemaByVersionID_Call) Return(_a0 client.SchemaDescription, _a1 error) *DB_GetSchemaByVersionID_Call { +func (_c *DB_GetSchemasByRoot_Call) Return(_a0 []client.SchemaDescription, _a1 error) *DB_GetSchemasByRoot_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *DB_GetSchemaByVersionID_Call) RunAndReturn(run func(context.Context, string) (client.SchemaDescription, error)) *DB_GetSchemaByVersionID_Call { +func (_c *DB_GetSchemasByRoot_Call) RunAndReturn(run func(context.Context, string) ([]client.SchemaDescription, error)) *DB_GetSchemasByRoot_Call { _c.Call.Return(run) return _c } diff --git a/client/request/commit.go b/client/request/commit.go index 0715276547..ff65e20822 100644 --- a/client/request/commit.go +++ b/client/request/commit.go @@ -19,7 +19,7 @@ var ( type CommitSelect struct { Field - DocKey immutable.Option[string] + DocID immutable.Option[string] FieldID immutable.Option[string] Cid immutable.Option[string] Depth immutable.Option[uint64] diff --git a/client/request/consts.go b/client/request/consts.go index 7287a49ac3..85b7d63d84 100644 --- a/client/request/consts.go +++ b/client/request/consts.go @@ -21,12 +21,8 @@ const ( Cid = "cid" Data = "data" - DocKey = "dockey" - DocKeys = "dockeys" FieldName = "field" FieldIDName = "fieldId" - Id = "id" - Ids = "ids" ShowDeleted = "showDeleted" FilterClause = "filter" @@ -36,14 +32,21 @@ const ( OrderClause = "order" DepthClause = "depth" + DocIDArgName = "docID" + DocIDsArgName = "docIDs" + AverageFieldName = "_avg" CountFieldName = "_count" - KeyFieldName = "_key" + DocIDFieldName = "_docID" GroupFieldName = "_group" DeletedFieldName = "_deleted" SumFieldName = "_sum" VersionFieldName = "_version" + // Newly generated document ID for a backed-up document, + // which might have a different _docID originally.
+ NewDocIDFieldName = "_docIDNew" + ExplainLabel = "explain" LatestCommitsName = "latestCommits" @@ -53,13 +56,18 @@ const ( LinksFieldName = "links" HeightFieldName = "height" CidFieldName = "cid" - DockeyFieldName = "dockey" CollectionIDFieldName = "collectionID" SchemaVersionIDFieldName = "schemaVersionId" FieldNameFieldName = "fieldName" FieldIDFieldName = "fieldId" DeltaFieldName = "delta" + DeltaArgFieldName = "FieldName" + DeltaArgData = "Data" + DeltaArgSchemaVersionID = "SchemaVersionID" + DeltaArgPriority = "Priority" + DeltaArgDocID = "DocID" + LinksNameFieldName = "name" LinksCidFieldName = "cid" @@ -80,7 +88,7 @@ var ( CountFieldName: true, SumFieldName: true, AverageFieldName: true, - KeyFieldName: true, + DocIDFieldName: true, DeletedFieldName: true, } @@ -98,7 +106,7 @@ var ( VersionFields = []string{ HeightFieldName, CidFieldName, - DockeyFieldName, + DocIDArgName, CollectionIDFieldName, SchemaVersionIDFieldName, FieldNameFieldName, diff --git a/client/request/mutation.go b/client/request/mutation.go index c7f0e07ee8..3d19210458 100644 --- a/client/request/mutation.go +++ b/client/request/mutation.go @@ -46,8 +46,8 @@ func (m ObjectMutation) ToSelect() *Select { Name: m.Collection, Alias: m.Alias, }, - Fields: m.Fields, - DocKeys: m.IDs, - Filter: m.Filter, + Fields: m.Fields, + DocIDs: m.IDs, + Filter: m.Filter, } } diff --git a/client/request/select.go b/client/request/select.go index f7d1517dec..863bba2aeb 100644 --- a/client/request/select.go +++ b/client/request/select.go @@ -30,8 +30,8 @@ const ( type Select struct { Field - DocKeys immutable.Option[[]string] - CID immutable.Option[string] + DocIDs immutable.Option[[]string] + CID immutable.Option[string] // Root is the top level type of parsed request Root SelectionType @@ -114,7 +114,7 @@ func (s *Select) validateGroupBy() []error { // of `Select` objects. type selectJson struct { Field - DocKeys immutable.Option[[]string] + DocIDs immutable.Option[[]string] CID immutable.Option[string] Root SelectionType Limit immutable.Option[uint64] @@ -140,7 +140,7 @@ func (s *Select) UnmarshalJSON(bytes []byte) error { } s.Field = selectMap.Field - s.DocKeys = selectMap.DocKeys + s.DocIDs = selectMap.DocIDs s.CID = selectMap.CID s.Root = selectMap.Root s.Limit = selectMap.Limit diff --git a/client/request/subscription.go b/client/request/subscription.go index c788efbb4c..bb4e01156c 100644 --- a/client/request/subscription.go +++ b/client/request/subscription.go @@ -30,15 +30,15 @@ type ObjectSubscription struct { // ToSelect returns a basic Select object, with the same Name, Alias, and Fields as // the Subscription object. Used to create a Select planNode for the event stream return objects. 
-func (m ObjectSubscription) ToSelect(docKey, cid string) *Select { +func (m ObjectSubscription) ToSelect(docID, cid string) *Select { return &Select{ Field: Field{ Name: m.Collection, Alias: m.Alias, }, - DocKeys: immutable.Some([]string{docKey}), - CID: immutable.Some(cid), - Fields: m.Fields, - Filter: m.Filter, + DocIDs: immutable.Some([]string{docID}), + CID: immutable.Some(cid), + Fields: m.Fields, + Filter: m.Filter, } } diff --git a/core/crdt/base_test.go b/core/crdt/base_test.go index e69d69f05e..c3db4af3d6 100644 --- a/core/crdt/base_test.go +++ b/core/crdt/base_test.go @@ -41,7 +41,7 @@ func TestBaseCRDTNew(t *testing.T) { func TestBaseCRDTvalueKey(t *testing.T) { base := exampleBaseCRDT() - vk := base.key.WithDocKey("mykey").WithValueFlag() + vk := base.key.WithDocID("mykey").WithValueFlag() if vk.ToString() != "/v/mykey" { t.Errorf("Incorrect valueKey. Have %v, want %v", vk.ToString(), "/v/mykey") } @@ -49,7 +49,7 @@ func TestBaseCRDTprioryKey(t *testing.T) { base := exampleBaseCRDT() - pk := base.key.WithDocKey("mykey").WithPriorityFlag() + pk := base.key.WithDocID("mykey").WithPriorityFlag() if pk.ToString() != "/p/mykey" { t.Errorf("Incorrect priorityKey. Have %v, want %v", pk.ToString(), "/p/mykey") } @@ -58,13 +58,13 @@ func TestBaseCRDTSetGetPriority(t *testing.T) { base := exampleBaseCRDT() ctx := context.Background() - err := base.setPriority(ctx, base.key.WithDocKey("mykey"), 10) + err := base.setPriority(ctx, base.key.WithDocID("mykey"), 10) if err != nil { t.Errorf("baseCRDT failed to set Priority. err: %v", err) return } - priority, err := base.getPriority(ctx, base.key.WithDocKey("mykey")) + priority, err := base.getPriority(ctx, base.key.WithDocID("mykey")) if err != nil { t.Errorf("baseCRDT failed to get priority. err: %v", err) return diff --git a/core/crdt/composite.go b/core/crdt/composite.go index 761cc07828..39c0a3efe4 100644 --- a/core/crdt/composite.go +++ b/core/crdt/composite.go @@ -37,7 +37,7 @@ type CompositeDAGDelta struct { SchemaVersionID string Priority uint64 Data []byte - DocKey []byte + DocID []byte SubDAGs []core.DAGLink // Status represents the status of the document. By default it is `Active`. // Alternatively, it can be set to `Deleted`.
@@ -67,10 +67,10 @@ func (delta *CompositeDAGDelta) Marshal() ([]byte, error) { SchemaVersionID string Priority uint64 Data []byte - DocKey []byte + DocID []byte Status uint8 FieldName string - }{delta.SchemaVersionID, delta.Priority, delta.Data, delta.DocKey, delta.Status.UInt8(), delta.FieldName}) + }{delta.SchemaVersionID, delta.Priority, delta.Data, delta.DocID, delta.Status.UInt8(), delta.FieldName}) if err != nil { return nil, err } @@ -116,7 +116,7 @@ func (c CompositeDAG) Set(patch []byte, links []core.DAGLink) *CompositeDAGDelta }) return &CompositeDAGDelta{ Data: patch, - DocKey: []byte(c.key.DocKey), + DocID: []byte(c.key.DocID), SubDAGs: links, SchemaVersionID: c.schemaVersionKey.SchemaVersionId, FieldName: c.fieldName, diff --git a/core/crdt/lwwreg.go b/core/crdt/lwwreg.go index 18979c1bfb..c256c35cea 100644 --- a/core/crdt/lwwreg.go +++ b/core/crdt/lwwreg.go @@ -32,7 +32,7 @@ type LWWRegDelta struct { SchemaVersionID string Priority uint64 Data []byte - DocKey []byte + DocID []byte FieldName string } @@ -58,9 +58,9 @@ func (delta *LWWRegDelta) Marshal() ([]byte, error) { SchemaVersionID string Priority uint64 Data []byte - DocKey []byte + DocID []byte FieldName string - }{delta.SchemaVersionID, delta.Priority, delta.Data, delta.DocKey, delta.FieldName}) + }{delta.SchemaVersionID, delta.Priority, delta.Data, delta.DocID, delta.FieldName}) if err != nil { return nil, err } @@ -105,7 +105,7 @@ func (reg LWWRegister) Value(ctx context.Context) ([]byte, error) { func (reg LWWRegister) Set(value []byte) *LWWRegDelta { return &LWWRegDelta{ Data: value, - DocKey: []byte(reg.key.DocKey), + DocID: []byte(reg.key.DocID), FieldName: reg.fieldName, SchemaVersionID: reg.schemaVersionKey.SchemaVersionId, } diff --git a/core/crdt/lwwreg_test.go b/core/crdt/lwwreg_test.go index 2b978feb2d..5e6e1b27a4 100644 --- a/core/crdt/lwwreg_test.go +++ b/core/crdt/lwwreg_test.go @@ -32,7 +32,7 @@ func newMockStore() datastore.DSReaderWriter { func setupLWWRegister() LWWRegister { store := newMockStore() - key := core.DataStoreKey{DocKey: "AAAA-BBBB"} + key := core.DataStoreKey{DocID: "AAAA-BBBB"} return NewLWWRegister(store, core.CollectionSchemaVersionKey{}, key, "") } diff --git a/core/doc.go b/core/doc.go index 8f6700f50c..2a149dccc5 100644 --- a/core/doc.go +++ b/core/doc.go @@ -20,8 +20,8 @@ import ( "github.com/sourcenetwork/defradb/client/request" ) -// DocKeyFieldIndex is the index of the key field in a document. -const DocKeyFieldIndex int = 0 +// DocIDFieldIndex is the index of the DocID field in a document. +const DocIDFieldIndex int = 0 // DocFields is a slice of fields in a document. type DocFields []any @@ -39,19 +39,19 @@ type Doc struct { SchemaVersionID string } -// GetKey returns the DocKey for this document. +// GetID returns the DocID for this document. // // Will panic if the document is empty. -func (d *Doc) GetKey() string { - key, _ := d.Fields[DocKeyFieldIndex].(string) - return key +func (d *Doc) GetID() string { + docID, _ := d.Fields[DocIDFieldIndex].(string) + return docID } -// SetKey sets the DocKey for this document. +// SetID sets the DocID for this document. // // Will panic if the document has not been initialised with fields. -func (d *Doc) SetKey(key string) { - d.Fields[DocKeyFieldIndex] = key +func (d *Doc) SetID(docID string) { + d.Fields[DocIDFieldIndex] = docID } // Clone returns a deep copy of this document. 
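As a quick illustration of the renamed `core.Doc` accessors: a minimal sketch, assuming the `Fields` slice is initialised before use (otherwise `SetID` panics, as the doc comment warns; the docID literal is hypothetical):

```
package main

import (
	"fmt"

	"github.com/sourcenetwork/defradb/core"
)

func main() {
	// SetID writes into Fields[core.DocIDFieldIndex] (index 0), so the
	// slice must have at least one element before it is called.
	doc := core.Doc{Fields: make(core.DocFields, 1)}
	doc.SetID("bae-00000000-0000-0000-0000-000000000000") // hypothetical docID
	fmt.Println(doc.GetID())
}
```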
diff --git a/core/key.go b/core/key.go index 0a2529338a..0c038b11dd 100644 --- a/core/key.go +++ b/core/key.go @@ -67,7 +67,7 @@ type Key interface { type DataStoreKey struct { CollectionID string InstanceType InstanceType - DocKey string + DocID string FieldId string } @@ -87,13 +87,13 @@ var _ Key = (*IndexDataStoreKey)(nil) type PrimaryDataStoreKey struct { CollectionId string - DocKey string + DocID string } var _ Key = (*PrimaryDataStoreKey)(nil) type HeadStoreKey struct { - DocKey string + DocID string FieldId string //can be 'C' Cid cid.Cid } @@ -192,7 +192,7 @@ var _ Key = (*ReplicatorKey)(nil) // splitting the input using '/' as a field delimiter. It assumes // that the input string is in the following format: // -// /[CollectionId]/[InstanceType]/[DocKey]/[FieldId] +// /[CollectionId]/[InstanceType]/[DocID]/[FieldId] // // Any properties before the above (assuming a '/' delimiter) are ignored func NewDataStoreKey(key string) (DataStoreKey, error) { @@ -212,7 +212,7 @@ func NewDataStoreKey(key string) (DataStoreKey, error) { dataStoreKey.CollectionID = elements[0] dataStoreKey.InstanceType = InstanceType(elements[1]) - dataStoreKey.DocKey = elements[2] + dataStoreKey.DocID = elements[2] if numberOfElements == 4 { dataStoreKey.FieldId = elements[3] } @@ -228,9 +228,9 @@ func MustNewDataStoreKey(key string) DataStoreKey { return dsKey } -func DataStoreKeyFromDocKey(dockey client.DocKey) DataStoreKey { +func DataStoreKeyFromDocID(docID client.DocID) DataStoreKey { return DataStoreKey{ - DocKey: dockey.String(), + DocID: docID.String(), } } @@ -238,7 +238,7 @@ func DataStoreKeyFromDocKey(dockey client.DocKey) DataStoreKey { // splitting the input using '/' as a field delimiter. It assumes // that the input string is in the following format: // -// /[DocKey]/[FieldId]/[Cid] +// /[DocID]/[FieldId]/[Cid] // // Any properties before the above are ignored func NewHeadStoreKey(key string) (HeadStoreKey, error) { @@ -254,7 +254,7 @@ func NewHeadStoreKey(key string) (HeadStoreKey, error) { return HeadStoreKey{ // elements[0] is empty (key has leading '/') - DocKey: elements[1], + DocID: elements[1], FieldId: elements[2], Cid: cid, }, nil @@ -390,15 +390,15 @@ func (k DataStoreKey) WithDeletedFlag() DataStoreKey { return newKey } -func (k DataStoreKey) WithDocKey(docKey string) DataStoreKey { +func (k DataStoreKey) WithDocID(docID string) DataStoreKey { newKey := k - newKey.DocKey = docKey + newKey.DocID = docID return newKey } func (k DataStoreKey) WithInstanceInfo(key DataStoreKey) DataStoreKey { newKey := k - newKey.DocKey = key.DocKey + newKey.DocID = key.DocID newKey.FieldId = key.FieldId newKey.InstanceType = key.InstanceType return newKey @@ -412,14 +412,14 @@ func (k DataStoreKey) WithFieldId(fieldId string) DataStoreKey { func (k DataStoreKey) ToHeadStoreKey() HeadStoreKey { return HeadStoreKey{ - DocKey: k.DocKey, + DocID: k.DocID, FieldId: k.FieldId, } } -func (k HeadStoreKey) WithDocKey(docKey string) HeadStoreKey { +func (k HeadStoreKey) WithDocID(docID string) HeadStoreKey { newKey := k - newKey.DocKey = docKey + newKey.DocID = docID return newKey } @@ -444,8 +444,8 @@ func (k DataStoreKey) ToString() string { if k.InstanceType != "" { result = result + "/" + string(k.InstanceType) } - if k.DocKey != "" { - result = result + "/" + k.DocKey + if k.DocID != "" { + result = result + "/" + k.DocID } if k.FieldId != "" { result = result + "/" + k.FieldId @@ -464,7 +464,7 @@ func (k DataStoreKey) ToDS() ds.Key { func (k DataStoreKey) Equal(other DataStoreKey) bool { return
k.CollectionID == other.CollectionID && - k.DocKey == other.DocKey && + k.DocID == other.DocID && k.FieldId == other.FieldId && k.InstanceType == other.InstanceType } @@ -472,7 +472,7 @@ func (k DataStoreKey) Equal(other DataStoreKey) bool { func (k DataStoreKey) ToPrimaryDataStoreKey() PrimaryDataStoreKey { return PrimaryDataStoreKey{ CollectionId: k.CollectionID, - DocKey: k.DocKey, + DocID: k.DocID, } } @@ -582,7 +582,7 @@ func (k IndexDataStoreKey) Equal(other IndexDataStoreKey) bool { func (k PrimaryDataStoreKey) ToDataStoreKey() DataStoreKey { return DataStoreKey{ CollectionID: k.CollectionId, - DocKey: k.DocKey, + DocID: k.DocID, } } @@ -601,8 +601,8 @@ func (k PrimaryDataStoreKey) ToString() string { result = result + "/" + k.CollectionId } result = result + PRIMARY_KEY - if k.DocKey != "" { - result = result + "/" + k.DocKey + if k.DocID != "" { + result = result + "/" + k.DocID } return result @@ -786,8 +786,8 @@ func (k ReplicatorKey) ToDS() ds.Key { func (k HeadStoreKey) ToString() string { var result string - if k.DocKey != "" { - result = result + "/" + k.DocKey + if k.DocID != "" { + result = result + "/" + k.DocID } if k.FieldId != "" { result = result + "/" + k.FieldId @@ -817,8 +817,8 @@ func (k DataStoreKey) PrefixEnd() DataStoreKey { newKey.FieldId = string(bytesPrefixEnd([]byte(k.FieldId))) return newKey } - if k.DocKey != "" { - newKey.DocKey = string(bytesPrefixEnd([]byte(k.DocKey))) + if k.DocID != "" { + newKey.DocID = string(bytesPrefixEnd([]byte(k.DocID))) return newKey } if k.InstanceType != "" { diff --git a/core/key_test.go b/core/key_test.go index d22498bd8c..4984c5b14f 100644 --- a/core/key_test.go +++ b/core/key_test.go @@ -29,14 +29,14 @@ func TestNewDataStoreKey_ReturnsEmptyStruct_GivenEmptyString(t *testing.T) { assert.ErrorIs(t, ErrEmptyKey, err) } -func TestNewDataStoreKey_ReturnsCollectionIdAndIndexIdAndDocKeyAndFieldIdAndInstanceType_GivenFourItemsWithType( +func TestNewDataStoreKey_ReturnsCollectionIdAndIndexIdAndDocIDAndFieldIdAndInstanceType_GivenFourItemsWithType( t *testing.T, ) { instanceType := "anyType" fieldId := "f1" - docKey := "docKey" + docID := "docID" collectionId := "1" - inputString := collectionId + "/" + instanceType + "/" + docKey + "/" + fieldId + inputString := collectionId + "/" + instanceType + "/" + docID + "/" + fieldId result, err := NewDataStoreKey(inputString) if err != nil { @@ -48,11 +48,11 @@ func TestNewDataStoreKey_ReturnsCollectionIdAndIndexIdAndDocKeyAndFieldIdAndInst t, DataStoreKey{ CollectionID: collectionId, - DocKey: docKey, + DocID: docID, FieldId: fieldId, InstanceType: InstanceType(instanceType)}, result) - assert.Equal(t, "/"+collectionId+"/"+instanceType+"/"+docKey+"/"+fieldId, resultString) + assert.Equal(t, "/"+collectionId+"/"+instanceType+"/"+docID+"/"+fieldId, resultString) } func TestNewDataStoreKey_ReturnsEmptyStruct_GivenAStringWithMissingElements(t *testing.T) { @@ -65,9 +65,9 @@ func TestNewDataStoreKey_ReturnsEmptyStruct_GivenAStringWithMissingElements(t *t func TestNewDataStoreKey_GivenAShortObjectMarker(t *testing.T) { instanceType := "anyType" - docKey := "docKey" + docID := "docID" collectionId := "1" - inputString := collectionId + "/" + instanceType + "/" + docKey + inputString := collectionId + "/" + instanceType + "/" + docID result, err := NewDataStoreKey(inputString) if err != nil { @@ -79,18 +79,18 @@ func TestNewDataStoreKey_GivenAShortObjectMarker(t *testing.T) { t, DataStoreKey{ CollectionID: collectionId, - DocKey: docKey, + DocID: docID, InstanceType: 
InstanceType(instanceType)}, result) - assert.Equal(t, "/"+collectionId+"/"+instanceType+"/"+docKey, resultString) + assert.Equal(t, "/"+collectionId+"/"+instanceType+"/"+docID, resultString) } func TestNewDataStoreKey_GivenAStringWithExtraPrefixes(t *testing.T) { instanceType := "anyType" fieldId := "f1" - docKey := "docKey" + docID := "docID" collectionId := "1" - inputString := "/db/my_database_name/data/" + collectionId + "/" + instanceType + "/" + docKey + "/" + fieldId + inputString := "/db/my_database_name/data/" + collectionId + "/" + instanceType + "/" + docID + "/" + fieldId _, err := NewDataStoreKey(inputString) @@ -100,9 +100,9 @@ func TestNewDataStoreKey_GivenAStringWithExtraPrefixes(t *testing.T) { func TestNewDataStoreKey_GivenAStringWithExtraSuffix(t *testing.T) { instanceType := "anyType" fieldId := "f1" - docKey := "docKey" + docID := "docID" collectionId := "1" - inputString := "/db/data/" + collectionId + "/" + instanceType + "/" + docKey + "/" + fieldId + "/version_number" + inputString := "/db/data/" + collectionId + "/" + instanceType + "/" + docID + "/" + fieldId + "/version_number" _, err := NewDataStoreKey(inputString) diff --git a/db/backup.go b/db/backup.go index e2958d1c96..cc8cd01fff 100644 --- a/db/backup.go +++ b/db/backup.go @@ -74,7 +74,7 @@ func (db *db) basicImport(ctx context.Context, txn datastore.Txn, filepath strin for _, field := range col.Schema().Fields { if field.Kind == client.FieldKind_FOREIGN_OBJECT { if val, ok := docMap[field.Name+request.RelatedObjectID]; ok { - if docMap["_newKey"] == val { + if docMap[request.NewDocIDFieldName] == val { resetMap[field.Name+request.RelatedObjectID] = val delete(docMap, field.Name+request.RelatedObjectID) } @@ -82,8 +82,8 @@ func (db *db) basicImport(ctx context.Context, txn datastore.Txn, filepath strin } } - delete(docMap, "_key") - delete(docMap, "_newKey") + delete(docMap, request.DocIDFieldName) + delete(docMap, request.NewDocIDFieldName) doc, err := client.NewDocFromMap(docMap) if err != nil { @@ -189,13 +189,13 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client return err } colTxn := col.WithTxn(txn) - keysCh, err := colTxn.GetAllDocKeys(ctx) + docIDsCh, err := colTxn.GetAllDocIDs(ctx) if err != nil { return err } firstDoc := true - for key := range keysCh { + for docResultWithID := range docIDsCh { if firstDoc { firstDoc = false } else { @@ -205,7 +205,7 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client return err } } - doc, err := colTxn.Get(ctx, key.Key, false) + doc, err := colTxn.Get(ctx, docResultWithID.ID, false) if err != nil { return err } @@ -225,7 +225,7 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client if err != nil { return err } - if foreignKey.(string) == doc.Key().String() { + if foreignKey.(string) == doc.ID().String() { isSelfReference = true refFieldName = field.Name + request.RelatedObjectID } @@ -234,11 +234,11 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client if err != nil { return NewErrFailedToGetCollection(field.Schema, err) } - foreignDocKey, err := client.NewDocKeyFromString(foreignKey.(string)) + foreignDocID, err := client.NewDocIDFromString(foreignKey.(string)) if err != nil { return err } - foreignDoc, err := foreignCol.Get(ctx, foreignDocKey, false) + foreignDoc, err := foreignCol.Get(ctx, foreignDocID, false) if err != nil { err := doc.Set(field.Name+request.RelatedObjectID, nil) if err != nil { @@ -250,12 +250,12 @@ func (db *db) 
basicExport(ctx context.Context, txn datastore.Txn, config *client return err } - delete(oldForeignDoc, "_key") - if foreignDoc.Key().String() == foreignDocKey.String() { + delete(oldForeignDoc, request.DocIDFieldName) + if foreignDoc.ID().String() == foreignDocID.String() { delete(oldForeignDoc, field.Name+request.RelatedObjectID) } - if foreignDoc.Key().String() == doc.Key().String() { + if foreignDoc.ID().String() == doc.ID().String() { isSelfReference = true refFieldName = field.Name + request.RelatedObjectID } @@ -265,15 +265,15 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client return err } - if foreignDoc.Key().String() != doc.Key().String() { - err = doc.Set(field.Name+request.RelatedObjectID, newForeignDoc.Key().String()) + if foreignDoc.ID().String() != doc.ID().String() { + err = doc.Set(field.Name+request.RelatedObjectID, newForeignDoc.ID().String()) if err != nil { return err } } - if newForeignDoc.Key().String() != foreignDoc.Key().String() { - keyChangeCache[foreignDoc.Key().String()] = newForeignDoc.Key().String() + if newForeignDoc.ID().String() != foreignDoc.ID().String() { + keyChangeCache[foreignDoc.ID().String()] = newForeignDoc.ID().String() } } } @@ -286,7 +286,7 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client return err } - delete(docM, "_key") + delete(docM, request.DocIDFieldName) if isSelfReference { delete(docM, refFieldName) } @@ -295,17 +295,17 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client if err != nil { return err } - // newKey is needed to let the user know what will be the key of the imported document. - docM["_newKey"] = newDoc.Key().String() - // NewDocFromMap removes the "_key" map item so we add it back. - docM["_key"] = doc.Key().String() + // a new docID is needed to let the user know what will be the docID of the imported document. + docM[request.NewDocIDFieldName] = newDoc.ID().String() + // NewDocFromMap removes the "_docID" map item so we add it back. 
+ docM[request.DocIDFieldName] = doc.ID().String() if isSelfReference { - docM[refFieldName] = newDoc.Key().String() + docM[refFieldName] = newDoc.ID().String() } - if newDoc.Key().String() != doc.Key().String() { - keyChangeCache[doc.Key().String()] = newDoc.Key().String() + if newDoc.ID().String() != doc.ID().String() { + keyChangeCache[doc.ID().String()] = newDoc.ID().String() } var b []byte diff --git a/db/backup_test.go b/db/backup_test.go index f0e7a6e338..cbe1aed58d 100644 --- a/db/backup_test.go +++ b/db/backup_test.go @@ -77,7 +77,7 @@ func TestBasicExport_WithNormalFormatting_NoError(t *testing.T) { require.NoError(t, err) expectedMap := map[string]any{} - data := []byte(`{"Address":[{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}],"User":[{"_key":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","_newKey":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","age":40,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`) + data := []byte(`{"Address":[{"_docID":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_docIDNew":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}],"User":[{"_docID":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","_docIDNew":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","age":40,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`) err = json.Unmarshal(data, &expectedMap) require.NoError(t, err) require.EqualValues(t, expectedMap, fileMap) @@ -139,7 +139,7 @@ func TestBasicExport_WithPrettyFormatting_NoError(t *testing.T) { require.NoError(t, err) expectedMap := map[string]any{} - data := []byte(`{"Address":[{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}],"User":[{"_key":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","_newKey":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","age":40,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`) + data := []byte(`{"Address":[{"_docID":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_docIDNew":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}],"User":[{"_docID":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","_docIDNew":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","age":40,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`) err = json.Unmarshal(data, &expectedMap) require.NoError(t, err) require.EqualValues(t, expectedMap, fileMap) @@ -201,7 +201,7 @@ func TestBasicExport_WithSingleCollection_NoError(t *testing.T) { require.NoError(t, err) expectedMap := map[string]any{} - data := []byte(`{"Address":[{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}]}`) + data := []byte(`{"Address":[{"_docID":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_docIDNew":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}]}`) err = json.Unmarshal(data, &expectedMap) require.NoError(t, err) require.EqualValues(t, expectedMap, fileMap) @@ -275,7 +275,7 @@ func TestBasicExport_WithMultipleCollectionsAndUpdate_NoError(t *testing.T) { 
require.NoError(t, err) expectedMap := map[string]any{} - data := []byte(`{"Book":[{"_key":"bae-4399f189-138d-5d49-9e25-82e78463677b","_newKey":"bae-78a40f28-a4b8-5dca-be44-392b0f96d0ff","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"Game of chains"},{"_key":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da","_newKey":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"}],"User":[{"_key":"bae-0648f44e-74e8-593b-a662-3310ec278927","_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}]}`) + data := []byte(`{"Book":[{"_docID":"bae-4399f189-138d-5d49-9e25-82e78463677b","_docIDNew":"bae-78a40f28-a4b8-5dca-be44-392b0f96d0ff","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"Game of chains"},{"_docID":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da","_docIDNew":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"}],"User":[{"_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927","_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}]}`) err = json.Unmarshal(data, &expectedMap) require.NoError(t, err) require.EqualValues(t, expectedMap, fileMap) @@ -330,7 +330,7 @@ func TestBasicExport_EnsureFileOverwrite_NoError(t *testing.T) { err = os.WriteFile( filepath, - []byte(`{"Address":[{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}],"User":[{"_key":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","_newKey":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","age":40,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`), + []byte(`{"Address":[{"_docID":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_docIDNew":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}],"User":[{"_docID":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","_docIDNew":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","age":40,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`), 0664, ) require.NoError(t, err) @@ -345,7 +345,7 @@ func TestBasicExport_EnsureFileOverwrite_NoError(t *testing.T) { require.NoError(t, err) expectedMap := map[string]any{} - data := []byte(`{"Address":[{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}]}`) + data := []byte(`{"Address":[{"_docID":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_docIDNew":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}]}`) err = json.Unmarshal(data, &expectedMap) require.NoError(t, err) require.EqualValues(t, expectedMap, fileMap) @@ -375,7 +375,7 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) { err = os.WriteFile( filepath, - []byte(`{"Address":[{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple 
St"}],"User":[{"_key":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","_newKey":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","age":40,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`), + []byte(`{"Address":[{"_docID":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_docIDNew":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}],"User":[{"_docID":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","_docIDNew":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","age":40,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`), 0664, ) require.NoError(t, err) @@ -391,7 +391,7 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) { col1, err := db.getCollectionByName(ctx, txn, "Address") require.NoError(t, err) - key1, err := client.NewDocKeyFromString("bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f") + key1, err := client.NewDocIDFromString("bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f") require.NoError(t, err) _, err = col1.Get(ctx, key1, false) require.NoError(t, err) @@ -399,12 +399,12 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) { col2, err := db.getCollectionByName(ctx, txn, "User") require.NoError(t, err) - key2, err := client.NewDocKeyFromString("bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df") + key2, err := client.NewDocIDFromString("bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df") require.NoError(t, err) _, err = col2.Get(ctx, key2, false) require.NoError(t, err) - key3, err := client.NewDocKeyFromString("bae-e933420a-988a-56f8-8952-6c245aebd519") + key3, err := client.NewDocIDFromString("bae-e933420a-988a-56f8-8952-6c245aebd519") require.NoError(t, err) _, err = col2.Get(ctx, key3, false) require.NoError(t, err) @@ -434,7 +434,7 @@ func TestBasicImport_WithJSONArray_ReturnError(t *testing.T) { err = os.WriteFile( filepath, - []byte(`["Address":[{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}],"User":[{"_key":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","_newKey":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","age":40,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]]`), + []byte(`["Address":[{"_docID":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_docIDNew":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}],"User":[{"_docID":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","_docIDNew":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","age":40,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]]`), 0664, ) require.NoError(t, err) @@ -469,7 +469,7 @@ func TestBasicImport_WithObjectCollection_ReturnError(t *testing.T) { err = os.WriteFile( filepath, - []byte(`{"Address":{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}}`), + []byte(`{"Address":{"_docID":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_docIDNew":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}}`), 0664, ) require.NoError(t, err) @@ -504,7 +504,7 @@ func TestBasicImport_WithInvalidFilepath_ReturnError(t *testing.T) { err = os.WriteFile( filepath, - 
[]byte(`{"Address":{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}}`), + []byte(`{"Address":{"_docID":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_docIDNew":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}}`), 0664, ) require.NoError(t, err) @@ -540,7 +540,7 @@ func TestBasicImport_WithInvalidCollection_ReturnError(t *testing.T) { err = os.WriteFile( filepath, - []byte(`{"Addresses":{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}}`), + []byte(`{"Addresses":{"_docID":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_docIDNew":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}}`), 0664, ) require.NoError(t, err) diff --git a/db/base/collection_keys.go b/db/base/collection_keys.go index 6a762ff180..f32da872fe 100644 --- a/db/base/collection_keys.go +++ b/db/base/collection_keys.go @@ -17,18 +17,21 @@ import ( "github.com/sourcenetwork/defradb/core" ) -// MakeIndexPrefix generates a key prefix for the given collection/index descriptions -func MakeCollectionKey(col client.CollectionDescription) core.DataStoreKey { +// MakeDataStoreKeyWithCollectionDescription returns the datastore key for the given collection description. +func MakeDataStoreKeyWithCollectionDescription(col client.CollectionDescription) core.DataStoreKey { return core.DataStoreKey{ CollectionID: col.IDString(), } } -// MakeIndexKey generates a key for the target dockey, using the collection/index description -func MakeDocKey(col client.CollectionDescription, docKey string) core.DataStoreKey { +// MakeDataStoreKeyWithCollectionAndDocID returns the datastore key for the given docID and collection description. 
+func MakeDataStoreKeyWithCollectionAndDocID( + col client.CollectionDescription, + docID string, +) core.DataStoreKey { return core.DataStoreKey{ CollectionID: col.IDString(), - DocKey: docKey, + DocID: docID, } } @@ -41,14 +44,14 @@ func MakePrimaryIndexKeyForCRDT( ) (core.DataStoreKey, error) { switch ctype { case client.COMPOSITE: - return MakeCollectionKey(c).WithInstanceInfo(key).WithFieldId(core.COMPOSITE_NAMESPACE), nil + return MakeDataStoreKeyWithCollectionDescription(c).WithInstanceInfo(key).WithFieldId(core.COMPOSITE_NAMESPACE), nil case client.LWW_REGISTER: field, ok := c.GetFieldByName(fieldName, &schema) if !ok { return core.DataStoreKey{}, client.NewErrFieldNotExist(fieldName) } - return MakeCollectionKey(c).WithInstanceInfo(key).WithFieldId(fmt.Sprint(field.ID)), nil + return MakeDataStoreKeyWithCollectionDescription(c).WithInstanceInfo(key).WithFieldId(fmt.Sprint(field.ID)), nil } return core.DataStoreKey{}, ErrInvalidCrdtType } diff --git a/db/collection.go b/db/collection.go index 65b0fbaa22..f5b1cd3b27 100644 --- a/db/collection.go +++ b/db/collection.go @@ -173,7 +173,7 @@ func (db *db) updateSchema( if _, ok := schema.GetField(idFieldName); !ok { schema.Fields = append(schema.Fields, client.FieldDescription{ Name: idFieldName, - Kind: client.FieldKind_DocKey, + Kind: client.FieldKind_DocID, RelationType: client.Relation_Type_INTERNAL_ID, RelationName: field.RelationName, }) @@ -285,7 +285,7 @@ func validateUpdateSchemaFields( var existingField client.FieldDescription var fieldAlreadyExists bool if proposedField.ID != client.FieldID(0) || - proposedField.Name == request.KeyFieldName { + proposedField.Name == request.DocIDFieldName { existingField, fieldAlreadyExists = existingFieldsByID[proposedField.ID] } @@ -351,8 +351,8 @@ func validateUpdateSchemaFields( idFieldName := proposedField.Name + request.RelatedObjectID idField, idFieldFound := proposedDesc.GetField(idFieldName) if idFieldFound { - if idField.Kind != client.FieldKind_DocKey { - return false, NewErrRelationalFieldIDInvalidType(idField.Name, client.FieldKind_DocKey, idField.Kind) + if idField.Kind != client.FieldKind_DocID { + return false, NewErrRelationalFieldIDInvalidType(idField.Name, client.FieldKind_DocID, idField.Kind) } if idField.RelationType != client.Relation_Type_INTERNAL_ID { @@ -630,23 +630,23 @@ func (db *db) getAllCollections(ctx context.Context, txn datastore.Txn) ([]clien return collections, nil } -// GetAllDocKeys returns all the document keys that exist in the collection. +// GetAllDocIDs returns all the document IDs that exist in the collection. 
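
For reviewers, a minimal sketch of the call-site shape the renamed `db/base` helpers give (signatures as in the hunk above; obtaining the `client.CollectionDescription` is assumed setup, and the snippet only compiles inside this repo):

```
package example

import (
	"fmt"

	"github.com/sourcenetwork/defradb/client"
	"github.com/sourcenetwork/defradb/db/base"
)

func printKeyShapes(desc client.CollectionDescription) {
	// Prefix covering every document in the collection: /[CollectionID]
	prefix := base.MakeDataStoreKeyWithCollectionDescription(desc)

	// Key targeting a single document: /[CollectionID]/[DocID]
	docKey := base.MakeDataStoreKeyWithCollectionAndDocID(
		desc,
		"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f", // fixture docID from this diff
	)

	fmt.Println(prefix.ToString(), docKey.ToString())
}
```
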
// // @todo: We probably need a lock on the collection for this kind of op since // it hits every key and will cause Tx conflicts for concurrent Txs -func (c *collection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) { +func (c *collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) { txn, err := c.getTxn(ctx, true) if err != nil { return nil, err } - return c.getAllDocKeysChan(ctx, txn) + return c.getAllDocIDsChan(ctx, txn) } -func (c *collection) getAllDocKeysChan( +func (c *collection) getAllDocIDsChan( ctx context.Context, txn datastore.Txn, -) (<-chan client.DocKeysResult, error) { +) (<-chan client.DocIDResult, error) { prefix := core.PrimaryDataStoreKey{ // empty path for all keys prefix CollectionId: fmt.Sprint(c.ID()), } @@ -658,11 +658,11 @@ func (c *collection) getAllDocKeysChan( return nil, err } - resCh := make(chan client.DocKeysResult) + resCh := make(chan client.DocIDResult) go func() { defer func() { if err := q.Close(); err != nil { - log.ErrorE(ctx, "Failed to close AllDocKeys query", err) + log.ErrorE(ctx, errFailedtoCloseQueryReqAllIDs, err) } close(resCh) c.discardImplicitTxn(ctx, txn) @@ -677,23 +677,22 @@ func (c *collection) getAllDocKeysChan( // noop, just continue on the with the for loop } if res.Error != nil { - resCh <- client.DocKeysResult{ + resCh <- client.DocIDResult{ Err: res.Error, } return } - // now we have a doc key - rawDocKey := ds.NewKey(res.Key).BaseNamespace() - key, err := client.NewDocKeyFromString(rawDocKey) + rawDocID := ds.NewKey(res.Key).BaseNamespace() + docID, err := client.NewDocIDFromString(rawDocID) if err != nil { - resCh <- client.DocKeysResult{ + resCh <- client.DocIDResult{ Err: res.Error, } return } - resCh <- client.DocKeysResult{ - Key: key, + resCh <- client.DocIDResult{ + ID: docID, } } }() @@ -742,7 +741,7 @@ func (c *collection) WithTxn(txn datastore.Txn) client.Collection { } // Create a new document. -// Will verify the DocKey/CID to ensure that the new document is correctly formatted. +// Will verify the DocID/CID to ensure that the new document is correctly formatted. func (c *collection) Create(ctx context.Context, doc *client.Document) error { txn, err := c.getTxn(ctx, false) if err != nil { @@ -758,7 +757,7 @@ func (c *collection) Create(ctx context.Context, doc *client.Document) error { } // CreateMany creates a collection of documents at once. -// Will verify the DocKey/CID to ensure that the new documents are correctly formatted. +// Will verify the DocID/CID to ensure that the new documents are correctly formatted. 
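
For reviewers, this is how the renamed `GetAllDocIDs` stream is consumed, as a sketch (it assumes the `client.Collection` surface matches the concrete method above; acquiring the collection handle is elided):

```
package example

import (
	"context"
	"fmt"

	"github.com/sourcenetwork/defradb/client"
)

// listDocIDs drains the DocIDResult channel, mirroring the basicExport loop above.
func listDocIDs(ctx context.Context, col client.Collection) error {
	docIDsCh, err := col.GetAllDocIDs(ctx)
	if err != nil {
		return err
	}
	for res := range docIDsCh {
		if res.Err != nil {
			return res.Err // each result carries its own error
		}
		fmt.Println(res.ID.String())
	}
	return nil
}
```
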
func (c *collection) CreateMany(ctx context.Context, docs []*client.Document) error { txn, err := c.getTxn(ctx, false) if err != nil { @@ -775,29 +774,29 @@ func (c *collection) CreateMany(ctx context.Context, docs []*client.Document) er return c.commitImplicitTxn(ctx, txn) } -func (c *collection) getKeysFromDoc( +func (c *collection) getDocIDAndPrimaryKeyFromDoc( doc *client.Document, -) (client.DocKey, core.PrimaryDataStoreKey, error) { - docKey, err := doc.GenerateDocKey() +) (client.DocID, core.PrimaryDataStoreKey, error) { + docID, err := doc.GenerateDocID() if err != nil { - return client.DocKey{}, core.PrimaryDataStoreKey{}, err + return client.DocID{}, core.PrimaryDataStoreKey{}, err } - primaryKey := c.getPrimaryKeyFromDocKey(docKey) - if primaryKey.DocKey != doc.Key().String() { - return client.DocKey{}, core.PrimaryDataStoreKey{}, - NewErrDocVerification(doc.Key().String(), primaryKey.DocKey) + primaryKey := c.getPrimaryKeyFromDocID(docID) + if primaryKey.DocID != doc.ID().String() { + return client.DocID{}, core.PrimaryDataStoreKey{}, + NewErrDocVerification(doc.ID().String(), primaryKey.DocID) } - return docKey, primaryKey, nil + return docID, primaryKey, nil } func (c *collection) create(ctx context.Context, txn datastore.Txn, doc *client.Document) error { - // This has to be done before dockey verification happens in the next step. - if err := doc.RemapAliasFieldsAndDockey(c.Schema().Fields); err != nil { + // This has to be done before docID verification happens in the next step. + if err := doc.RemapAliasFieldsAndDocID(c.Schema().Fields); err != nil { return err } - dockey, primaryKey, err := c.getKeysFromDoc(doc) + docID, primaryKey, err := c.getDocIDAndPrimaryKeyFromDoc(doc) if err != nil { return err } @@ -808,15 +807,15 @@ func (c *collection) create(ctx context.Context, txn datastore.Txn, doc *client. return err } if exists { - return NewErrDocumentAlreadyExists(primaryKey.DocKey) + return NewErrDocumentAlreadyExists(primaryKey.DocID) } if isDeleted { - return NewErrDocumentDeleted(primaryKey.DocKey) + return NewErrDocumentDeleted(primaryKey.DocID) } // write value object marker if we have an empty doc if len(doc.Values()) == 0 { - valueKey := c.getDSKeyFromDockey(dockey) + valueKey := c.getDataStoreKeyFromDocID(docID) err = txn.Datastore().Put(ctx, valueKey.ToDS(), []byte{base.ObjectMarker}) if err != nil { return err @@ -842,7 +841,7 @@ func (c *collection) Update(ctx context.Context, doc *client.Document) error { } defer c.discardImplicitTxn(ctx, txn) - primaryKey := c.getPrimaryKeyFromDocKey(doc.Key()) + primaryKey := c.getPrimaryKeyFromDocID(doc.ID()) exists, isDeleted, err := c.exists(ctx, txn, primaryKey) if err != nil { return err @@ -851,7 +850,7 @@ func (c *collection) Update(ctx context.Context, doc *client.Document) error { return client.ErrDocumentNotFound } if isDeleted { - return NewErrDocumentDeleted(primaryKey.DocKey) + return NewErrDocumentDeleted(primaryKey.DocID) } err = c.update(ctx, txn, doc) @@ -862,7 +861,7 @@ func (c *collection) Update(ctx context.Context, doc *client.Document) error { return c.commitImplicitTxn(ctx, txn) } -// Contract: DB Exists check is already performed, and a doc with the given key exists. +// Contract: DB Exists check is already performed, and a doc with the given ID exists. // Note: Should we CompareAndSet the update, IE: Query(read-only) the state, and update if changed // or, just update everything regardless. 
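
For reviewers, the create path under the new naming at a call site, as a sketch (`client.NewDocFromMap` and `Create` are used exactly as in the hunks above; everything else is assumed setup):

```
package example

import (
	"context"
	"fmt"

	"github.com/sourcenetwork/defradb/client"
)

func createUser(ctx context.Context, col client.Collection) error {
	doc, err := client.NewDocFromMap(map[string]any{"name": "John", "age": 30})
	if err != nil {
		return err
	}
	if err := col.Create(ctx, doc); err != nil {
		return err
	}
	// doc.ID() replaces the old doc.Key() accessor.
	fmt.Println(doc.ID().String())
	return nil
}
```
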
// Should probably be smart about the update due to the MerkleCRDT overhead, shouldn't @@ -884,15 +883,15 @@ func (c *collection) Save(ctx context.Context, doc *client.Document) error { } defer c.discardImplicitTxn(ctx, txn) - // Check if document already exists with key - primaryKey := c.getPrimaryKeyFromDocKey(doc.Key()) + // Check if document already exists with primary DS key. + primaryKey := c.getPrimaryKeyFromDocID(doc.ID()) exists, isDeleted, err := c.exists(ctx, txn, primaryKey) if err != nil { return err } if isDeleted { - return NewErrDocumentDeleted(doc.Key().String()) + return NewErrDocumentDeleted(doc.ID().String()) } if exists { @@ -933,7 +932,7 @@ func (c *collection) save( // Loop through doc values // => instantiate MerkleCRDT objects // => Set/Publish new CRDT values - primaryKey := c.getPrimaryKeyFromDocKey(doc.Key()) + primaryKey := c.getPrimaryKeyFromDocID(doc.ID()) links := make([]core.DAGLink, 0) docProperties := make(map[string]any) for k, v := range doc.Fields() { @@ -958,7 +957,7 @@ func (c *collection) save( if isSecondaryRelationID { primaryId := val.Value().(string) - err = c.patchPrimaryDoc(ctx, txn, c.Name(), relationFieldDescription, primaryKey.DocKey, primaryId) + err = c.patchPrimaryDoc(ctx, txn, c.Name(), relationFieldDescription, primaryKey.DocID, primaryId) if err != nil { return cid.Undef, err } @@ -968,7 +967,7 @@ func (c *collection) save( continue } - err = c.validateOneToOneLinkDoesntAlreadyExist(ctx, txn, doc.Key().String(), fieldDescription, val.Value()) + err = c.validateOneToOneLinkDoesntAlreadyExist(ctx, txn, doc.ID().String(), fieldDescription, val.Value()) if err != nil { return cid.Undef, err } @@ -1017,7 +1016,7 @@ func (c *collection) save( func() { c.db.events.Updates.Value().Publish( events.Update{ - DocKey: doc.Key().String(), + DocID: doc.ID().String(), Cid: headNode.Cid(), SchemaRoot: c.Schema().Root, Block: headNode, @@ -1038,7 +1037,7 @@ func (c *collection) save( func (c *collection) validateOneToOneLinkDoesntAlreadyExist( ctx context.Context, txn datastore.Txn, - docKey string, + docID string, fieldDescription client.FieldDescription, value any, ) error { @@ -1060,8 +1059,8 @@ func (c *collection) validateOneToOneLinkDoesntAlreadyExist( filter := fmt.Sprintf( `{_and: [{%s: {_ne: "%s"}}, {%s: {_eq: "%s"}}]}`, - request.KeyFieldName, - docKey, + request.DocIDFieldName, + docID, fieldDescription.Name, value, ) @@ -1102,7 +1101,7 @@ func (c *collection) validateOneToOneLinkDoesntAlreadyExist( if err != nil { return err } - return NewErrOneOneAlreadyLinked(docKey, existingDocument.GetKey(), objFieldDescription.RelationName) + return NewErrOneOneAlreadyLinked(docID, existingDocument.GetID(), objFieldDescription.RelationName) } err = selectionPlan.Close() @@ -1113,18 +1112,18 @@ func (c *collection) validateOneToOneLinkDoesntAlreadyExist( return nil } -// Delete will attempt to delete a document by key will return true if a deletion is successful, -// and return false, along with an error, if it cannot. +// Delete will attempt to delete a document by docID and return true if a deletion is successful, +// otherwise will return false, along with an error, if it cannot. // If the document doesn't exist, then it will return false, and a ErrDocumentNotFound error. -// This operation will all state relating to the given DocKey. This includes data, block, and head storage. -func (c *collection) Delete(ctx context.Context, key client.DocKey) (bool, error) { +// This operation will remove all state relating to the given DocID.
This includes data, block, and head storage. +func (c *collection) Delete(ctx context.Context, docID client.DocID) (bool, error) { txn, err := c.getTxn(ctx, false) if err != nil { return false, err } defer c.discardImplicitTxn(ctx, txn) - primaryKey := c.getPrimaryKeyFromDocKey(key) + primaryKey := c.getPrimaryKeyFromDocID(docID) exists, isDeleted, err := c.exists(ctx, txn, primaryKey) if err != nil { return false, err @@ -1133,7 +1132,7 @@ func (c *collection) Delete(ctx context.Context, key client.DocKey) (bool, error return false, client.ErrDocumentNotFound } if isDeleted { - return false, NewErrDocumentDeleted(primaryKey.DocKey) + return false, NewErrDocumentDeleted(primaryKey.DocID) } err = c.applyDelete(ctx, txn, primaryKey) @@ -1143,15 +1142,15 @@ func (c *collection) Delete(ctx context.Context, key client.DocKey) (bool, error return true, c.commitImplicitTxn(ctx, txn) } -// Exists checks if a given document exists with supplied DocKey. -func (c *collection) Exists(ctx context.Context, key client.DocKey) (bool, error) { +// Exists checks if a given document exists with supplied DocID. +func (c *collection) Exists(ctx context.Context, docID client.DocID) (bool, error) { txn, err := c.getTxn(ctx, false) if err != nil { return false, err } defer c.discardImplicitTxn(ctx, txn) - primaryKey := c.getPrimaryKeyFromDocKey(key) + primaryKey := c.getPrimaryKeyFromDocID(docID) exists, isDeleted, err := c.exists(ctx, txn, primaryKey) if err != nil && !errors.Is(err, ds.ErrNotFound) { return false, err @@ -1159,13 +1158,13 @@ func (c *collection) Exists(ctx context.Context, key client.DocKey) (bool, error return exists && !isDeleted, c.commitImplicitTxn(ctx, txn) } -// check if a document exists with the given key +// check if a document exists with the given primary key func (c *collection) exists( ctx context.Context, txn datastore.Txn, - key core.PrimaryDataStoreKey, + primaryKey core.PrimaryDataStoreKey, ) (exists bool, isDeleted bool, err error) { - val, err := txn.Datastore().Get(ctx, key.ToDS()) + val, err := txn.Datastore().Get(ctx, primaryKey.ToDS()) if err != nil && errors.Is(err, ds.ErrNotFound) { return false, false, nil } else if err != nil { @@ -1181,7 +1180,7 @@ func (c *collection) exists( func (c *collection) saveFieldToMerkleCRDT( ctx context.Context, txn datastore.Txn, - key core.DataStoreKey, + dsKey core.DataStoreKey, val client.Value, ) (ipld.Node, uint64, error) { switch val.Type() { @@ -1201,7 +1200,7 @@ func (c *collection) saveFieldToMerkleCRDT( } } - fieldID, err := strconv.Atoi(key.FieldId) + fieldID, err := strconv.Atoi(dsKey.FieldId) if err != nil { return nil, 0, err } @@ -1216,7 +1215,7 @@ func (c *collection) saveFieldToMerkleCRDT( merkleCRDT := merklecrdt.NewMerkleLWWRegister( txn, core.NewCollectionSchemaVersionKey(schema.VersionID, c.ID()), - key, + dsKey, field.Name, ) @@ -1229,16 +1228,16 @@ func (c *collection) saveFieldToMerkleCRDT( func (c *collection) saveCompositeToMerkleCRDT( ctx context.Context, txn datastore.Txn, - key core.DataStoreKey, + dsKey core.DataStoreKey, buf []byte, links []core.DAGLink, status client.DocumentStatus, ) (ipld.Node, uint64, error) { - key = key.WithFieldId(core.COMPOSITE_NAMESPACE) + dsKey = dsKey.WithFieldId(core.COMPOSITE_NAMESPACE) merkleCRDT := merklecrdt.NewMerkleCompositeDAG( txn, core.NewCollectionSchemaVersionKey(c.Schema().VersionID, c.ID()), - key, + dsKey, "", ) @@ -1278,30 +1277,30 @@ func (c *collection) commitImplicitTxn(ctx context.Context, txn datastore.Txn) e return nil } -func (c *collection) 
getPrimaryKeyFromDocKey(docKey client.DocKey) core.PrimaryDataStoreKey { +func (c *collection) getPrimaryKeyFromDocID(docID client.DocID) core.PrimaryDataStoreKey { return core.PrimaryDataStoreKey{ CollectionId: fmt.Sprint(c.ID()), - DocKey: docKey.String(), + DocID: docID.String(), } } -func (c *collection) getDSKeyFromDockey(docKey client.DocKey) core.DataStoreKey { +func (c *collection) getDataStoreKeyFromDocID(docID client.DocID) core.DataStoreKey { return core.DataStoreKey{ CollectionID: fmt.Sprint(c.ID()), - DocKey: docKey.String(), + DocID: docID.String(), InstanceType: core.ValueKey, } } -func (c *collection) tryGetFieldKey(key core.PrimaryDataStoreKey, fieldName string) (core.DataStoreKey, bool) { +func (c *collection) tryGetFieldKey(primaryKey core.PrimaryDataStoreKey, fieldName string) (core.DataStoreKey, bool) { fieldId, hasField := c.tryGetSchemaFieldID(fieldName) if !hasField { return core.DataStoreKey{}, false } return core.DataStoreKey{ - CollectionID: key.CollectionId, - DocKey: key.DocKey, + CollectionID: primaryKey.CollectionId, + DocID: primaryKey.DocID, FieldId: strconv.FormatUint(uint64(fieldId), 10), }, true } diff --git a/db/collection_delete.go b/db/collection_delete.go index afa7d64a92..6dbf5dfa5e 100644 --- a/db/collection_delete.go +++ b/db/collection_delete.go @@ -24,11 +24,11 @@ import ( // DeleteWith deletes a target document. // -// Target can be a Filter statement, a single docKey, a single document, -// an array of docKeys, or an array of documents. +// Target can be a Filter statement, a single DocID, a single document, +// an array of DocIDs, or an array of documents. // // If you want more type safety, use the respective typed versions of Delete. -// Eg: DeleteWithFilter or DeleteWithKey +// Eg: DeleteWithFilter or DeleteWithDocID func (c *collection) DeleteWith( ctx context.Context, target any, @@ -36,19 +36,19 @@ func (c *collection) DeleteWith( switch t := target.(type) { case string, map[string]any, *request.Filter: return c.DeleteWithFilter(ctx, t) - case client.DocKey: - return c.DeleteWithKey(ctx, t) - case []client.DocKey: - return c.DeleteWithKeys(ctx, t) + case client.DocID: + return c.DeleteWithDocID(ctx, t) + case []client.DocID: + return c.DeleteWithDocIDs(ctx, t) default: return nil, client.ErrInvalidDeleteTarget } } -// DeleteWithKey deletes using a DocKey to target a single document for delete. -func (c *collection) DeleteWithKey( +// DeleteWithDocID deletes using a DocID to target a single document for delete. +func (c *collection) DeleteWithDocID( ctx context.Context, - key client.DocKey, + docID client.DocID, ) (*client.DeleteResult, error) { txn, err := c.getTxn(ctx, false) if err != nil { @@ -57,7 +57,7 @@ func (c *collection) DeleteWithKey( defer c.discardImplicitTxn(ctx, txn) - dsKey := c.getPrimaryKeyFromDocKey(key) + dsKey := c.getPrimaryKeyFromDocID(docID) res, err := c.deleteWithKey(ctx, txn, dsKey, client.Deleted) if err != nil { return nil, err @@ -66,10 +66,10 @@ func (c *collection) DeleteWithKey( return res, c.commitImplicitTxn(ctx, txn) } -// DeleteWithKeys is the same as DeleteWithKey but accepts multiple keys as a slice. -func (c *collection) DeleteWithKeys( +// DeleteWithDocIDs is the same as DeleteWithDocID but accepts multiple DocIDs as a slice. 
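
For reviewers, a sketch of the renamed delete-by-ID entry point (signatures as defined in this file; the docID string is a fixture value used elsewhere in this diff):

```
package example

import (
	"context"
	"fmt"

	"github.com/sourcenetwork/defradb/client"
)

func deleteOne(ctx context.Context, col client.Collection) error {
	docID, err := client.NewDocIDFromString("bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f")
	if err != nil {
		return err
	}
	res, err := col.DeleteWithDocID(ctx, docID)
	if err != nil {
		return err
	}
	// DeleteResult now reports DocIDs rather than DocKeys.
	fmt.Println(res.Count, res.DocIDs)
	return nil
}
```
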
+func (c *collection) DeleteWithDocIDs( ctx context.Context, - keys []client.DocKey, + docIDs []client.DocID, ) (*client.DeleteResult, error) { txn, err := c.getTxn(ctx, false) if err != nil { @@ -78,7 +78,7 @@ func (c *collection) DeleteWithKeys( defer c.discardImplicitTxn(ctx, txn) - res, err := c.deleteWithKeys(ctx, txn, keys, client.Deleted) + res, err := c.deleteWithIDs(ctx, txn, docIDs, client.Deleted) if err != nil { return nil, err } @@ -112,7 +112,7 @@ func (c *collection) deleteWithKey( key core.PrimaryDataStoreKey, status client.DocumentStatus, ) (*client.DeleteResult, error) { - // Check the docKey we have been given to delete with actually has a corresponding + // Check that the key we have been given to delete with actually has a corresponding // document (i.e. document actually exists in the collection). err := c.applyDelete(ctx, txn, key) if err != nil { @@ -121,38 +121,38 @@ func (c *collection) deleteWithKey( // Upon successfull deletion, record a summary. results := &client.DeleteResult{ - Count: 1, - DocKeys: []string{key.DocKey}, + Count: 1, + DocIDs: []string{key.DocID}, } return results, nil } -func (c *collection) deleteWithKeys( +func (c *collection) deleteWithIDs( ctx context.Context, txn datastore.Txn, - keys []client.DocKey, + docIDs []client.DocID, status client.DocumentStatus, ) (*client.DeleteResult, error) { results := &client.DeleteResult{ - DocKeys: make([]string, 0), + DocIDs: make([]string, 0), } - for _, key := range keys { - dsKey := c.getPrimaryKeyFromDocKey(key) + for _, docID := range docIDs { + primaryKey := c.getPrimaryKeyFromDocID(docID) // Apply the function that will perform the full deletion of this document. - err := c.applyDelete(ctx, txn, dsKey) + err := c.applyDelete(ctx, txn, primaryKey) if err != nil { return nil, err } - // Add this deleted key to our list. - results.DocKeys = append(results.DocKeys, key.String()) + // Add this deleted docID to our list. + results.DocIDs = append(results.DocIDs, docID.String()) } // Upon successfull deletion, record a summary of how many we deleted. - results.Count = int64(len(results.DocKeys)) + results.Count = int64(len(results.DocIDs)) return results, nil } @@ -186,7 +186,7 @@ func (c *collection) deleteWithFilter( }() results := &client.DeleteResult{ - DocKeys: make([]string, 0), + DocIDs: make([]string, 0), } // Keep looping until results from the selection plan have been iterated through. @@ -202,26 +202,26 @@ func (c *collection) deleteWithFilter( } doc := selectionPlan.Value() - // Extract the dockey in the string format from the document value. - docKey := doc.GetKey() - // Convert from string to client.DocKey. - key := core.PrimaryDataStoreKey{ + // Extract the docID in the string format from the document value. + docID := doc.GetID() + + primaryKey := core.PrimaryDataStoreKey{ CollectionId: fmt.Sprint(c.ID()), - DocKey: docKey, + DocID: docID, } - // Delete the document that is associated with this key we got from the filter. - err = c.applyDelete(ctx, txn, key) + // Delete the document that is associated with this DS key we got from the filter. + err = c.applyDelete(ctx, txn, primaryKey) if err != nil { return nil, err } - // Add key of successfully deleted document to our list. - results.DocKeys = append(results.DocKeys, docKey) + // Add docID of successfully deleted document to our list.
+ results.DocIDs = append(results.DocIDs, docID) } - results.Count = int64(len(results.DocKeys)) + results.Count = int64(len(results.DocIDs)) return results, nil } @@ -229,9 +229,9 @@ func (c *collection) deleteWithFilter( func (c *collection) applyDelete( ctx context.Context, txn datastore.Txn, - key core.PrimaryDataStoreKey, + primaryKey core.PrimaryDataStoreKey, ) error { - found, isDeleted, err := c.exists(ctx, txn, key) + found, isDeleted, err := c.exists(ctx, txn, primaryKey) if err != nil { return err } @@ -239,10 +239,10 @@ func (c *collection) applyDelete( return client.ErrDocumentNotFound } if isDeleted { - return NewErrDocumentDeleted(key.DocKey) + return NewErrDocumentDeleted(primaryKey.DocID) } - dsKey := key.ToDataStoreKey() + dsKey := primaryKey.ToDataStoreKey() headset := clock.NewHeadSet( txn.Headstore(), @@ -278,7 +278,7 @@ func (c *collection) applyDelete( func() { c.db.events.Updates.Value().Publish( events.Update{ - DocKey: key.DocKey, + DocID: primaryKey.DocID, Cid: headNode.Cid(), SchemaRoot: c.Schema().Root, Block: headNode, diff --git a/db/collection_get.go b/db/collection_get.go index d210072793..9ab14d4424 100644 --- a/db/collection_get.go +++ b/db/collection_get.go @@ -20,16 +20,16 @@ import ( "github.com/sourcenetwork/defradb/db/fetcher" ) -func (c *collection) Get(ctx context.Context, key client.DocKey, showDeleted bool) (*client.Document, error) { +func (c *collection) Get(ctx context.Context, docID client.DocID, showDeleted bool) (*client.Document, error) { // create txn txn, err := c.getTxn(ctx, true) if err != nil { return nil, err } defer c.discardImplicitTxn(ctx, txn) - dsKey := c.getPrimaryKeyFromDocKey(key) + primaryKey := c.getPrimaryKeyFromDocID(docID) - found, isDeleted, err := c.exists(ctx, txn, dsKey) + found, isDeleted, err := c.exists(ctx, txn, primaryKey) if err != nil { return nil, err } @@ -37,7 +37,7 @@ func (c *collection) Get(ctx context.Context, key client.DocKey, showDeleted boo return nil, client.ErrDocumentNotFound } - doc, err := c.get(ctx, txn, dsKey, nil, showDeleted) + doc, err := c.get(ctx, txn, primaryKey, nil, showDeleted) if err != nil { return nil, err } @@ -47,7 +47,7 @@ func (c *collection) Get(ctx context.Context, key client.DocKey, showDeleted boo func (c *collection) get( ctx context.Context, txn datastore.Txn, - key core.PrimaryDataStoreKey, + primaryKey core.PrimaryDataStoreKey, fields []client.FieldDescription, showDeleted bool, ) (*client.Document, error) { @@ -60,8 +60,8 @@ func (c *collection) get( return nil, err } - // construct target key for DocKey - targetKey := base.MakeDocKey(c.Description(), key.DocKey) + // construct target DS key from DocID. 
+ targetKey := base.MakeDataStoreKeyWithCollectionAndDocID(c.Description(), primaryKey.DocID) // run the doc fetcher err = df.Start(ctx, core.NewSpans(core.NewSpan(targetKey, targetKey.PrefixEnd()))) if err != nil { diff --git a/db/collection_index.go b/db/collection_index.go index 278586902b..c724205805 100644 --- a/db/collection_index.go +++ b/db/collection_index.go @@ -125,7 +125,7 @@ func (c *collection) updateIndexedDoc( oldDoc, err := c.get( ctx, txn, - c.getPrimaryKeyFromDocKey(doc.Key()), desc.CollectIndexedFields(&schema), + c.getPrimaryKeyFromDocID(doc.ID()), desc.CollectIndexedFields(&schema), false, ) if err != nil { @@ -239,7 +239,7 @@ func (c *collection) iterateAllDocs( _ = df.Close() return err } - start := base.MakeCollectionKey(c.Description()) + start := base.MakeDataStoreKeyWithCollectionDescription(c.Description()) spans := core.NewSpans(core.NewSpan(start, start.PrefixEnd())) err = df.Start(ctx, spans) diff --git a/db/collection_update.go b/db/collection_update.go index e6dbc4617a..bdfbc0ddd5 100644 --- a/db/collection_update.go +++ b/db/collection_update.go @@ -26,10 +26,10 @@ import ( ) // UpdateWith updates a target document using the given updater type. Target -// can be a Filter statement, a single docKey, a single document, -// an array of docKeys, or an array of documents. +// can be a Filter statement, a single DocID, a single document, +// an array of DocIDs, or an array of documents. // If you want more type safety, use the respective typed versions of Update. -// Eg: UpdateWithFilter or UpdateWithKey +// Eg: UpdateWithFilter or UpdateWithDocID func (c *collection) UpdateWith( ctx context.Context, target any, @@ -38,10 +38,10 @@ func (c *collection) UpdateWith( switch t := target.(type) { case string, map[string]any, *request.Filter: return c.UpdateWithFilter(ctx, t, updater) - case client.DocKey: - return c.UpdateWithKey(ctx, t, updater) - case []client.DocKey: - return c.UpdateWithKeys(ctx, t, updater) + case client.DocID: + return c.UpdateWithDocID(ctx, t, updater) + case []client.DocID: + return c.UpdateWithDocIDs(ctx, t, updater) default: return nil, client.ErrInvalidUpdateTarget } @@ -67,12 +67,12 @@ func (c *collection) UpdateWithFilter( return res, c.commitImplicitTxn(ctx, txn) } -// UpdateWithKey updates using a DocKey to target a single document for update. +// UpdateWithDocID updates using a DocID to target a single document for update. // An updater value is provided, which could be a string Patch, string Merge Patch // or a parsed Patch, or parsed Merge Patch. -func (c *collection) UpdateWithKey( +func (c *collection) UpdateWithDocID( ctx context.Context, - key client.DocKey, + docID client.DocID, updater string, ) (*client.UpdateResult, error) { txn, err := c.getTxn(ctx, false) @@ -80,7 +80,7 @@ func (c *collection) UpdateWithKey( return nil, err } defer c.discardImplicitTxn(ctx, txn) - res, err := c.updateWithKey(ctx, txn, key, updater) + res, err := c.updateWithDocID(ctx, txn, docID, updater) if err != nil { return nil, err } @@ -88,12 +88,12 @@ func (c *collection) UpdateWithKey( return res, c.commitImplicitTxn(ctx, txn) } -// UpdateWithKeys is the same as UpdateWithKey but accepts multiple keys as a slice. +// UpdateWithDocIDs is the same as UpdateWithDocID but accepts multiple DocIDs as a slice. // An updater value is provided, which could be a string Patch, string Merge Patch // or a parsed Patch, or parsed Merge Patch. 
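
For reviewers, the single-document update path after the rename, as a sketch (the JSON merge patch form of `updater` is as documented above; the docID is a fixture value from this diff):

```
package example

import (
	"context"
	"fmt"

	"github.com/sourcenetwork/defradb/client"
)

func bumpAge(ctx context.Context, col client.Collection) error {
	docID, err := client.NewDocIDFromString("bae-e933420a-988a-56f8-8952-6c245aebd519")
	if err != nil {
		return err
	}
	res, err := col.UpdateWithDocID(ctx, docID, `{"age": 31}`)
	if err != nil {
		return err
	}
	fmt.Println(res.Count, res.DocIDs) // UpdateResult mirrors the DeleteResult renaming
	return nil
}
```
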
-func (c *collection) UpdateWithKeys( +func (c *collection) UpdateWithDocIDs( ctx context.Context, - keys []client.DocKey, + docIDs []client.DocID, updater string, ) (*client.UpdateResult, error) { txn, err := c.getTxn(ctx, false) @@ -101,7 +101,7 @@ func (c *collection) UpdateWithKeys( return nil, err } defer c.discardImplicitTxn(ctx, txn) - res, err := c.updateWithKeys(ctx, txn, keys, updater) + res, err := c.updateWithIDs(ctx, txn, docIDs, updater) if err != nil { return nil, err } @@ -109,10 +109,10 @@ func (c *collection) UpdateWithKeys( return res, c.commitImplicitTxn(ctx, txn) } -func (c *collection) updateWithKey( +func (c *collection) updateWithDocID( ctx context.Context, txn datastore.Txn, - key client.DocKey, + docID client.DocID, updater string, ) (*client.UpdateResult, error) { parsedUpdater, err := fastjson.Parse(updater) @@ -127,7 +127,7 @@ func (c *collection) updateWithKey( return nil, client.ErrInvalidUpdater } - doc, err := c.Get(ctx, key, false) + doc, err := c.Get(ctx, docID, false) if err != nil { return nil, err } @@ -147,16 +147,16 @@ func (c *collection) updateWithKey( } results := &client.UpdateResult{ - Count: 1, - DocKeys: []string{key.String()}, + Count: 1, + DocIDs: []string{docID.String()}, } return results, nil } -func (c *collection) updateWithKeys( +func (c *collection) updateWithIDs( ctx context.Context, txn datastore.Txn, - keys []client.DocKey, + docIDs []client.DocID, updater string, ) (*client.UpdateResult, error) { parsedUpdater, err := fastjson.Parse(updater) @@ -172,10 +172,10 @@ func (c *collection) updateWithKeys( } results := &client.UpdateResult{ - DocKeys: make([]string, len(keys)), + DocIDs: make([]string, len(docIDs)), } - for i, key := range keys { - doc, err := c.Get(ctx, key, false) + for i, docID := range docIDs { + doc, err := c.Get(ctx, docID, false) if err != nil { return nil, err } @@ -194,7 +194,7 @@ func (c *collection) updateWithKeys( return nil, err } - results.DocKeys[i] = key.String() + results.DocIDs[i] = docID.String() results.Count++ } return results, nil @@ -245,7 +245,7 @@ func (c *collection) updateWithFilter( }() results := &client.UpdateResult{ - DocKeys: make([]string, 0), + DocIDs: make([]string, 0), } docMap := selectionPlan.DocumentMap() @@ -283,7 +283,7 @@ func (c *collection) updateWithFilter( } // add successful updated doc to results - results.DocKeys = append(results.DocKeys, doc.Key().String()) + results.DocIDs = append(results.DocIDs, doc.ID().String()) results.Count++ } @@ -341,21 +341,21 @@ func (c *collection) isSecondaryIDField(fieldDesc client.FieldDescription) (clie return relationFieldDescription, valid && !relationFieldDescription.IsPrimaryRelation() } -// patchPrimaryDoc patches the (primary) document linked to from the document of the given dockey via the +// patchPrimaryDoc patches the (primary) document linked to from the document of the given DocID via the // given (secondary) relationship field description (hosted on the collection of the document matching the -// given dockey). +// given DocID). // -// The given field value should be the string representation of the dockey of the primary document to be +// The given field value should be the string representation of the DocID of the primary document to be // patched.
func (c *collection) patchPrimaryDoc( ctx context.Context, txn datastore.Txn, secondaryCollectionName string, relationFieldDescription client.FieldDescription, - docKey string, + docID string, fieldValue string, ) error { - primaryDockey, err := client.NewDocKeyFromString(fieldValue) + primaryDocID, err := client.NewDocIDFromString(fieldValue) if err != nil { return err } @@ -384,7 +384,7 @@ func (c *collection) patchPrimaryDoc( doc, err := primaryCol.Get( ctx, - primaryDockey, + primaryDocID, false, ) if err != nil && !errors.Is(err, ds.ErrNotFound) { @@ -401,11 +401,11 @@ func (c *collection) patchPrimaryDoc( return err } - if existingVal != nil && existingVal.Value() != "" && existingVal.Value() != docKey { - return NewErrOneOneAlreadyLinked(docKey, fieldValue, relationFieldDescription.RelationName) + if existingVal != nil && existingVal.Value() != "" && existingVal.Value() != docID { + return NewErrOneOneAlreadyLinked(docID, fieldValue, relationFieldDescription.RelationName) } - err = doc.Set(primaryIDField.Name, docKey) + err = doc.Set(primaryIDField.Name, docID) if err != nil { return err } @@ -424,7 +424,7 @@ func (c *collection) patchPrimaryDoc( // the typed value again as an interface. func validateFieldSchema(val *fastjson.Value, field client.FieldDescription) (any, error) { switch field.Kind { - case client.FieldKind_DocKey, client.FieldKind_STRING: + case client.FieldKind_DocID, client.FieldKind_STRING: return getString(val) case client.FieldKind_STRING_ARRAY: diff --git a/db/errors.go b/db/errors.go index 67f74db296..ca5e09e107 100644 --- a/db/errors.go +++ b/db/errors.go @@ -47,8 +47,8 @@ const ( errFieldKindNotFound string = "no type found for given name" errFieldKindDoesNotMatchFieldSchema string = "field Kind does not match field Schema" errSchemaNotFound string = "no schema found for given name" - errDocumentAlreadyExists string = "a document with the given dockey already exists" - errDocumentDeleted string = "a document with the given dockey has been deleted" + errDocumentAlreadyExists string = "a document with the given ID already exists" + errDocumentDeleted string = "a document with the given ID has been deleted" errIndexMissingFields string = "index missing fields" errNonZeroIndexIDProvided string = "non-zero index ID provided" errIndexFieldMissingName string = "index field missing name" @@ -73,9 +73,10 @@ const ( errIndexDescHasNonExistingField string = "index description has non existing field" errFieldOrAliasToFieldNotExist string = "The given field or alias to field does not exist" errCreateFile string = "failed to create file" + errRemoveFile string = "failed to remove file" errOpenFile string = "failed to open file" errCloseFile string = "failed to close file" - errRemoveFile string = "failed to remove file" + errFailedtoCloseQueryReqAllIDs string = "failed to close query requesting all docIDs" errFailedToReadByte string = "failed to read byte" errFailedToWriteString string = "failed to write string" errJSONDecode string = "failed to decode JSON" @@ -91,82 +92,23 @@ const ( ) var ( - ErrFailedToGetHeads = errors.New(errFailedToGetHeads) - ErrFailedToCreateCollectionQuery = errors.New(errFailedToCreateCollectionQuery) - ErrFailedToGetCollection = errors.New(errFailedToGetCollection) - ErrFailedToGetAllCollections = errors.New(errFailedToGetAllCollections) - // ErrDocVerification occurs when a documents contents fail the verification during a Create() - // call against the supplied Document Key. 
- ErrDocVerification = errors.New(errDocVerification) - ErrSubscriptionsNotAllowed = errors.New("server does not accept subscriptions") - ErrDeleteTargetEmpty = errors.New("the doc delete targeter cannot be empty") - ErrDeleteEmpty = errors.New("the doc delete cannot be empty") - ErrUpdateTargetEmpty = errors.New("the doc update targeter cannot be empty") - ErrUpdateEmpty = errors.New("the doc update cannot be empty") - ErrInvalidMergeValueType = errors.New( - "the type of value in the merge patch doesn't match the schema", - ) - ErrMissingDocFieldToUpdate = errors.New("missing document field to update") - ErrDocMissingKey = errors.New("document is missing key") - ErrInvalidFilter = errors.New("invalid filter") - ErrInvalidOpPath = errors.New("invalid patch op path") - ErrDocumentAlreadyExists = errors.New(errDocumentAlreadyExists) - ErrDocumentDeleted = errors.New(errDocumentDeleted) - ErrUnknownCRDTArgument = errors.New("invalid CRDT arguments") - ErrCollectionAlreadyExists = errors.New("collection already exists") - ErrCollectionNameEmpty = errors.New("collection name can't be empty") - ErrSchemaNameEmpty = errors.New("schema name can't be empty") - ErrSchemaRootEmpty = errors.New("schema root can't be empty") - ErrSchemaVersionIDEmpty = errors.New("schema version ID can't be empty") - ErrKeyEmpty = errors.New("key cannot be empty") - ErrAddingP2PCollection = errors.New(errAddingP2PCollection) - ErrRemovingP2PCollection = errors.New(errRemovingP2PCollection) - ErrAddCollectionWithPatch = errors.New(errAddCollectionWithPatch) - ErrCollectionIDDoesntMatch = errors.New(errCollectionIDDoesntMatch) - ErrSchemaRootDoesntMatch = errors.New(errSchemaRootDoesntMatch) - ErrCannotModifySchemaName = errors.New(errCannotModifySchemaName) - ErrCannotSetVersionID = errors.New(errCannotSetVersionID) - ErrCannotSetFieldID = errors.New(errCannotSetFieldID) - ErrRelationalFieldMissingSchema = errors.New(errRelationalFieldMissingSchema) - ErrRelationalFieldInvalidRelationType = errors.New(errRelationalFieldInvalidRelationType) - ErrRelationalFieldMissingIDField = errors.New(errRelationalFieldMissingIDField) - ErrRelationalFieldMissingRelationName = errors.New(errRelationalFieldMissingRelationName) - ErrPrimarySideNotDefined = errors.New(errPrimarySideNotDefined) - ErrPrimarySideOnMany = errors.New(errPrimarySideOnMany) - ErrBothSidesPrimary = errors.New(errBothSidesPrimary) - ErrRelatedFieldKindMismatch = errors.New(errRelatedFieldKindMismatch) - ErrRelatedFieldRelationTypeMismatch = errors.New(errRelatedFieldRelationTypeMismatch) - ErrRelationalFieldIDInvalidType = errors.New(errRelationalFieldIDInvalidType) - ErrDuplicateField = errors.New(errDuplicateField) - ErrCannotMutateField = errors.New(errCannotMutateField) - ErrCannotMoveField = errors.New(errCannotMoveField) - ErrInvalidCRDTType = errors.New(errInvalidCRDTType) - ErrCannotDeleteField = errors.New(errCannotDeleteField) - ErrFieldKindNotFound = errors.New(errFieldKindNotFound) - ErrFieldKindDoesNotMatchFieldSchema = errors.New(errFieldKindDoesNotMatchFieldSchema) - ErrSchemaNotFound = errors.New(errSchemaNotFound) - ErrIndexMissingFields = errors.New(errIndexMissingFields) - ErrIndexFieldMissingName = errors.New(errIndexFieldMissingName) - ErrIndexFieldMissingDirection = errors.New(errIndexFieldMissingDirection) - ErrIndexSingleFieldWrongDirection = errors.New(errIndexSingleFieldWrongDirection) - ErrCorruptedIndex = errors.New(errCorruptedIndex) - ErrCanNotChangeIndexWithPatch = errors.New(errCanNotChangeIndexWithPatch) - 
ErrFieldOrAliasToFieldNotExist = errors.New(errFieldOrAliasToFieldNotExist) - ErrCreateFile = errors.New(errCreateFile) - ErrOpenFile = errors.New(errOpenFile) - ErrCloseFile = errors.New(errCloseFile) - ErrRemoveFile = errors.New(errRemoveFile) - ErrFailedToReadByte = errors.New(errFailedToReadByte) - ErrFailedToWriteString = errors.New(errFailedToWriteString) - ErrJSONDecode = errors.New(errJSONDecode) - ErrDocFromMap = errors.New(errDocFromMap) - ErrDocCreate = errors.New(errDocCreate) - ErrDocUpdate = errors.New(errDocUpdate) - ErrExpectedJSONObject = errors.New(errExpectedJSONObject) - ErrExpectedJSONArray = errors.New(errExpectedJSONArray) - ErrOneOneAlreadyLinked = errors.New(errOneOneAlreadyLinked) - ErrIndexDoesNotMatchName = errors.New(errIndexDoesNotMatchName) - ErrInvalidViewQuery = errors.New(errInvalidViewQuery) + ErrFailedToGetCollection = errors.New(errFailedToGetCollection) + ErrSubscriptionsNotAllowed = errors.New("server does not accept subscriptions") + ErrInvalidFilter = errors.New("invalid filter") + ErrCollectionAlreadyExists = errors.New("collection already exists") + ErrCollectionNameEmpty = errors.New("collection name can't be empty") + ErrSchemaNameEmpty = errors.New("schema name can't be empty") + ErrSchemaRootEmpty = errors.New("schema root can't be empty") + ErrSchemaVersionIDEmpty = errors.New("schema version ID can't be empty") + ErrKeyEmpty = errors.New("key cannot be empty") + ErrCannotSetVersionID = errors.New(errCannotSetVersionID) + ErrIndexMissingFields = errors.New(errIndexMissingFields) + ErrIndexFieldMissingName = errors.New(errIndexFieldMissingName) + ErrIndexSingleFieldWrongDirection = errors.New(errIndexSingleFieldWrongDirection) + ErrCorruptedIndex = errors.New(errCorruptedIndex) + ErrExpectedJSONObject = errors.New(errExpectedJSONObject) + ErrExpectedJSONArray = errors.New(errExpectedJSONArray) + ErrInvalidViewQuery = errors.New(errInvalidViewQuery) ) // NewErrFieldOrAliasToFieldNotExist returns an error indicating that the given field or an alias field does not exist. @@ -245,6 +187,9 @@ func NewErrFailedToGetAllCollections(inner error) error { } // NewErrDocVerification returns a new error indicating that the document verification failed. +// +// This occurs when a document's contents fail the verification during a Create() +// call against the supplied Document ID (docID).
func NewErrDocVerification(expected string, actual string) error { return errors.New( errDocVerification, @@ -450,17 +395,17 @@ func NewErrCannotDeleteField(name string, id client.FieldID) error { ) } -func NewErrDocumentAlreadyExists(dockey string) error { +func NewErrDocumentAlreadyExists(docID string) error { return errors.New( errDocumentAlreadyExists, - errors.NewKV("DocKey", dockey), + errors.NewKV("DocID", docID), ) } -func NewErrDocumentDeleted(dockey string) error { +func NewErrDocumentDeleted(docID string) error { return errors.New( errDocumentDeleted, - errors.NewKV("DocKey", dockey), + errors.NewKV("DocID", docID), ) } @@ -635,10 +580,10 @@ func NewErrIndexDoesNotMatchName(index, name string) error { ) } -func NewErrCanNotIndexNonUniqueField(dockey, fieldName string, value any) error { +func NewErrCanNotIndexNonUniqueField(docID, fieldName string, value any) error { return errors.New( errCanNotIndexNonUniqueField, - errors.NewKV("Dockey", dockey), + errors.NewKV("DocID", docID), errors.NewKV("Field name", fieldName), errors.NewKV("Field value", value), ) diff --git a/db/fetcher/encoded_doc.go b/db/fetcher/encoded_doc.go index bc22471465..dc9291fb0d 100644 --- a/db/fetcher/encoded_doc.go +++ b/db/fetcher/encoded_doc.go @@ -19,16 +19,20 @@ import ( ) type EncodedDocument interface { - // Key returns the key of the document - Key() []byte + // ID returns the ID of the document + ID() []byte + SchemaVersionID() string + // Status returns the document status. // // For example, whether it is deleted or active. Status() client.DocumentStatus + // Properties returns a copy of the decoded property values mapped by their field // description. Properties(onlyFilterProps bool) (map[client.FieldDescription]any, error) + // Reset re-initializes the EncodedDocument object. Reset() } @@ -61,7 +65,7 @@ func (e encProperty) Decode() (any, error) { // @todo: Implement Encoded Document type type encodedDocument struct { - key []byte + id []byte schemaVersionID string status client.DocumentStatus properties map[client.FieldDescription]*encProperty @@ -78,8 +82,8 @@ type encodedDocument struct { var _ EncodedDocument = (*encodedDocument)(nil) -func (encdoc *encodedDocument) Key() []byte { - return encdoc.key +func (encdoc *encodedDocument) ID() []byte { + return encdoc.id } func (encdoc *encodedDocument) SchemaVersionID() string { @@ -93,7 +97,7 @@ func (encdoc *encodedDocument) Status() client.DocumentStatus { // Reset re-initializes the EncodedDocument object. 
func (encdoc *encodedDocument) Reset() { encdoc.properties = make(map[client.FieldDescription]*encProperty, 0) - encdoc.key = nil + encdoc.id = nil encdoc.filterSet = nil encdoc.selectSet = nil encdoc.schemaVersionID = "" @@ -103,12 +107,12 @@ func (encdoc *encodedDocument) Reset() { // Decode returns a properly decoded document object func Decode(encdoc EncodedDocument) (*client.Document, error) { - key, err := client.NewDocKeyFromString(string(encdoc.Key())) + docID, err := client.NewDocIDFromString(string(encdoc.ID())) if err != nil { return nil, err } - doc := client.NewDocWithKey(key) + doc := client.NewDocWithID(docID) properties, err := encdoc.Properties(false) if err != nil { return nil, err @@ -141,8 +145,8 @@ func (encdoc *encodedDocument) MergeProperties(other EncodedDocument) { for field, prop := range otherEncDoc.properties { encdoc.properties[field] = prop } - if other.Key() != nil { - encdoc.key = other.Key() + if other.ID() != nil { + encdoc.id = other.ID() } if other.SchemaVersionID() != "" { encdoc.schemaVersionID = other.SchemaVersionID() @@ -153,7 +157,7 @@ func (encdoc *encodedDocument) MergeProperties(other EncodedDocument) { // map of field/value pairs func DecodeToDoc(encdoc EncodedDocument, mapping *core.DocumentMapping, filter bool) (core.Doc, error) { doc := mapping.NewDoc() - doc.SetKey(string(encdoc.Key())) + doc.SetID(string(encdoc.ID())) properties, err := encdoc.Properties(filter) if err != nil { diff --git a/db/fetcher/fetcher.go b/db/fetcher/fetcher.go index da7a0df1e1..a9cb39d9d5 100644 --- a/db/fetcher/fetcher.go +++ b/db/fetcher/fetcher.go @@ -127,7 +127,7 @@ type DocumentFetcher struct { // Since deleted documents are stored under a different instance type than active documents, // we use a parallel fetcher to be able to return the documents in the expected order. - // That being lexicographically ordered dockeys. + // That being lexicographically ordered docIDs. 
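+	// For example (an illustrative note): with active docIDs [bae-a, bae-c]
+	// and deleted docIDs [bae-b], FetchNext is expected to yield bae-a, then
+	// bae-b, then bae-c when deleted documents are included.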
deletedDocFetcher *DocumentFetcher execInfo ExecInfo @@ -250,7 +250,7 @@ func (df *DocumentFetcher) start(ctx context.Context, spans core.Spans, withDele df.deletedDocs = withDeleted if !spans.HasValue { // no specified spans so create a prefix scan key for the entire collection - start := base.MakeCollectionKey(df.col.Description()) + start := base.MakeDataStoreKeyWithCollectionDescription(df.col.Description()) if withDeleted { start = start.WithDeletedFlag() } else { @@ -334,7 +334,7 @@ func (df *DocumentFetcher) nextKey(ctx context.Context, seekNext bool) (spanDone if seekNext { curKey := df.kv.Key - curKey.FieldId = "" // clear field so prefixEnd applies to dockey + curKey.FieldId = "" // clear field so prefixEnd applies to docID seekKey := curKey.PrefixEnd().ToString() spanDone, df.kv, err = df.seekKV(seekKey) // handle any internal errors @@ -370,7 +370,7 @@ func (df *DocumentFetcher) nextKey(ctx context.Context, seekNext bool) (spanDone } // check if we've crossed document boundries - if (df.doc.key != nil && df.kv.Key.DocKey != string(df.doc.key)) || seekNext { + if (df.doc.id != nil && df.kv.Key.DocID != string(df.doc.id)) || seekNext { df.isReadingDocument = false return false, true, nil } @@ -472,10 +472,10 @@ func (df *DocumentFetcher) processKV(kv *keyValue) error { if df.filterSet != nil { df.doc.filterSet = bitset.New(df.filterSet.Len()) if df.filterSet.Test(0) { - df.doc.filterSet.Set(0) // mark dockey as set + df.doc.filterSet.Set(0) // mark docID as set } } - df.doc.key = []byte(kv.Key.DocKey) + df.doc.id = []byte(kv.Key.DocID) df.passedFilter = false df.ranFilter = false @@ -534,15 +534,15 @@ func (df *DocumentFetcher) FetchNext(ctx context.Context) (EncodedDocument, Exec var resultExecInfo ExecInfo // If the deletedDocFetcher isn't nil, this means that the user requested to include the deleted documents - // in the query. To keep the active and deleted docs in lexicographic order of dockeys, we use the two distinct - // fetchers and fetch the one that has the next lowest (or highest if requested in reverse order) dockey value. + // in the query. To keep the active and deleted docs in lexicographic order of docIDs, we use the two distinct + // fetchers and fetch the one that has the next lowest (or highest if requested in reverse order) docID value. ddf := df.deletedDocFetcher if ddf != nil { // If we've reached the end of the deleted docs, we can skip to getting the next active docs. 
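+		// (An illustrative note:) deleted and active documents live under
+		// different instance types, so a single scan cannot interleave them;
+		// the comparison below merges the two ordered streams.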
if !ddf.kvEnd {
 if df.kvEnd ||
- (df.reverse && ddf.kv.Key.DocKey > df.kv.Key.DocKey) ||
- (!df.reverse && ddf.kv.Key.DocKey < df.kv.Key.DocKey) {
+ (df.reverse && ddf.kv.Key.DocID > df.kv.Key.DocID) ||
+ (!df.reverse && ddf.kv.Key.DocID < df.kv.Key.DocID) {
 encdoc, execInfo, err := ddf.FetchNext(ctx)
 if err != nil {
 return nil, ExecInfo{}, err
@@ -573,7 +573,7 @@ func (df *DocumentFetcher) fetchNext(ctx context.Context) (EncodedDocument, Exec
 if df.kv == nil {
 return nil, ExecInfo{}, client.NewErrUninitializeProperty("DocumentFetcher", "kv")
 }
- // save the DocKey of the current kv pair so we can track when we cross the doc pair boundries
+ // save the DocID of the current kv pair so we can track when we cross the doc pair boundaries
 // keyparts := df.kv.Key.List()
 // key := keyparts[len(keyparts)-2]
diff --git a/db/fetcher/indexer.go b/db/fetcher/indexer.go
index 6b4833d00f..b8608e2b7d 100644
--- a/db/fetcher/indexer.go
+++ b/db/fetcher/indexer.go
@@ -129,15 +129,15 @@ func (f *IndexFetcher) FetchNext(ctx context.Context) (EncodedDocument, ExecInfo
 }
 if f.indexDesc.Unique {
- f.doc.key = res.value
+ f.doc.id = res.value
 } else {
- f.doc.key = res.key.FieldValues[1]
+ f.doc.id = res.key.FieldValues[1]
 }
 f.doc.properties[f.indexedField] = property
 f.execInfo.FieldsFetched++
 if f.docFetcher != nil && len(f.docFields) > 0 {
- targetKey := base.MakeDocKey(f.col.Description(), string(f.doc.key))
+ targetKey := base.MakeDataStoreKeyWithCollectionAndDocID(f.col.Description(), string(f.doc.id))
 spans := core.NewSpans(core.NewSpan(targetKey, targetKey.PrefixEnd()))
 err := f.docFetcher.Start(ctx, spans)
 if err != nil {
diff --git a/db/fetcher/mocks/encoded_document.go b/db/fetcher/mocks/encoded_document.go
index 538d32ff4d..5d9382a14d 100644
--- a/db/fetcher/mocks/encoded_document.go
+++ b/db/fetcher/mocks/encoded_document.go
@@ -21,8 +21,8 @@ func (_m *EncodedDocument) EXPECT() *EncodedDocument_Expecter {
 return &EncodedDocument_Expecter{mock: &_m.Mock}
}
-// Key provides a mock function with given fields:
-func (_m *EncodedDocument) Key() []byte {
+// ID provides a mock function with given fields:
+func (_m *EncodedDocument) ID() []byte {
 ret := _m.Called()
 var r0 []byte
@@ -37,29 +37,29 @@ func (_m *EncodedDocument) Key() []byte {
 return r0
}
-// EncodedDocument_Key_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Key'
-type EncodedDocument_Key_Call struct {
+// EncodedDocument_ID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ID'
+type EncodedDocument_ID_Call struct {
 *mock.Call
}
-// Key is a helper method to define mock.On call
-func (_e *EncodedDocument_Expecter) Key() *EncodedDocument_Key_Call {
- return &EncodedDocument_Key_Call{Call: _e.mock.On("Key")}
+// ID is a helper method to define mock.On call
+func (_e *EncodedDocument_Expecter) ID() *EncodedDocument_ID_Call {
+ return &EncodedDocument_ID_Call{Call: _e.mock.On("ID")}
}
-func (_c *EncodedDocument_Key_Call) Run(run func()) *EncodedDocument_Key_Call {
+func (_c *EncodedDocument_ID_Call) Run(run func()) *EncodedDocument_ID_Call {
 _c.Call.Run(func(args mock.Arguments) {
 run()
 })
 return _c
}
-func (_c *EncodedDocument_Key_Call) Return(_a0 []byte) *EncodedDocument_Key_Call {
+func (_c *EncodedDocument_ID_Call) Return(_a0 []byte) *EncodedDocument_ID_Call {
 _c.Call.Return(_a0)
 return _c
}
-func (_c *EncodedDocument_Key_Call) RunAndReturn(run func() []byte) *EncodedDocument_Key_Call {
+func (_c *EncodedDocument_ID_Call) RunAndReturn(run func() []byte)
*EncodedDocument_ID_Call {
 _c.Call.Return(run)
 return _c
}
diff --git a/db/fetcher/versioned.go b/db/fetcher/versioned.go
index 454bcf17c6..fc232bb9c7 100644
--- a/db/fetcher/versioned.go
+++ b/db/fetcher/versioned.go
@@ -86,7 +86,7 @@ type VersionedFetcher struct {
 root datastore.RootStore
 store datastore.Txn
- key core.DataStoreKey
+ dsKey core.DataStoreKey
 version cid.Cid
 queuedCids *list.List
@@ -144,25 +144,25 @@ func (vf *VersionedFetcher) Start(ctx context.Context, spans core.Spans) error {
 }
 // For the VersionedFetcher, the spans needs to be in the format
- // Span{Start: DocKey, End: CID}
+ // Span{Start: DocID, End: CID}
 dk := spans.Value[0].Start()
 cidRaw := spans.Value[0].End()
- if dk.DocKey == "" {
- return client.NewErrUninitializeProperty("Spans", "DocKey")
- } else if cidRaw.DocKey == "" { // todo: dont abuse DataStoreKey/Span like this!
+ if dk.DocID == "" {
+ return client.NewErrUninitializeProperty("Spans", "DocID")
+ } else if cidRaw.DocID == "" { // todo: dont abuse DataStoreKey/Span like this!
 return client.NewErrUninitializeProperty("Spans", "CID")
 }
 // decode cidRaw from core.Key to cid.Cid
 // need to remove '/' prefix from the core.Key
- c, err := cid.Decode(cidRaw.DocKey)
+ c, err := cid.Decode(cidRaw.DocID)
 if err != nil {
 return NewErrFailedToDecodeCIDForVFetcher(err)
 }
 vf.ctx = ctx
- vf.key = dk
+ vf.dsKey = dk
 vf.version = c
 if err := vf.seekTo(vf.version); err != nil {
@@ -180,7 +180,7 @@ func (vf *VersionedFetcher) Rootstore() ds.Datastore {
// Start a fetcher with the needed info (cid embedded in a span)
/*
-1. Init with DocKey (VersionedFetched is scoped to a single doc)
+1. Init with DocID (VersionedFetcher is scoped to a single doc)
2. - Create transient stores (head, data, block)
3. Start with a given Txn and CID span set (length 1 for now)
4. call traverse with the target cid
@@ -258,8 +258,8 @@ func (vf *VersionedFetcher) seekNext(c cid.Cid, topParent bool) error {
 // check if cid block exists in the global store, handle err
 // @todo: Find an efficient way to determine if a CID is a member of a
- // DocKey State graph
- // @body: We could possibly append the DocKey to the CID either as a
+ // DocID State graph
+ // @body: We could possibly append the DocID to the CID either as a
 // child key, or an instance on the CID key.
 hasLocalBlock, err := vf.store.DAGstore().Has(vf.ctx, c)
@@ -380,7 +380,7 @@ func (vf *VersionedFetcher) processNode(
 // handle CompositeDAG
 mcrdt, exists := vf.mCRDTs[crdtIndex]
 if !exists {
- key, err := base.MakePrimaryIndexKeyForCRDT(vf.col.Description(), vf.col.Schema(), ctype, vf.key, fieldName)
+ dsKey, err := base.MakePrimaryIndexKeyForCRDT(vf.col.Description(), vf.col.Schema(), ctype, vf.dsKey, fieldName)
 if err != nil {
 return err
 }
@@ -388,7 +388,7 @@ func (vf *VersionedFetcher) processNode(
 vf.store,
 core.CollectionSchemaVersionKey{},
 ctype,
- key,
+ dsKey,
 fieldName,
 )
 if err != nil {
@@ -429,7 +429,7 @@ func (vf *VersionedFetcher) Close() error {
}
// NewVersionedSpan creates a new VersionedSpan from a DataStoreKey and a version CID.
-func NewVersionedSpan(dockey core.DataStoreKey, version cid.Cid) core.Spans {
+func NewVersionedSpan(dsKey core.DataStoreKey, version cid.Cid) core.Spans {
 // Todo: Dont abuse DataStoreKey for version cid!
- return core.NewSpans(core.NewSpan(dockey, core.DataStoreKey{DocKey: version.String()})) + return core.NewSpans(core.NewSpan(dsKey, core.DataStoreKey{DocID: version.String()})) } diff --git a/db/index.go b/db/index.go index 804eac492e..693df4a5f1 100644 --- a/db/index.go +++ b/db/index.go @@ -212,7 +212,7 @@ func (i *collectionSimpleIndex) getDocumentsIndexKey( return core.IndexDataStoreKey{}, err } - key.FieldValues = append(key.FieldValues, []byte(doc.Key().String())) + key.FieldValues = append(key.FieldValues, []byte(doc.ID().String())) return key, nil } @@ -280,7 +280,7 @@ func (i *collectionUniqueIndex) Save( if exists { return i.newUniqueIndexError(doc) } - err = txn.Datastore().Put(ctx, key.ToDS(), []byte(doc.Key().String())) + err = txn.Datastore().Put(ctx, key.ToDS(), []byte(doc.ID().String())) if err != nil { return NewErrFailedToStoreIndexedField(key.ToDS().String(), err) } @@ -294,7 +294,7 @@ func (i *collectionUniqueIndex) newUniqueIndexError( if err != nil { return err } - return NewErrCanNotIndexNonUniqueField(doc.Key().String(), i.fieldDesc.Name, fieldVal.Value()) + return NewErrCanNotIndexNonUniqueField(doc.ID().String(), i.fieldDesc.Name, fieldVal.Value()) } func (i *collectionUniqueIndex) Update( diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go index 4110463c09..38309bf745 100644 --- a/db/indexed_docs_test.go +++ b/db/indexed_docs_test.go @@ -176,7 +176,7 @@ func (b *indexKeyBuilder) Build() core.IndexDataStoreKey { key.FieldValues = [][]byte{fieldBytesVal} if !b.isUnique { - key.FieldValues = append(key.FieldValues, []byte(b.doc.Key().String())) + key.FieldValues = append(key.FieldValues, []byte(b.doc.ID().String())) } } else if len(b.values) > 0 { key.FieldValues = b.values @@ -376,13 +376,13 @@ func TestNonUnique_IfMultipleCollectionsWithIndexes_StoreIndexWithCollectionID(t require.NoError(f.t, err) f.commitTxn() - userDocKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(userDoc).Build() - prodDocKey := newIndexKeyBuilder(f).Col(productsColName).Field(productsCategoryFieldName).Doc(prodDoc).Build() + userDocID := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(userDoc).Build() + prodDocID := newIndexKeyBuilder(f).Col(productsColName).Field(productsCategoryFieldName).Doc(prodDoc).Build() - data, err := f.txn.Datastore().Get(f.ctx, userDocKey.ToDS()) + data, err := f.txn.Datastore().Get(f.ctx, userDocID.ToDS()) require.NoError(t, err) assert.Len(t, data, 0) - data, err = f.txn.Datastore().Get(f.ctx, prodDocKey.ToDS()) + data, err = f.txn.Datastore().Get(f.ctx, prodDocID.ToDS()) require.NoError(t, err) assert.Len(t, data, 0) } @@ -619,7 +619,7 @@ func TestNonUniqueCreate_IfDatastoreFailsToStoreIndex_ReturnError(t *testing.T) fieldKeyString := core.DataStoreKey{ CollectionID: f.users.Description().IDString(), - }.WithDocKey(doc.Key().String()). + }.WithDocID(doc.ID().String()). WithFieldId("1"). WithValueFlag(). 
ToString() @@ -923,7 +923,7 @@ func TestNonUniqueUpdate_IfDatastoreFails_ReturnError(t *testing.T) { require.NoError(t, err) encodedDoc := shimEncodedDocument{ - key: []byte(doc.Key().String()), + key: []byte(doc.ID().String()), schemaVersionID: f.users.Schema().VersionID, } @@ -987,7 +987,7 @@ type shimEncodedDocument struct { var _ fetcher.EncodedDocument = (*shimEncodedDocument)(nil) -func (encdoc *shimEncodedDocument) Key() []byte { +func (encdoc *shimEncodedDocument) ID() []byte { return encdoc.key } @@ -1026,10 +1026,10 @@ func TestUniqueCreate_ShouldIndexExistingDocs(t *testing.T) { data, err := f.txn.Datastore().Get(f.ctx, key1.ToDS()) require.NoError(t, err, key1.ToString()) - assert.Equal(t, data, []byte(doc1.Key().String())) + assert.Equal(t, data, []byte(doc1.ID().String())) data, err = f.txn.Datastore().Get(f.ctx, key2.ToDS()) require.NoError(t, err) - assert.Equal(t, data, []byte(doc2.Key().String())) + assert.Equal(t, data, []byte(doc2.ID().String())) } func TestUnique_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) { @@ -1052,7 +1052,7 @@ func TestUnique_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) { data, err := f.txn.Datastore().Get(f.ctx, key.ToDS()) require.NoError(t, err) - assert.Equal(t, data, []byte(doc.Key().String())) + assert.Equal(t, data, []byte(doc.ID().String())) } func TestUniqueDrop_ShouldDeleteStoredIndexedFields(t *testing.T) { diff --git a/db/subscriptions.go b/db/subscriptions.go index af981ad95f..2e7d2d4123 100644 --- a/db/subscriptions.go +++ b/db/subscriptions.go @@ -74,7 +74,7 @@ func (db *db) handleEvent( ) { p := planner.New(ctx, db.WithTxn(txn), txn) - s := r.ToSelect(evt.DocKey, evt.Cid.String()) + s := r.ToSelect(evt.DocID, evt.Cid.String()) result, err := p.RunSubscriptionRequest(ctx, s) if err != nil { diff --git a/docs/cli/defradb_client.md b/docs/cli/defradb_client.md index a52fce09f3..c3bd354bc7 100644 --- a/docs/cli/defradb_client.md +++ b/docs/cli/defradb_client.md @@ -38,4 +38,5 @@ Execute queries, add schema types, obtain node info, etc. * [defradb client query](defradb_client_query.md) - Send a DefraDB GraphQL query request * [defradb client schema](defradb_client_schema.md) - Interact with the schema system of a DefraDB node * [defradb client tx](defradb_client_tx.md) - Create, commit, and discard DefraDB transactions +* [defradb client view](defradb_client_view.md) - Manage views within a running DefraDB instance diff --git a/docs/cli/defradb_client_collection.md b/docs/cli/defradb_client_collection.md index 7807f49503..d164902b97 100644 --- a/docs/cli/defradb_client_collection.md +++ b/docs/cli/defradb_client_collection.md @@ -33,9 +33,9 @@ Create, read, update, and delete documents within a collection. * [defradb client](defradb_client.md) - Interact with a DefraDB node * [defradb client collection create](defradb_client_collection_create.md) - Create a new document. -* [defradb client collection delete](defradb_client_collection_delete.md) - Delete documents by key or filter. +* [defradb client collection delete](defradb_client_collection_delete.md) - Delete documents by docID or filter. * [defradb client collection describe](defradb_client_collection_describe.md) - View collection description. +* [defradb client collection docIDs](defradb_client_collection_docIDs.md) - List all document IDs (docIDs). * [defradb client collection get](defradb_client_collection_get.md) - View document fields. -* [defradb client collection keys](defradb_client_collection_keys.md) - List all document keys. 
-* [defradb client collection update](defradb_client_collection_update.md) - Update documents by key or filter. +* [defradb client collection update](defradb_client_collection_update.md) - Update documents by docID or filter. diff --git a/docs/cli/defradb_client_collection_delete.md b/docs/cli/defradb_client_collection_delete.md index fea6c6ccc7..30676654d5 100644 --- a/docs/cli/defradb_client_collection_delete.md +++ b/docs/cli/defradb_client_collection_delete.md @@ -1,28 +1,28 @@ ## defradb client collection delete -Delete documents by key or filter. +Delete documents by docID or filter. ### Synopsis -Delete documents by key or filter and lists the number of documents deleted. +Delete documents by docID or filter and lists the number of documents deleted. -Example: delete by key(s) - defradb client collection delete --name User --key bae-123,bae-456 +Example: delete by docID(s) + defradb client collection delete --name User --docID bae-123,bae-456 Example: delete by filter defradb client collection delete --name User --filter '{ "_gte": { "points": 100 } }' ``` -defradb client collection delete [--filter --key ] [flags] +defradb client collection delete [--filter --docID ] [flags] ``` ### Options ``` + --docID strings Document ID --filter string Document filter -h, --help help for delete - --key strings Document key ``` ### Options inherited from parent commands diff --git a/docs/cli/defradb_client_collection_keys.md b/docs/cli/defradb_client_collection_docIDs.md similarity index 83% rename from docs/cli/defradb_client_collection_keys.md rename to docs/cli/defradb_client_collection_docIDs.md index 234d8e051f..5ca8444e2e 100644 --- a/docs/cli/defradb_client_collection_keys.md +++ b/docs/cli/defradb_client_collection_docIDs.md @@ -1,23 +1,23 @@ -## defradb client collection keys +## defradb client collection docIDs -List all document keys. +List all document IDs (docIDs). ### Synopsis -List all document keys. +List all document IDs (docIDs). Example: - defradb client collection keys --name User + defradb client collection docIDs --name User ``` -defradb client collection keys [flags] +defradb client collection docIDs [flags] ``` ### Options ``` - -h, --help help for keys + -h, --help help for docIDs ``` ### Options inherited from parent commands diff --git a/docs/cli/defradb_client_collection_get.md b/docs/cli/defradb_client_collection_get.md index 675988c487..3f60490272 100644 --- a/docs/cli/defradb_client_collection_get.md +++ b/docs/cli/defradb_client_collection_get.md @@ -11,7 +11,7 @@ Example: ``` -defradb client collection get [--show-deleted] [flags] +defradb client collection get [--show-deleted] [flags] ``` ### Options diff --git a/docs/cli/defradb_client_collection_update.md b/docs/cli/defradb_client_collection_update.md index c081614cce..4ba111f025 100644 --- a/docs/cli/defradb_client_collection_update.md +++ b/docs/cli/defradb_client_collection_update.md @@ -1,33 +1,33 @@ ## defradb client collection update -Update documents by key or filter. +Update documents by docID or filter. ### Synopsis -Update documents by key or filter. +Update documents by docID or filter. 
Example: update from string - defradb client collection update --name User --key bae-123 '{ "name": "Bob" }' + defradb client collection update --name User --docID bae-123 '{ "name": "Bob" }' Example: update by filter defradb client collection update --name User \ --filter '{ "_gte": { "points": 100 } }' --updater '{ "verified": true }' -Example: update by keys +Example: update by docIDs defradb client collection update --name User \ - --key bae-123,bae-456 --updater '{ "verified": true }' + --docID bae-123,bae-456 --updater '{ "verified": true }' ``` -defradb client collection update [--filter --key --updater ] [flags] +defradb client collection update [--filter --docID --updater ] [flags] ``` ### Options ``` + --docID strings Document ID --filter string Document filter -h, --help help for update - --key strings Document key --updater string Document updater ``` diff --git a/docs/cli/defradb_client_document.md b/docs/cli/defradb_client_document.md deleted file mode 100644 index bc527357e7..0000000000 --- a/docs/cli/defradb_client_document.md +++ /dev/null @@ -1,38 +0,0 @@ -## defradb client document - -Create, read, update, and delete documents. - -### Synopsis - -Create, read, update, and delete documents. - -### Options - -``` - -h, --help help for document -``` - -### Options inherited from parent commands - -``` - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --tx uint Transaction ID - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client](defradb_client.md) - Interact with a DefraDB node -* [defradb client document create](defradb_client_document_create.md) - Create a new document. -* [defradb client document delete](defradb_client_document_delete.md) - Delete documents by key or filter. -* [defradb client document get](defradb_client_document_get.md) - View detailed document info. -* [defradb client document keys](defradb_client_document_keys.md) - List all collection document keys. -* [defradb client document save](defradb_client_document_save.md) - Create or update a document. -* [defradb client document update](defradb_client_document_update.md) - Update documents by key or filter. - diff --git a/docs/cli/defradb_client_document_create.md b/docs/cli/defradb_client_document_create.md deleted file mode 100644 index 99dbd0d7f5..0000000000 --- a/docs/cli/defradb_client_document_create.md +++ /dev/null @@ -1,44 +0,0 @@ -## defradb client document create - -Create a new document. - -### Synopsis - -Create a new document. - -Example: create document - defradb client document create --collection User '{ "name": "Bob" }' - -Example: create documents - defradb client document create --collection User '[{ "name": "Alice" }, { "name": "Bob" }]' - - -``` -defradb client document create --collection [flags] -``` - -### Options - -``` - -c, --collection string Collection name - -h, --help help for create -``` - -### Options inherited from parent commands - -``` - --logformat string Log format to use. 
Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --tx uint Transaction ID - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client document](defradb_client_document.md) - Create, read, update, and delete documents. - diff --git a/docs/cli/defradb_client_document_delete.md b/docs/cli/defradb_client_document_delete.md deleted file mode 100644 index 96a0b1e973..0000000000 --- a/docs/cli/defradb_client_document_delete.md +++ /dev/null @@ -1,46 +0,0 @@ -## defradb client document delete - -Delete documents by key or filter. - -### Synopsis - -Delete documents by key or filter and lists the number of documents deleted. - -Example: delete by key(s) - defradb client document delete --collection User --key bae-123,bae-456 - -Example: delete by filter - defradb client document delete --collection User --filter '{ "_gte": { "points": 100 } }' - - -``` -defradb client document delete --collection [--filter --key ] [flags] -``` - -### Options - -``` - -c, --collection string Collection name - --filter string Document filter - -h, --help help for delete - --key strings Document key -``` - -### Options inherited from parent commands - -``` - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --tx uint Transaction ID - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client document](defradb_client_document.md) - Create, read, update, and delete documents. - diff --git a/docs/cli/defradb_client_document_save.md b/docs/cli/defradb_client_document_save.md deleted file mode 100644 index 41f59a860c..0000000000 --- a/docs/cli/defradb_client_document_save.md +++ /dev/null @@ -1,42 +0,0 @@ -## defradb client document save - -Create or update a document. - -### Synopsis - -Create or update a document. - -Example: - defradb client document save --collection User --key bae-123 '{ "name": "Bob" }' - - -``` -defradb client document save --collection --key [flags] -``` - -### Options - -``` - -c, --collection string Collection name - -h, --help help for save - --key string Document key -``` - -### Options inherited from parent commands - -``` - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --tx uint Transaction ID - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client document](defradb_client_document.md) - Create, read, update, and delete documents. - diff --git a/docs/cli/defradb_client_document_update.md b/docs/cli/defradb_client_document_update.md deleted file mode 100644 index 3efc67ebf0..0000000000 --- a/docs/cli/defradb_client_document_update.md +++ /dev/null @@ -1,52 +0,0 @@ -## defradb client document update - -Update documents by key or filter. - -### Synopsis - -Update documents by key or filter. - -Example: - defradb client document update --collection User --key bae-123 '{ "name": "Bob" }' - -Example: update by filter - defradb client document update --collection User \ - --filter '{ "_gte": { "points": 100 } }' --updater '{ "verified": true }' - -Example: update by keys - defradb client document update --collection User \ - --key bae-123,bae-456 --updater '{ "verified": true }' - - -``` -defradb client document update --collection [--filter --key --updater ] [flags] -``` - -### Options - -``` - -c, --collection string Collection name - --filter string Document filter - -h, --help help for update - --key strings Document key - --updater string Document updater -``` - -### Options inherited from parent commands - -``` - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --tx uint Transaction ID - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client document](defradb_client_document.md) - Create, read, update, and delete documents. - diff --git a/docs/cli/defradb_client_index_create.md b/docs/cli/defradb_client_index_create.md index 96b6418440..e434cb91cd 100644 --- a/docs/cli/defradb_client_index_create.md +++ b/docs/cli/defradb_client_index_create.md @@ -7,6 +7,7 @@ Creates a secondary index on a collection's field(s) Creates a secondary index on a collection's field(s). The --name flag is optional. If not provided, a name will be generated automatically. +The --unique flag is optional. If provided, the index will be unique. 
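+
+Example: create a unique index for 'Users' collection on 'email' field (an
+illustrative invocation; an 'email' field is assumed):
+ defradb client index create --collection Users --fields email --unique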
Example: create an index for 'Users' collection on 'name' field:
 defradb client index create --collection Users --fields name
@@ -15,7 +16,7 @@ Example: create a named index for 'Users' collection on 'name' field:
 defradb client index create --collection Users --fields name --name UsersByName
```
-defradb client index create -c --collection --fields [-n --name ] [flags]
+defradb client index create -c --collection --fields [-n --name ] [--unique] [flags]
```
### Options
@@ -25,6 +26,7 @@ defradb client index create -c --collection --fields [-n -
 --fields strings Fields to index
 -h, --help help for create
 -n, --name string Index name
+ -u, --unique Make the index unique
```
### Options inherited from parent commands
diff --git a/docs/cli/defradb_client_document_get.md b/docs/cli/defradb_client_view.md
similarity index 63%
rename from docs/cli/defradb_client_document_get.md
rename to docs/cli/defradb_client_view.md
index 600712ec0b..c3aaf4a69f 100644
--- a/docs/cli/defradb_client_document_get.md
+++ b/docs/cli/defradb_client_view.md
@@ -1,25 +1,15 @@
-## defradb client document get
+## defradb client view
-View detailed document info.
+Manage views within a running DefraDB instance
### Synopsis
-View detailed document info.
-
-Example:
- defradb client document get --collection User bae-123
-
-
-```
-defradb client document get --collection [--show-deleted] [flags]
-```
+Manage (add) views within a running DefraDB instance
### Options
```
- -c, --collection string Collection name
- -h, --help help for get
- --show-deleted Show deleted documents
+ -h, --help help for view
```
### Options inherited from parent commands
@@ -38,5 +28,6 @@
### SEE ALSO
-* [defradb client document](defradb_client_document.md) - Create, read, update, and delete documents.
+* [defradb client](defradb_client.md) - Interact with a DefraDB node
+* [defradb client view add](defradb_client_view_add.md) - Add new view
diff --git a/docs/cli/defradb_client_document_keys.md b/docs/cli/defradb_client_view_add.md
similarity index 66%
rename from docs/cli/defradb_client_document_keys.md
rename to docs/cli/defradb_client_view_add.md
index e436f4df6b..caac7d862a 100644
--- a/docs/cli/defradb_client_document_keys.md
+++ b/docs/cli/defradb_client_view_add.md
@@ -1,24 +1,24 @@
-## defradb client document keys
+## defradb client view add
-List all collection document keys.
+Add new view
### Synopsis
-List all collection document keys.
-
-Example:
- defradb client document keys --collection User keys
-
+Add new database view.
+
+Example: add from an argument string:
+ defradb client view add 'Foo { name, ...}' 'type Foo { ... }'
+
+Learn more about the DefraDB GraphQL Schema Language at https://docs.source.network.
```
-defradb client document keys --collection [flags]
+defradb client view add [query] [sdl] [flags]
```
### Options
```
- -c, --collection string Collection name
- -h, --help help for keys
+ -h, --help help for add
```
### Options inherited from parent commands
@@ -37,5 +37,5 @@ defradb client document keys --collection [flags]
### SEE ALSO
-* [defradb client document](defradb_client_document.md) - Create, read, update, and delete documents.
+* [defradb client view](defradb_client_view.md) - Manage views within a running DefraDB instance
diff --git a/docs/data_format_changes/i1749-rename-key-to-doc-id-terminology.md b/docs/data_format_changes/i1749-rename-key-to-doc-id-terminology.md
new file mode 100644
index 0000000000..bf6a541977
--- /dev/null
+++ b/docs/data_format_changes/i1749-rename-key-to-doc-id-terminology.md
@@ -0,0 +1,7 @@
+# Rename _key to _docID
+
+Rename the older `_key` terminology to `_docID`; the update is applied to all documentation and throughout the codebase.
+
+- All instances of `(k|K)ey(s|S)` and `(d|D)ockey(s|S)` should have been updated to use the term `(d|D)ocID(s)` instead.
+
+- Usage of the `id`/`ids` arguments has also been updated to `docID`/`docIDs`.
diff --git a/events/db_update.go b/events/db_update.go
index d9479656a3..a6865b8707 100644
--- a/events/db_update.go
+++ b/events/db_update.go
@@ -25,7 +25,7 @@ var EmptyUpdateChannel = immutable.None[Channel[Update]]()
 // UpdateEvent represents a new DAG node added to the append-only MerkleCRDT Clock graph
 // of a document or sub-field.
 type Update struct {
- DocKey string
+ DocID string
 Cid cid.Cid
 SchemaRoot string
 Block ipld.Node
diff --git a/examples/request/user_creation.graphql b/examples/request/user_creation.graphql
index 915285bb10..0cab4c6d45 100644
--- a/examples/request/user_creation.graphql
+++ b/examples/request/user_creation.graphql
@@ -1,5 +1,5 @@
 mutation {
 create_User(data: "{\"age\": 31, \"verified\": true, \"points\": 90, \"name\": \"Bob\"}") {
- _key
+ _docID
 }
-}
\ No newline at end of file
+}
diff --git a/examples/request/user_query.graphql b/examples/request/user_query.graphql
index ffdc86b259..5d38670c23 100644
@@ -4,6 +4,6 @@ query {
 age
 verified
 points
- _key
+ _docID
 }
-}
\ No newline at end of file
+}
diff --git a/http/client_collection.go b/http/client_collection.go
index 35ca21ce4f..36b99cd9f2 100644
--- a/http/client_collection.go
+++ b/http/client_collection.go
@@ -62,9 +62,9 @@ func (c *Collection) Definition() client.CollectionDefinition {
 func (c *Collection) Create(ctx context.Context, doc *client.Document) error {
 methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name)
- // We must call this here, else the doc key on the given object will not match
+ // We must call this here, else the docID on the given object will not match
 // that of the document saved in the database
- err := doc.RemapAliasFieldsAndDockey(c.Schema().Fields)
+ err := doc.RemapAliasFieldsAndDocID(c.Schema().Fields)
 if err != nil {
 return err
 }
@@ -90,9 +90,9 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er
 var docMapList []json.RawMessage
 for _, doc := range docs {
- // We must call this here, else the doc key on the given object will not match
+ // We must call this here, else the docID on the given object will not match
 // that of the document saved in the database
- err := doc.RemapAliasFieldsAndDockey(c.Schema().Fields)
+ err := doc.RemapAliasFieldsAndDocID(c.Schema().Fields)
 if err != nil {
 return err
 }
@@ -122,7 +122,7 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er
 }
 func (c *Collection) Update(ctx context.Context, doc *client.Document) error {
- methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name, doc.Key().String())
+ methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name, doc.ID().String())
 body, err := doc.ToJSONPatch()
 if err
!= nil { @@ -141,7 +141,7 @@ func (c *Collection) Update(ctx context.Context, doc *client.Document) error { } func (c *Collection) Save(ctx context.Context, doc *client.Document) error { - _, err := c.Get(ctx, doc.Key(), true) + _, err := c.Get(ctx, doc.ID(), true) if err == nil { return c.Update(ctx, doc) } @@ -151,8 +151,8 @@ func (c *Collection) Save(ctx context.Context, doc *client.Document) error { return err } -func (c *Collection) Delete(ctx context.Context, docKey client.DocKey) (bool, error) { - methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name, docKey.String()) +func (c *Collection) Delete(ctx context.Context, docID client.DocID) (bool, error) { + methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name, docID.String()) req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil) if err != nil { @@ -165,8 +165,8 @@ func (c *Collection) Delete(ctx context.Context, docKey client.DocKey) (bool, er return true, nil } -func (c *Collection) Exists(ctx context.Context, docKey client.DocKey) (bool, error) { - _, err := c.Get(ctx, docKey, false) +func (c *Collection) Exists(ctx context.Context, docID client.DocID) (bool, error) { + _, err := c.Get(ctx, docID, false) if err != nil { return false, err } @@ -177,10 +177,10 @@ func (c *Collection) UpdateWith(ctx context.Context, target any, updater string) switch t := target.(type) { case string, map[string]any, *request.Filter: return c.UpdateWithFilter(ctx, t, updater) - case client.DocKey: - return c.UpdateWithKey(ctx, t, updater) - case []client.DocKey: - return c.UpdateWithKeys(ctx, t, updater) + case client.DocID: + return c.UpdateWithDocID(ctx, t, updater) + case []client.DocID: + return c.UpdateWithDocIDs(ctx, t, updater) default: return nil, client.ErrInvalidUpdateTarget } @@ -218,28 +218,28 @@ func (c *Collection) UpdateWithFilter( }) } -func (c *Collection) UpdateWithKey( +func (c *Collection) UpdateWithDocID( ctx context.Context, - key client.DocKey, + docID client.DocID, updater string, ) (*client.UpdateResult, error) { return c.updateWith(ctx, CollectionUpdateRequest{ - Key: key.String(), + DocID: docID.String(), Updater: updater, }) } -func (c *Collection) UpdateWithKeys( +func (c *Collection) UpdateWithDocIDs( ctx context.Context, - docKeys []client.DocKey, + docIDs []client.DocID, updater string, ) (*client.UpdateResult, error) { - var keys []string - for _, key := range docKeys { - keys = append(keys, key.String()) + var strDocIDs []string + for _, docID := range docIDs { + strDocIDs = append(strDocIDs, docID.String()) } return c.updateWith(ctx, CollectionUpdateRequest{ - Keys: keys, + DocIDs: strDocIDs, Updater: updater, }) } @@ -248,10 +248,10 @@ func (c *Collection) DeleteWith(ctx context.Context, target any) (*client.Delete switch t := target.(type) { case string, map[string]any, *request.Filter: return c.DeleteWithFilter(ctx, t) - case client.DocKey: - return c.DeleteWithKey(ctx, t) - case []client.DocKey: - return c.DeleteWithKeys(ctx, t) + case client.DocID: + return c.DeleteWithDocID(ctx, t) + case []client.DocID: + return c.DeleteWithDocIDs(ctx, t) default: return nil, client.ErrInvalidDeleteTarget } @@ -284,29 +284,29 @@ func (c *Collection) DeleteWithFilter(ctx context.Context, filter any) (*client. 
}) } -func (c *Collection) DeleteWithKey(ctx context.Context, docKey client.DocKey) (*client.DeleteResult, error) { +func (c *Collection) DeleteWithDocID(ctx context.Context, docID client.DocID) (*client.DeleteResult, error) { return c.deleteWith(ctx, CollectionDeleteRequest{ - Key: docKey.String(), + DocID: docID.String(), }) } -func (c *Collection) DeleteWithKeys(ctx context.Context, docKeys []client.DocKey) (*client.DeleteResult, error) { - var keys []string - for _, key := range docKeys { - keys = append(keys, key.String()) +func (c *Collection) DeleteWithDocIDs(ctx context.Context, docIDs []client.DocID) (*client.DeleteResult, error) { + var strDocIDs []string + for _, docID := range docIDs { + strDocIDs = append(strDocIDs, docID.String()) } return c.deleteWith(ctx, CollectionDeleteRequest{ - Keys: keys, + DocIDs: strDocIDs, }) } -func (c *Collection) Get(ctx context.Context, key client.DocKey, showDeleted bool) (*client.Document, error) { +func (c *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bool) (*client.Document, error) { query := url.Values{} if showDeleted { query.Add("show_deleted", "true") } - methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name, key.String()) + methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name, docID.String()) methodURL.RawQuery = query.Encode() req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) @@ -332,7 +332,7 @@ func (c *Collection) WithTxn(tx datastore.Txn) client.Collection { } } -func (c *Collection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) { +func (c *Collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) { methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name) req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) @@ -345,7 +345,7 @@ func (c *Collection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysRe if err != nil { return nil, err } - docKeyCh := make(chan client.DocKeysResult) + docIDCh := make(chan client.DocIDResult) go func() { eventReader := sse.NewReadCloser(res.Body) @@ -353,32 +353,32 @@ func (c *Collection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysRe // and body of the request are already // checked and it cannot be handled properly defer eventReader.Close() //nolint:errcheck - defer close(docKeyCh) + defer close(docIDCh) for { evt, err := eventReader.Next() if err != nil { return } - var res DocKeyResult + var res DocIDResult if err := json.Unmarshal(evt.Data, &res); err != nil { return } - key, err := client.NewDocKeyFromString(res.Key) + docID, err := client.NewDocIDFromString(res.DocID) if err != nil { return } - docKey := client.DocKeysResult{ - Key: key, + docIDResult := client.DocIDResult{ + ID: docID, } if res.Error != "" { - docKey.Err = fmt.Errorf(res.Error) + docIDResult.Err = fmt.Errorf(res.Error) } - docKeyCh <- docKey + docIDCh <- docIDResult } }() - return docKeyCh, nil + return docIDCh, nil } func (c *Collection) CreateIndex( diff --git a/http/errors.go b/http/errors.go index dae6a2d863..b78771723f 100644 --- a/http/errors.go +++ b/http/errors.go @@ -26,18 +26,8 @@ const ( // Errors returned from this package may be tested against these errors with errors.Is. 
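+// For instance (an illustrative sketch; handleRequest is an assumed helper,
+// not part of this package):
+//
+//	if err := handleRequest(); errors.Is(err, ErrInvalidRequestBody) {
+//		// reject the malformed request payload
+//	}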
var ( ErrNoListener = errors.New("cannot serve with no listener") - ErrSchema = errors.New("base must start with the http or https scheme") - ErrDatabaseNotAvailable = errors.New("no database available") - ErrFormNotSupported = errors.New("content type application/x-www-form-urlencoded not yet supported") - ErrBodyEmpty = errors.New("body cannot be empty") - ErrMissingGQLRequest = errors.New("missing GraphQL request") - ErrPeerIdUnavailable = errors.New("no PeerID available. P2P might be disabled") - ErrStreamingUnsupported = errors.New("streaming unsupported") ErrNoEmail = errors.New("email address must be specified for tls with autocert") - ErrPayloadFormat = errors.New("invalid payload format") - ErrMissingNewKey = errors.New("missing _newKey for imported doc") ErrInvalidRequestBody = errors.New("invalid request body") - ErrDocKeyDoesNotMatch = errors.New("document key does not match") ErrStreamingNotSupported = errors.New("streaming not supported") ErrMigrationNotFound = errors.New("migration not found") ErrMissingRequest = errors.New("missing request") diff --git a/http/handler_collection.go b/http/handler_collection.go index 69f08d7073..87a47e1ad2 100644 --- a/http/handler_collection.go +++ b/http/handler_collection.go @@ -26,14 +26,14 @@ import ( type collectionHandler struct{} type CollectionDeleteRequest struct { - Key string `json:"key"` - Keys []string `json:"keys"` + DocID string `json:"docID"` + DocIDs []string `json:"docIDs"` Filter any `json:"filter"` } type CollectionUpdateRequest struct { - Key string `json:"key"` - Keys []string `json:"keys"` + DocID string `json:"docID"` + DocIDs []string `json:"docIDs"` Filter any `json:"filter"` Updater string `json:"updater"` } @@ -101,29 +101,29 @@ func (s *collectionHandler) DeleteWith(rw http.ResponseWriter, req *http.Request return } responseJSON(rw, http.StatusOK, result) - case request.Key != "": - docKey, err := client.NewDocKeyFromString(request.Key) + case request.DocID != "": + docID, err := client.NewDocIDFromString(request.DocID) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - result, err := col.DeleteWith(req.Context(), docKey) + result, err := col.DeleteWith(req.Context(), docID) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, result) - case request.Keys != nil: - var docKeys []client.DocKey - for _, key := range request.Keys { - docKey, err := client.NewDocKeyFromString(key) + case request.DocIDs != nil: + var docIDs []client.DocID + for _, docIDStr := range request.DocIDs { + docID, err := client.NewDocIDFromString(docIDStr) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - docKeys = append(docKeys, docKey) + docIDs = append(docIDs, docID) } - result, err := col.DeleteWith(req.Context(), docKeys) + result, err := col.DeleteWith(req.Context(), docIDs) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -151,29 +151,29 @@ func (s *collectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request return } responseJSON(rw, http.StatusOK, result) - case request.Key != "": - docKey, err := client.NewDocKeyFromString(request.Key) + case request.DocID != "": + docID, err := client.NewDocIDFromString(request.DocID) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - result, err := col.UpdateWith(req.Context(), docKey, request.Updater) + result, err := col.UpdateWith(req.Context(), docID, request.Updater) if err 
!= nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, result) - case request.Keys != nil: - var docKeys []client.DocKey - for _, key := range request.Keys { - docKey, err := client.NewDocKeyFromString(key) + case request.DocIDs != nil: + var docIDs []client.DocID + for _, docIDStr := range request.DocIDs { + docID, err := client.NewDocIDFromString(docIDStr) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - docKeys = append(docKeys, docKey) + docIDs = append(docIDs, docID) } - result, err := col.UpdateWith(req.Context(), docKeys, request.Updater) + result, err := col.UpdateWith(req.Context(), docIDs, request.Updater) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -187,12 +187,12 @@ func (s *collectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request func (s *collectionHandler) Update(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) - docKey, err := client.NewDocKeyFromString(chi.URLParam(req, "key")) + docID, err := client.NewDocIDFromString(chi.URLParam(req, "docID")) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - doc, err := col.Get(req.Context(), docKey, true) + doc, err := col.Get(req.Context(), docID, true) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -217,12 +217,12 @@ func (s *collectionHandler) Update(rw http.ResponseWriter, req *http.Request) { func (s *collectionHandler) Delete(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) - docKey, err := client.NewDocKeyFromString(chi.URLParam(req, "key")) + docID, err := client.NewDocIDFromString(chi.URLParam(req, "docID")) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - _, err = col.Delete(req.Context(), docKey) + _, err = col.Delete(req.Context(), docID) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -234,12 +234,12 @@ func (s *collectionHandler) Get(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) showDeleted, _ := strconv.ParseBool(req.URL.Query().Get("show_deleted")) - docKey, err := client.NewDocKeyFromString(chi.URLParam(req, "key")) + docID, err := client.NewDocIDFromString(chi.URLParam(req, "docID")) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - doc, err := col.Get(req.Context(), docKey, showDeleted) + doc, err := col.Get(req.Context(), docID, showDeleted) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -252,12 +252,12 @@ func (s *collectionHandler) Get(rw http.ResponseWriter, req *http.Request) { responseJSON(rw, http.StatusOK, docMap) } -type DocKeyResult struct { - Key string `json:"key"` +type DocIDResult struct { + DocID string `json:"docID"` Error string `json:"error"` } -func (s *collectionHandler) GetAllDocKeys(rw http.ResponseWriter, req *http.Request) { +func (s *collectionHandler) GetAllDocIDs(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) flusher, ok := rw.(http.Flusher) @@ -266,7 +266,7 @@ func (s *collectionHandler) GetAllDocKeys(rw http.ResponseWriter, req *http.Requ return } - docKeyCh, err := col.GetAllDocKeys(req.Context()) + docIDsResult, err := col.GetAllDocIDs(req.Context()) if err != nil { responseJSON(rw, 
http.StatusBadRequest, errorResponse{err}) return @@ -279,12 +279,12 @@ func (s *collectionHandler) GetAllDocKeys(rw http.ResponseWriter, req *http.Requ rw.WriteHeader(http.StatusOK) flusher.Flush() - for docKey := range docKeyCh { - results := &DocKeyResult{ - Key: docKey.Key.String(), + for docID := range docIDsResult { + results := &DocIDResult{ + DocID: docID.ID.String(), } - if docKey.Err != nil { - results.Error = docKey.Err.Error() + if docID.Err != nil { + results.Error = docID.Err.Error() } data, err := json.Marshal(results) if err != nil { @@ -478,7 +478,7 @@ func (h *collectionHandler) bindRoutes(router *Router) { dropIndex.Responses["200"] = successResponse dropIndex.Responses["400"] = errorResponse - documentKeyPathParam := openapi3.NewPathParameter("key"). + documentIDPathParam := openapi3.NewPathParameter("docID"). WithRequired(true). WithSchema(openapi3.NewStringSchema()) @@ -487,51 +487,51 @@ func (h *collectionHandler) bindRoutes(router *Router) { WithJSONSchemaRef(documentSchema) collectionGet := openapi3.NewOperation() - collectionGet.Description = "Get a document by key" + collectionGet.Description = "Get a document by docID" collectionGet.OperationID = "collection_get" collectionGet.Tags = []string{"collection"} collectionGet.AddParameter(collectionNamePathParam) - collectionGet.AddParameter(documentKeyPathParam) + collectionGet.AddParameter(documentIDPathParam) collectionGet.AddResponse(200, collectionGetResponse) collectionGet.Responses["400"] = errorResponse collectionUpdate := openapi3.NewOperation() - collectionUpdate.Description = "Update a document by key" + collectionUpdate.Description = "Update a document by docID" collectionUpdate.OperationID = "collection_update" collectionUpdate.Tags = []string{"collection"} collectionUpdate.AddParameter(collectionNamePathParam) - collectionUpdate.AddParameter(documentKeyPathParam) + collectionUpdate.AddParameter(documentIDPathParam) collectionUpdate.Responses = make(openapi3.Responses) collectionUpdate.Responses["200"] = successResponse collectionUpdate.Responses["400"] = errorResponse collectionDelete := openapi3.NewOperation() - collectionDelete.Description = "Delete a document by key" + collectionDelete.Description = "Delete a document by docID" collectionDelete.OperationID = "collection_delete" collectionDelete.Tags = []string{"collection"} collectionDelete.AddParameter(collectionNamePathParam) - collectionDelete.AddParameter(documentKeyPathParam) + collectionDelete.AddParameter(documentIDPathParam) collectionDelete.Responses = make(openapi3.Responses) collectionDelete.Responses["200"] = successResponse collectionDelete.Responses["400"] = errorResponse collectionKeys := openapi3.NewOperation() collectionKeys.AddParameter(collectionNamePathParam) - collectionKeys.Description = "Get all document keys" + collectionKeys.Description = "Get all document IDs" collectionKeys.OperationID = "collection_keys" collectionKeys.Tags = []string{"collection"} collectionKeys.Responses = make(openapi3.Responses) collectionKeys.Responses["200"] = successResponse collectionKeys.Responses["400"] = errorResponse - router.AddRoute("/collections/{name}", http.MethodGet, collectionKeys, h.GetAllDocKeys) + router.AddRoute("/collections/{name}", http.MethodGet, collectionKeys, h.GetAllDocIDs) router.AddRoute("/collections/{name}", http.MethodPost, collectionCreate, h.Create) router.AddRoute("/collections/{name}", http.MethodPatch, collectionUpdateWith, h.UpdateWith) router.AddRoute("/collections/{name}", http.MethodDelete, collectionDeleteWith, 
h.DeleteWith) router.AddRoute("/collections/{name}/indexes", http.MethodPost, createIndex, h.CreateIndex) router.AddRoute("/collections/{name}/indexes", http.MethodGet, getIndexes, h.GetIndexes) router.AddRoute("/collections/{name}/indexes/{index}", http.MethodDelete, dropIndex, h.DropIndex) - router.AddRoute("/collections/{name}/{key}", http.MethodGet, collectionGet, h.Get) - router.AddRoute("/collections/{name}/{key}", http.MethodPatch, collectionUpdate, h.Update) - router.AddRoute("/collections/{name}/{key}", http.MethodDelete, collectionDelete, h.Delete) + router.AddRoute("/collections/{name}/{docID}", http.MethodGet, collectionGet, h.Get) + router.AddRoute("/collections/{name}/{docID}", http.MethodPatch, collectionUpdate, h.Update) + router.AddRoute("/collections/{name}/{docID}", http.MethodDelete, collectionDelete, h.Delete) } diff --git a/lens/fetcher.go b/lens/fetcher.go index 9186adbb7c..71f5b6243a 100644 --- a/lens/fetcher.go +++ b/lens/fetcher.go @@ -192,7 +192,7 @@ func encodedDocToLensDoc(doc fetcher.EncodedDocument) (LensDoc, error) { for field, fieldValue := range properties { docAsMap[field.Name] = fieldValue } - docAsMap[request.KeyFieldName] = string(doc.Key()) + docAsMap[request.DocIDFieldName] = string(doc.ID()) // Note: client.Document does not have a means of flagging as to whether it is // deleted or not, and, currently the fetcher does not ever returned deleted items @@ -207,7 +207,7 @@ func (f *lensedFetcher) lensDocToEncodedDoc(docAsMap LensDoc) (fetcher.EncodedDo properties := map[client.FieldDescription]any{} for fieldName, fieldByteValue := range docAsMap { - if fieldName == request.KeyFieldName { + if fieldName == request.DocIDFieldName { key = fieldByteValue.(string) continue } @@ -277,14 +277,14 @@ func (f *lensedFetcher) updateDataStore(ctx context.Context, original map[string } } - dockey, ok := original[request.KeyFieldName].(string) + docID, ok := original[request.DocIDFieldName].(string) if !ok { return core.ErrInvalidKey } datastoreKeyBase := core.DataStoreKey{ CollectionID: f.col.Description().IDString(), - DocKey: dockey, + DocID: docID, InstanceType: core.ValueKey, } @@ -326,7 +326,7 @@ type lensEncodedDocument struct { var _ fetcher.EncodedDocument = (*lensEncodedDocument)(nil) -func (encdoc *lensEncodedDocument) Key() []byte { +func (encdoc *lensEncodedDocument) ID() []byte { return encdoc.key } diff --git a/merkle/clock/clock_test.go b/merkle/clock/clock_test.go index a804165062..311d990952 100644 --- a/merkle/clock/clock_test.go +++ b/merkle/clock/clock_test.go @@ -17,6 +17,7 @@ import ( cid "github.com/ipfs/go-cid" ds "github.com/ipfs/go-datastore" + "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/core" ccid "github.com/sourcenetwork/defradb/core/cid" "github.com/sourcenetwork/defradb/core/crdt" @@ -32,7 +33,12 @@ func newTestMerkleClock() *MerkleClock { multistore := datastore.MultiStoreFrom(s) reg := crdt.NewLWWRegister(multistore.Rootstore(), core.CollectionSchemaVersionKey{}, core.DataStoreKey{}, "") - return NewMerkleClock(multistore.Headstore(), multistore.DAGstore(), core.HeadStoreKey{DocKey: "dockey", FieldId: "1"}, reg).(*MerkleClock) + return NewMerkleClock( + multistore.Headstore(), + multistore.DAGstore(), + core.HeadStoreKey{DocID: request.DocIDArgName, FieldId: "1"}, + reg, + ).(*MerkleClock) } func TestNewMerkleClock(t *testing.T) { diff --git a/merkle/clock/heads_test.go b/merkle/clock/heads_test.go index a857571515..18db117ebb 100644 --- a/merkle/clock/heads_test.go +++ 
b/merkle/clock/heads_test.go @@ -45,7 +45,7 @@ func newHeadSet() *heads { return NewHeadSet( datastore.AsDSReaderWriter(s), - core.HeadStoreKey{}.WithDocKey("mydockey").WithFieldId("1"), + core.HeadStoreKey{}.WithDocID("myDocID").WithFieldId("1"), ) } diff --git a/net/client.go b/net/client.go index 947495c5e1..20c33e33fd 100644 --- a/net/client.go +++ b/net/client.go @@ -36,12 +36,12 @@ func (s *server) pushLog(ctx context.Context, evt events.Update, pid peer.ID) er log.Debug( ctx, "Preparing pushLog request", - logging.NewKV("DocKey", evt.DocKey), + logging.NewKV("DocID", evt.DocID), logging.NewKV("CID", evt.Cid), logging.NewKV("SchemaRoot", evt.SchemaRoot)) body := &pb.PushLogRequest_Body{ - DocKey: []byte(evt.DocKey), + DocID: []byte(evt.DocID), Cid: evt.Cid.Bytes(), SchemaRoot: []byte(evt.SchemaRoot), Creator: s.peer.host.ID().String(), @@ -55,7 +55,7 @@ func (s *server) pushLog(ctx context.Context, evt events.Update, pid peer.ID) er log.Debug( ctx, "Pushing log", - logging.NewKV("DocKey", evt.DocKey), + logging.NewKV("DocID", evt.DocID), logging.NewKV("CID", evt.Cid), logging.NewKV("PeerID", pid), ) @@ -72,7 +72,7 @@ func (s *server) pushLog(ctx context.Context, evt events.Update, pid peer.ID) er return NewErrPushLog( err, errors.NewKV("CID", evt.Cid), - errors.NewKV("DocKey", evt.DocKey), + errors.NewKV("DocID", evt.DocID), errors.NewKV("PeerID", pid), ) } diff --git a/net/client_test.go b/net/client_test.go index df07e00c34..7eba460b95 100644 --- a/net/client_test.go +++ b/net/client_test.go @@ -40,7 +40,7 @@ func TestPushlogWithDialFailure(t *testing.T) { ) err = n.server.pushLog(ctx, events.Update{ - DocKey: doc.Key().String(), + DocID: doc.ID().String(), Cid: cid, SchemaRoot: "test", Block: &EmptyNode{}, @@ -61,7 +61,7 @@ func TestPushlogWithInvalidPeerID(t *testing.T) { require.NoError(t, err) err = n.server.pushLog(ctx, events.Update{ - DocKey: doc.Key().String(), + DocID: doc.ID().String(), Cid: cid, SchemaRoot: "test", Block: &EmptyNode{}, @@ -109,7 +109,7 @@ func TestPushlogW_WithValidPeerID_NoError(t *testing.T) { require.NoError(t, err) err = n1.server.pushLog(ctx, events.Update{ - DocKey: doc.Key().String(), + DocID: doc.ID().String(), Cid: cid, SchemaRoot: col.SchemaRoot(), Block: &EmptyNode{}, diff --git a/net/dag.go b/net/dag.go index 1760864db4..f083904915 100644 --- a/net/dag.go +++ b/net/dag.go @@ -77,20 +77,20 @@ func (p *Peer) sendJobWorker() { return case newJob := <-p.sendJobs: - jobs, ok := docWorkerQueue[newJob.bp.dsKey.DocKey] + jobs, ok := docWorkerQueue[newJob.bp.dsKey.DocID] if !ok { jobs = make(chan *dagJob, numWorkers) for i := 0; i < numWorkers; i++ { go p.dagWorker(jobs) } - docWorkerQueue[newJob.bp.dsKey.DocKey] = jobs + docWorkerQueue[newJob.bp.dsKey.DocID] = jobs } jobs <- newJob - case dockey := <-p.closeJob: - if jobs, ok := docWorkerQueue[dockey]; ok { + case docID := <-p.closeJob: + if jobs, ok := docWorkerQueue[docID]; ok { close(jobs) - delete(docWorkerQueue, dockey) + delete(docWorkerQueue, docID) } } } diff --git a/net/dag_test.go b/net/dag_test.go index 6f0145b0ae..fc46b6a96c 100644 --- a/net/dag_test.go +++ b/net/dag_test.go @@ -62,7 +62,7 @@ func TestSendJobWorker_WithNewJob_NoError(t *testing.T) { doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) require.NoError(t, err) - dsKey := core.DataStoreKeyFromDocKey(doc.Key()) + dsKey := core.DataStoreKeyFromDocID(doc.ID()) txn, err := db.NewTxn(ctx, false) require.NoError(t, err) @@ -103,7 +103,7 @@ func TestSendJobWorker_WithCloseJob_NoError(t *testing.T) { doc, err := 
client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) require.NoError(t, err) - dsKey := core.DataStoreKeyFromDocKey(doc.Key()) + dsKey := core.DataStoreKeyFromDocID(doc.ID()) txn, err := db.NewTxn(ctx, false) require.NoError(t, err) @@ -119,7 +119,7 @@ func TestSendJobWorker_WithCloseJob_NoError(t *testing.T) { }, } - n.closeJob <- dsKey.DocKey + n.closeJob <- dsKey.DocID n.Close() select { @@ -164,7 +164,7 @@ func TestSendJobWorker_WithPeer_NoError(t *testing.T) { doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) require.NoError(t, err) - dsKey := core.DataStoreKeyFromDocKey(doc.Key()) + dsKey := core.DataStoreKeyFromDocID(doc.ID()) err = col.Create(ctx, doc) require.NoError(t, err) diff --git a/net/doc.go b/net/doc.go index dd80ee53a4..57cd6cd7fc 100644 --- a/net/doc.go +++ b/net/doc.go @@ -13,7 +13,7 @@ /* Package net provides P2P network functions for the core DefraDB instance. -Notable design descision: all DocKeys (Documents) have their own respective PubSub topics. +Notable design decision: all DocIDs (Documents) have their own respective PubSub topics. The Peer object encapsulates an instantiated DB object, libp2p host object, and libp2p DAGService. Peer is responsible for storing all network related meta-data, maintaining open connections, pubsub mechanics, etc. diff --git a/net/errors.go b/net/errors.go index e9ac8fc748..1ca2d857d5 100644 --- a/net/errors.go +++ b/net/errors.go @@ -20,11 +20,11 @@ import ( const ( errPushLog = "failed to push log" - errFailedToGetDockey = "failed to get DocKey from broadcast message" - errPublishingToDockeyTopic = "can't publish log %s for dockey %s" + errFailedToGetDocID = "failed to get DocID from broadcast message" + errPublishingToDocIDTopic = "can't publish log %s for docID %s" errPublishingToSchemaTopic = "can't publish log %s for schema %s" errReplicatorExists = "replicator already exists for %s with peerID %s" - errReplicatorDocKey = "failed to get dockey for replicator %s with peerID %s" + errReplicatorDocID = "failed to get docID for replicator %s with peerID %s" errReplicatorCollections = "failed to get collections for replicator" ) @@ -41,24 +41,24 @@ func NewErrPushLog(inner error, kv ...errors.KV) error { return errors.Wrap(errPushLog, inner, kv...) } -func NewErrFailedToGetDockey(inner error, kv ...errors.KV) error { - return errors.Wrap(errFailedToGetDockey, inner, kv...) +func NewErrFailedToGetDocID(inner error, kv ...errors.KV) error { + return errors.Wrap(errFailedToGetDocID, inner, kv...) } -func NewErrPublishingToDockeyTopic(inner error, cid, key string, kv ...errors.KV) error { - return errors.Wrap(fmt.Sprintf(errPublishingToDockeyTopic, cid, key), inner, kv...) +func NewErrPublishingToDocIDTopic(inner error, cid, docID string, kv ...errors.KV) error { + return errors.Wrap(fmt.Sprintf(errPublishingToDocIDTopic, cid, docID), inner, kv...) } -func NewErrPublishingToSchemaTopic(inner error, cid, key string, kv ...errors.KV) error { - return errors.Wrap(fmt.Sprintf(errPublishingToSchemaTopic, cid, key), inner, kv...) +func NewErrPublishingToSchemaTopic(inner error, cid, docID string, kv ...errors.KV) error { + return errors.Wrap(fmt.Sprintf(errPublishingToSchemaTopic, cid, docID), inner, kv...) } func NewErrReplicatorExists(collection string, peerID peer.ID, kv ...errors.KV) error { return errors.New(fmt.Sprintf(errReplicatorExists, collection, peerID), kv...)
} -func NewErrReplicatorDocKey(inner error, collection string, peerID peer.ID, kv ...errors.KV) error { - return errors.Wrap(fmt.Sprintf(errReplicatorDocKey, collection, peerID), inner, kv...) +func NewErrReplicatorDocID(inner error, collection string, peerID peer.ID, kv ...errors.KV) error { + return errors.Wrap(fmt.Sprintf(errReplicatorDocID, collection, peerID), inner, kv...) } func NewErrReplicatorCollections(inner error, kv ...errors.KV) error { diff --git a/net/pb/net.pb.go b/net/pb/net.pb.go index 92eaafa5be..a9b5a2162d 100644 --- a/net/pb/net.pb.go +++ b/net/pb/net.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.31.0 -// protoc v3.12.4 +// protoc v4.25.1 // source: net.proto package net_pb @@ -27,7 +27,7 @@ type Document struct { unknownFields protoimpl.UnknownFields // ID of the document. - DocKey []byte `protobuf:"bytes,1,opt,name=docKey,proto3" json:"docKey,omitempty"` + DocID []byte `protobuf:"bytes,1,opt,name=docID,proto3" json:"docID,omitempty"` // head of the log. Head []byte `protobuf:"bytes,4,opt,name=head,proto3" json:"head,omitempty"` } @@ -64,9 +64,9 @@ func (*Document) Descriptor() ([]byte, []int) { return file_net_proto_rawDescGZIP(), []int{0} } -func (x *Document) GetDocKey() []byte { +func (x *Document) GetDocID() []byte { if x != nil { - return x.DocKey + return x.DocID } return nil } @@ -521,8 +521,8 @@ type PushLogRequest_Body struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - // docKey is the DocKey of the document that is affected by the log. - DocKey []byte `protobuf:"bytes,1,opt,name=docKey,proto3" json:"docKey,omitempty"` + // docID is the ID of the document that is affected by the log. + DocID []byte `protobuf:"bytes,1,opt,name=docID,proto3" json:"docID,omitempty"` // cid is the CID of the composite of the document. Cid []byte `protobuf:"bytes,2,opt,name=cid,proto3" json:"cid,omitempty"` // schemaRoot is the SchemaRoot of the collection that the document resides in. 
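Note on the wire format: the protobuf rename in net.pb.go only changes the generated Go identifier and the JSON name; the field number (1) and wire type (bytes) are untouched, so peers built before and after this change still decode each other's binary payloads. A minimal round-trip sketch (hedged: it assumes the generated package is importable as `github.com/sourcenetwork/defradb/net/pb` and marshalled with the standard `google.golang.org/protobuf/proto` API; the docID value is a placeholder):

```
package main

import (
	"fmt"

	"google.golang.org/protobuf/proto"

	pb "github.com/sourcenetwork/defradb/net/pb"
)

func main() {
	// The renamed DocID field still serializes as protobuf field 1 (bytes),
	// exactly as the old DocKey field did; "bae-example" is a placeholder.
	body := &pb.PushLogRequest_Body{DocID: []byte("bae-example")}

	raw, err := proto.Marshal(body)
	if err != nil {
		panic(err)
	}

	decoded := &pb.PushLogRequest_Body{}
	if err := proto.Unmarshal(raw, decoded); err != nil {
		panic(err)
	}
	fmt.Printf("docID round-trips via field 1: %s\n", decoded.DocID)
}
```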
@@ -565,9 +565,9 @@ func (*PushLogRequest_Body) Descriptor() ([]byte, []int) { return file_net_proto_rawDescGZIP(), []int{7, 0} } -func (x *PushLogRequest_Body) GetDocKey() []byte { +func (x *PushLogRequest_Body) GetDocID() []byte { if x != nil { - return x.DocKey + return x.DocID } return nil } @@ -604,59 +604,59 @@ var File_net_proto protoreflect.FileDescriptor var file_net_proto_rawDesc = []byte{ 0x0a, 0x09, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x06, 0x6e, 0x65, 0x74, - 0x2e, 0x70, 0x62, 0x22, 0x53, 0x0a, 0x08, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x12, - 0x16, 0x0a, 0x06, 0x64, 0x6f, 0x63, 0x4b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, - 0x06, 0x64, 0x6f, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x65, 0x61, 0x64, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x68, 0x65, 0x61, 0x64, 0x1a, 0x1b, 0x0a, 0x03, 0x4c, - 0x6f, 0x67, 0x12, 0x14, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x22, 0x14, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x44, - 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x12, - 0x0a, 0x10, 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x70, - 0x6c, 0x79, 0x22, 0x15, 0x0a, 0x13, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, - 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x13, 0x0a, 0x11, 0x50, 0x75, 0x73, - 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x0f, - 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, - 0x0d, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0xd6, - 0x01, 0x0a, 0x0e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x2f, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x1b, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x42, 0x6f, 0x64, 0x79, 0x52, 0x04, 0x62, 0x6f, - 0x64, 0x79, 0x1a, 0x92, 0x01, 0x0a, 0x04, 0x42, 0x6f, 0x64, 0x79, 0x12, 0x16, 0x0a, 0x06, 0x64, - 0x6f, 0x63, 0x4b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x64, 0x6f, 0x63, - 0x4b, 0x65, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x63, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, - 0x52, 0x03, 0x63, 0x69, 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, - 0x6f, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0a, 0x73, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x12, - 0x26, 0x0a, 0x03, 0x6c, 0x6f, 0x67, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6e, - 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x4c, - 0x6f, 0x67, 0x52, 0x03, 0x6c, 0x6f, 0x67, 0x22, 0x13, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x48, 0x65, - 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x0e, 0x0a, 0x0c, - 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x11, 0x0a, 0x0f, - 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x32, - 0xd1, 0x02, 0x0a, 0x07, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x45, 0x0a, 0x0b, 0x47, - 0x65, 0x74, 
0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x12, 0x1a, 0x2e, 0x6e, 0x65, 0x74, - 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, + 0x2e, 0x70, 0x62, 0x22, 0x51, 0x0a, 0x08, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x12, + 0x14, 0x0a, 0x05, 0x64, 0x6f, 0x63, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, + 0x64, 0x6f, 0x63, 0x49, 0x44, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x65, 0x61, 0x64, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x0c, 0x52, 0x04, 0x68, 0x65, 0x61, 0x64, 0x1a, 0x1b, 0x0a, 0x03, 0x4c, 0x6f, 0x67, + 0x12, 0x14, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, + 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x22, 0x14, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, + 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x12, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, - 0x22, 0x00, 0x12, 0x48, 0x0a, 0x0c, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, - 0x70, 0x68, 0x12, 0x1b, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, - 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x19, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, - 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x36, 0x0a, 0x06, - 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x12, 0x15, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, - 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x13, 0x2e, - 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, - 0x6c, 0x79, 0x22, 0x00, 0x12, 0x39, 0x0a, 0x07, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x12, - 0x16, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x14, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, - 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, - 0x42, 0x0a, 0x0a, 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x12, 0x19, 0x2e, - 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, - 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, - 0x62, 0x2e, 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, - 0x79, 0x22, 0x00, 0x42, 0x0a, 0x5a, 0x08, 0x2f, 0x3b, 0x6e, 0x65, 0x74, 0x5f, 0x70, 0x62, 0x62, - 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x22, 0x15, 0x0a, 0x13, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x13, 0x0a, 0x11, 0x50, 0x75, 0x73, 0x68, 0x44, + 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x0f, 0x0a, 0x0d, + 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x0d, 0x0a, + 0x0b, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0xd4, 0x01, 0x0a, + 0x0e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x2f, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, + 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x52, 
0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x42, 0x6f, 0x64, 0x79, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, + 0x1a, 0x90, 0x01, 0x0a, 0x04, 0x42, 0x6f, 0x64, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x64, 0x6f, 0x63, + 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x64, 0x6f, 0x63, 0x49, 0x44, 0x12, + 0x10, 0x0a, 0x03, 0x63, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x03, 0x63, 0x69, + 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x6f, 0x6f, 0x74, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0a, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x6f, 0x6f, + 0x74, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x07, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x26, 0x0a, 0x03, 0x6c, + 0x6f, 0x67, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, + 0x62, 0x2e, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x4c, 0x6f, 0x67, 0x52, 0x03, + 0x6c, 0x6f, 0x67, 0x22, 0x13, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, + 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x0e, 0x0a, 0x0c, 0x50, 0x75, 0x73, 0x68, + 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x11, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x48, + 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x32, 0xd1, 0x02, 0x0a, 0x07, + 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x45, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x44, 0x6f, + 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x12, 0x1a, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, + 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x44, + 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x48, + 0x0a, 0x0c, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x12, 0x1b, + 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, + 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x6e, 0x65, + 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, + 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x36, 0x0a, 0x06, 0x47, 0x65, 0x74, 0x4c, + 0x6f, 0x67, 0x12, 0x15, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x4c, + 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x13, 0x2e, 0x6e, 0x65, 0x74, 0x2e, + 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, + 0x12, 0x39, 0x0a, 0x07, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x12, 0x16, 0x2e, 0x6e, 0x65, + 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x14, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, + 0x68, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x42, 0x0a, 0x0a, 0x47, + 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x12, 0x19, 0x2e, 0x6e, 0x65, 0x74, 0x2e, + 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, + 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x42, + 0x0a, 0x5a, 0x08, 0x2f, 0x3b, 0x6e, 0x65, 0x74, 0x5f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x33, 
} var ( diff --git a/net/pb/net.proto b/net/pb/net.proto index 45c38bb256..5b0ee35dfb 100644 --- a/net/pb/net.proto +++ b/net/pb/net.proto @@ -6,7 +6,7 @@ option go_package = "/;net_pb"; // Log represents a thread log. message Document { // ID of the document. - bytes docKey = 1; + bytes docID = 1; // head of the log. bytes head = 4; @@ -33,8 +33,8 @@ message PushLogRequest { Body body = 1; message Body { - // docKey is the DocKey of the document that is affected by the log. - bytes docKey = 1; + // docID is the ID of the document that is affected by the log. + bytes docID = 1; // cid is the CID of the composite of the document. bytes cid = 2; // schemaRoot is the SchemaRoot of the collection that the document resides in. diff --git a/net/pb/net_grpc.pb.go b/net/pb/net_grpc.pb.go index bad62cdad7..75ae790ab6 100644 --- a/net/pb/net_grpc.pb.go +++ b/net/pb/net_grpc.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go-grpc. DO NOT EDIT. // versions: // - protoc-gen-go-grpc v1.3.0 -// - protoc v3.12.4 +// - protoc v4.25.1 // source: net.proto package net_pb diff --git a/net/pb/net_vtproto.pb.go b/net/pb/net_vtproto.pb.go index ae28bba13d..2bae8f83f3 100644 --- a/net/pb/net_vtproto.pb.go +++ b/net/pb/net_vtproto.pb.go @@ -95,10 +95,10 @@ func (m *Document) MarshalToSizedBufferVT(dAtA []byte) (int, error) { i-- dAtA[i] = 0x22 } - if len(m.DocKey) > 0 { - i -= len(m.DocKey) - copy(dAtA[i:], m.DocKey) - i = encodeVarint(dAtA, i, uint64(len(m.DocKey))) + if len(m.DocID) > 0 { + i -= len(m.DocID) + copy(dAtA[i:], m.DocID) + i = encodeVarint(dAtA, i, uint64(len(m.DocID))) i-- dAtA[i] = 0xa } @@ -364,10 +364,10 @@ func (m *PushLogRequest_Body) MarshalToSizedBufferVT(dAtA []byte) (int, error) { i-- dAtA[i] = 0x12 } - if len(m.DocKey) > 0 { - i -= len(m.DocKey) - copy(dAtA[i:], m.DocKey) - i = encodeVarint(dAtA, i, uint64(len(m.DocKey))) + if len(m.DocID) > 0 { + i -= len(m.DocID) + copy(dAtA[i:], m.DocID) + i = encodeVarint(dAtA, i, uint64(len(m.DocID))) i-- dAtA[i] = 0xa } @@ -547,7 +547,7 @@ func (m *Document) SizeVT() (n int) { } var l int _ = l - l = len(m.DocKey) + l = len(m.DocID) if l > 0 { n += 1 + l + sov(uint64(l)) } @@ -625,7 +625,7 @@ func (m *PushLogRequest_Body) SizeVT() (n int) { } var l int _ = l - l = len(m.DocKey) + l = len(m.DocID) if l > 0 { n += 1 + l + sov(uint64(l)) } @@ -815,7 +815,7 @@ func (m *Document) UnmarshalVT(dAtA []byte) error { switch fieldNum { case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field DocKey", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field DocID", wireType) } var byteLen int for shift := uint(0); ; shift += 7 { @@ -842,9 +842,9 @@ func (m *Document) UnmarshalVT(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.DocKey = append(m.DocKey[:0], dAtA[iNdEx:postIndex]...) - if m.DocKey == nil { - m.DocKey = []byte{} + m.DocID = append(m.DocID[:0], dAtA[iNdEx:postIndex]...) + if m.DocID == nil { + m.DocID = []byte{} } iNdEx = postIndex case 4: @@ -1240,7 +1240,7 @@ func (m *PushLogRequest_Body) UnmarshalVT(dAtA []byte) error { switch fieldNum { case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field DocKey", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field DocID", wireType) } var byteLen int for shift := uint(0); ; shift += 7 { @@ -1267,9 +1267,9 @@ func (m *PushLogRequest_Body) UnmarshalVT(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.DocKey = append(m.DocKey[:0], dAtA[iNdEx:postIndex]...) 
- if m.DocKey == nil { - m.DocKey = []byte{} + m.DocID = append(m.DocID[:0], dAtA[iNdEx:postIndex]...) + if m.DocID == nil { + m.DocID = []byte{} } iNdEx = postIndex case 2: diff --git a/net/peer.go b/net/peer.go index 2e702584a8..acdba2e9c8 100644 --- a/net/peer.go +++ b/net/peer.go @@ -64,7 +64,7 @@ type Peer struct { p2pRPC *grpc.Server // rpc server over the P2P network // Used to close the dagWorker pool for a given document. - // The string represents a dockey. + // The string represents a docID. closeJob chan string sendJobs chan *dagJob @@ -266,7 +266,7 @@ func (p *Peer) handleBroadcastLoop() { // RegisterNewDocument registers a new document with the peer node. func (p *Peer) RegisterNewDocument( ctx context.Context, - dockey client.DocKey, + docID client.DocID, c cid.Cid, nd ipld.Node, schemaRoot string, @@ -274,23 +274,23 @@ func (p *Peer) RegisterNewDocument( log.Debug( p.ctx, "Registering a new document for our peer node", - logging.NewKV("DocKey", dockey.String()), + logging.NewKV("DocID", docID.String()), ) // register topic - if err := p.server.addPubSubTopic(dockey.String(), !p.server.hasPubSubTopic(schemaRoot)); err != nil { + if err := p.server.addPubSubTopic(docID.String(), !p.server.hasPubSubTopic(schemaRoot)); err != nil { log.ErrorE( p.ctx, "Failed to create new pubsub topic", err, - logging.NewKV("DocKey", dockey.String()), + logging.NewKV("DocID", docID.String()), ) return err } // publish log body := &pb.PushLogRequest_Body{ - DocKey: []byte(dockey.String()), + DocID: []byte(docID.String()), Cid: c.Bytes(), SchemaRoot: []byte(schemaRoot), Creator: p.host.ID().String(), @@ -309,18 +309,18 @@ func (p *Peer) pushToReplicator( ctx context.Context, txn datastore.Txn, collection client.Collection, - keysCh <-chan client.DocKeysResult, + docIDsCh <-chan client.DocIDResult, pid peer.ID, ) { - for key := range keysCh { - if key.Err != nil { - log.ErrorE(ctx, "Key channel error", key.Err) + for docIDResult := range docIDsCh { + if docIDResult.Err != nil { + log.ErrorE(ctx, "Key channel error", docIDResult.Err) continue } - dockey := core.DataStoreKeyFromDocKey(key.Key) + docID := core.DataStoreKeyFromDocID(docIDResult.ID) headset := clock.NewHeadSet( txn.Headstore(), - dockey.WithFieldId(core.COMPOSITE_NAMESPACE).ToHeadStoreKey(), + docID.WithFieldId(core.COMPOSITE_NAMESPACE).ToHeadStoreKey(), ) cids, priority, err := headset.List(ctx) if err != nil { @@ -328,7 +328,7 @@ func (p *Peer) pushToReplicator( ctx, "Failed to get heads", err, - logging.NewKV("DocKey", key.Key.String()), + logging.NewKV("DocID", docIDResult.ID.String()), logging.NewKV("PeerID", pid), logging.NewKV("Collection", collection.Name())) continue @@ -352,7 +352,7 @@ func (p *Peer) pushToReplicator( } evt := events.Update{ - DocKey: key.Key.String(), + DocID: docIDResult.ID.String(), Cid: c, SchemaRoot: collection.SchemaRoot(), Block: nd, @@ -420,14 +420,14 @@ func (p *Peer) loadP2PCollections(ctx context.Context) (map[string]struct{}, err } func (p *Peer) handleDocCreateLog(evt events.Update) error { - dockey, err := client.NewDocKeyFromString(evt.DocKey) + docID, err := client.NewDocIDFromString(evt.DocID) if err != nil { - return NewErrFailedToGetDockey(err) + return NewErrFailedToGetDocID(err) } // We need to register the document before pushing to the replicators if we want to // ensure that we have subscribed to the topic. 
- err = p.RegisterNewDocument(p.ctx, dockey, evt.Cid, evt.Block, evt.SchemaRoot) + err = p.RegisterNewDocument(p.ctx, docID, evt.Cid, evt.Block, evt.SchemaRoot) if err != nil { return err } @@ -438,19 +438,19 @@ func (p *Peer) handleDocCreateLog(evt events.Update) error { } func (p *Peer) handleDocUpdateLog(evt events.Update) error { - dockey, err := client.NewDocKeyFromString(evt.DocKey) + docID, err := client.NewDocIDFromString(evt.DocID) if err != nil { - return NewErrFailedToGetDockey(err) + return NewErrFailedToGetDocID(err) } log.Debug( p.ctx, "Preparing pubsub pushLog request from broadcast", - logging.NewKV("DocKey", dockey), + logging.NewKV("DocID", docID), logging.NewKV("CID", evt.Cid), logging.NewKV("SchemaRoot", evt.SchemaRoot)) body := &pb.PushLogRequest_Body{ - DocKey: []byte(dockey.String()), + DocID: []byte(docID.String()), Cid: evt.Cid.Bytes(), SchemaRoot: []byte(evt.SchemaRoot), Creator: p.host.ID().String(), @@ -465,8 +465,8 @@ func (p *Peer) handleDocUpdateLog(evt events.Update) error { // push to each peer (replicator) p.pushLogToReplicators(p.ctx, evt) - if err := p.server.publishLog(p.ctx, evt.DocKey, req); err != nil { - return NewErrPublishingToDockeyTopic(err, evt.Cid.String(), evt.DocKey) + if err := p.server.publishLog(p.ctx, evt.DocID, req); err != nil { + return NewErrPublishingToDocIDTopic(err, evt.Cid.String(), evt.DocID) } if err := p.server.publishLog(p.ctx, evt.SchemaRoot, req); err != nil { @@ -479,7 +479,7 @@ func (p *Peer) handleDocUpdateLog(evt events.Update) error { func (p *Peer) pushLogToReplicators(ctx context.Context, lg events.Update) { // push to each peer (replicator) peers := make(map[string]struct{}) - for _, peer := range p.ps.ListPeers(lg.DocKey) { + for _, peer := range p.ps.ListPeers(lg.DocID) { peers[peer.String()] = struct{}{} } for _, peer := range p.ps.ListPeers(lg.SchemaRoot) { @@ -503,7 +503,7 @@ func (p *Peer) pushLogToReplicators(ctx context.Context, lg events.Update) { p.ctx, "Failed pushing log", err, - logging.NewKV("DocKey", lg.DocKey), + logging.NewKV("DocID", lg.DocID), logging.NewKV("CID", lg.Cid), logging.NewKV("PeerID", peerID)) } diff --git a/net/peer_collection.go b/net/peer_collection.go index 58f83f7aa8..02bbb6e9a6 100644 --- a/net/peer_collection.go +++ b/net/peer_collection.go @@ -65,16 +65,16 @@ func (p *Peer) AddP2PCollections(ctx context.Context, collectionIDs []string) er // from the pubsub topics to avoid receiving duplicate events. removedTopics := []string{} for _, col := range storeCollections { - keyChan, err := col.GetAllDocKeys(p.ctx) + keyChan, err := col.GetAllDocIDs(p.ctx) if err != nil { return err } for key := range keyChan { - err := p.server.removePubSubTopic(key.Key.String()) + err := p.server.removePubSubTopic(key.ID.String()) if err != nil { return p.rollbackRemovePubSubTopics(removedTopics, err) } - removedTopics = append(removedTopics, key.Key.String()) + removedTopics = append(removedTopics, key.ID.String()) } } @@ -130,16 +130,16 @@ func (p *Peer) RemoveP2PCollections(ctx context.Context, collectionIDs []string) // to the pubsub topics. 
addedTopics := []string{} for _, col := range storeCollections { - keyChan, err := col.GetAllDocKeys(p.ctx) + keyChan, err := col.GetAllDocIDs(p.ctx) if err != nil { return err } for key := range keyChan { - err := p.server.addPubSubTopic(key.Key.String(), true) + err := p.server.addPubSubTopic(key.ID.String(), true) if err != nil { return p.rollbackAddPubSubTopics(addedTopics, err) } - addedTopics = append(addedTopics, key.Key.String()) + addedTopics = append(addedTopics, key.ID.String()) } } diff --git a/net/peer_replicator.go b/net/peer_replicator.go index c444dee58f..0506e018c4 100644 --- a/net/peer_replicator.go +++ b/net/peer_replicator.go @@ -92,9 +92,9 @@ func (p *Peer) SetReplicator(ctx context.Context, rep client.Replicator) error { // push all collection documents to the replicator peer for _, col := range added { - keysCh, err := col.WithTxn(txn).GetAllDocKeys(ctx) + keysCh, err := col.WithTxn(txn).GetAllDocIDs(ctx) if err != nil { - return NewErrReplicatorDocKey(err, col.Name(), rep.Info.ID) + return NewErrReplicatorDocID(err, col.Name(), rep.Info.ID) } p.pushToReplicator(ctx, txn, col, keysCh, rep.Info.ID) } diff --git a/net/peer_test.go b/net/peer_test.go index cdbc4581dc..780ae74e35 100644 --- a/net/peer_test.go +++ b/net/peer_test.go @@ -187,7 +187,7 @@ func TestNewPeer_WithExistingTopic_TopicAlreadyExistsError(t *testing.T) { ) require.NoError(t, err) - _, err = rpc.NewTopic(ctx, ps, h.ID(), doc.Key().String(), true) + _, err = rpc.NewTopic(ctx, ps, h.ID(), doc.ID().String(), true) require.NoError(t, err) _, err = NewPeer(ctx, db, h, nil, ps, nil, nil) @@ -341,7 +341,7 @@ func TestRegisterNewDocument_NoError(t *testing.T) { cid, err := createCID(doc) require.NoError(t, err) - err = n.RegisterNewDocument(ctx, doc.Key(), cid, &EmptyNode{}, col.SchemaRoot()) + err = n.RegisterNewDocument(ctx, doc.ID(), cid, &EmptyNode{}, col.SchemaRoot()) require.NoError(t, err) } @@ -362,13 +362,13 @@ func TestRegisterNewDocument_RPCTopicAlreadyRegisteredError(t *testing.T) { doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) require.NoError(t, err) - _, err = rpc.NewTopic(ctx, n.Peer.ps, n.Peer.host.ID(), doc.Key().String(), true) + _, err = rpc.NewTopic(ctx, n.Peer.ps, n.Peer.host.ID(), doc.ID().String(), true) require.NoError(t, err) cid, err := createCID(doc) require.NoError(t, err) - err = n.RegisterNewDocument(ctx, doc.Key(), cid, &EmptyNode{}, col.SchemaRoot()) + err = n.RegisterNewDocument(ctx, doc.ID(), cid, &EmptyNode{}, col.SchemaRoot()) require.Equal(t, err.Error(), "creating topic: joining topic: topic already exists") } @@ -482,7 +482,7 @@ func TestPushToReplicator_SingleDocumentNoPeer_FailedToReplicateLogError(t *test err = col.Create(ctx, doc) require.NoError(t, err) - keysCh, err := col.GetAllDocKeys(ctx) + keysCh, err := col.GetAllDocIDs(ctx) require.NoError(t, err) txn, err := db.NewTxn(ctx, true) @@ -805,14 +805,14 @@ func TestHandleDocCreateLog_NoError(t *testing.T) { delta := &crdt.CompositeDAGDelta{ SchemaVersionID: col.Schema().VersionID, Priority: 1, - DocKey: doc.Key().Bytes(), + DocID: doc.ID().Bytes(), } node, err := makeNode(delta, []cid.Cid{docCid}) require.NoError(t, err) err = n.handleDocCreateLog(events.Update{ - DocKey: doc.Key().String(), + DocID: doc.ID().String(), Cid: docCid, SchemaRoot: col.SchemaRoot(), Block: node, @@ -821,15 +821,15 @@ func TestHandleDocCreateLog_NoError(t *testing.T) { require.NoError(t, err) } -func TestHandleDocCreateLog_WithInvalidDockey_NoError(t *testing.T) { +func 
TestHandleDocCreateLog_WithInvalidDocID_NoError(t *testing.T) { ctx := context.Background() _, n := newTestNode(ctx, t) defer n.Close() err := n.handleDocCreateLog(events.Update{ - DocKey: "some-invalid-key", + DocID: "some-invalid-key", }) - require.ErrorContains(t, err, "failed to get DocKey from broadcast message: selected encoding not supported") + require.ErrorContains(t, err, "failed to get DocID from broadcast message: selected encoding not supported") } func TestHandleDocCreateLog_WithExistingTopic_TopicExistsError(t *testing.T) { @@ -852,11 +852,11 @@ func TestHandleDocCreateLog_WithExistingTopic_TopicExistsError(t *testing.T) { err = col.Create(ctx, doc) require.NoError(t, err) - _, err = rpc.NewTopic(ctx, n.ps, n.host.ID(), doc.Key().String(), true) + _, err = rpc.NewTopic(ctx, n.ps, n.host.ID(), doc.ID().String(), true) require.NoError(t, err) err = n.handleDocCreateLog(events.Update{ - DocKey: doc.Key().String(), + DocID: doc.ID().String(), SchemaRoot: col.SchemaRoot(), }) require.ErrorContains(t, err, "topic already exists") @@ -888,14 +888,14 @@ func TestHandleDocUpdateLog_NoError(t *testing.T) { delta := &crdt.CompositeDAGDelta{ SchemaVersionID: col.Schema().VersionID, Priority: 1, - DocKey: doc.Key().Bytes(), + DocID: doc.ID().Bytes(), } node, err := makeNode(delta, []cid.Cid{docCid}) require.NoError(t, err) err = n.handleDocUpdateLog(events.Update{ - DocKey: doc.Key().String(), + DocID: doc.ID().String(), Cid: docCid, SchemaRoot: col.SchemaRoot(), Block: node, @@ -904,18 +904,18 @@ func TestHandleDocUpdateLog_NoError(t *testing.T) { require.NoError(t, err) } -func TestHandleDoUpdateLog_WithInvalidDockey_NoError(t *testing.T) { +func TestHandleDoUpdateLog_WithInvalidDocID_NoError(t *testing.T) { ctx := context.Background() _, n := newTestNode(ctx, t) defer n.Close() err := n.handleDocUpdateLog(events.Update{ - DocKey: "some-invalid-key", + DocID: "some-invalid-key", }) - require.ErrorContains(t, err, "failed to get DocKey from broadcast message: selected encoding not supported") + require.ErrorContains(t, err, "failed to get DocID from broadcast message: selected encoding not supported") } -func TestHandleDocUpdateLog_WithExistingDockeyTopic_TopicExistsError(t *testing.T) { +func TestHandleDocUpdateLog_WithExistingDocIDTopic_TopicExistsError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) defer n.Close() @@ -941,17 +941,17 @@ func TestHandleDocUpdateLog_WithExistingDockeyTopic_TopicExistsError(t *testing. delta := &crdt.CompositeDAGDelta{ SchemaVersionID: col.Schema().VersionID, Priority: 1, - DocKey: doc.Key().Bytes(), + DocID: doc.ID().Bytes(), } node, err := makeNode(delta, []cid.Cid{docCid}) require.NoError(t, err) - _, err = rpc.NewTopic(ctx, n.ps, n.host.ID(), doc.Key().String(), true) + _, err = rpc.NewTopic(ctx, n.ps, n.host.ID(), doc.ID().String(), true) require.NoError(t, err) err = n.handleDocUpdateLog(events.Update{ - DocKey: doc.Key().String(), + DocID: doc.ID().String(), Cid: docCid, SchemaRoot: col.SchemaRoot(), Block: node, @@ -985,7 +985,7 @@ func TestHandleDocUpdateLog_WithExistingSchemaTopic_TopicExistsError(t *testing. delta := &crdt.CompositeDAGDelta{ SchemaVersionID: col.Schema().VersionID, Priority: 1, - DocKey: doc.Key().Bytes(), + DocID: doc.ID().Bytes(), } node, err := makeNode(delta, []cid.Cid{docCid}) @@ -995,7 +995,7 @@ func TestHandleDocUpdateLog_WithExistingSchemaTopic_TopicExistsError(t *testing. 
require.NoError(t, err) err = n.handleDocUpdateLog(events.Update{ - DocKey: doc.Key().String(), + DocID: doc.ID().String(), Cid: docCid, SchemaRoot: col.SchemaRoot(), Block: node, diff --git a/net/process.go b/net/process.go index 3d776cc1c1..38a5a077fb 100644 --- a/net/process.go +++ b/net/process.go @@ -69,7 +69,7 @@ func (bp *blockProcessor) mergeBlocks(ctx context.Context) { ctx, "Failed to process block", err, - logging.NewKV("DocKey", bp.dsKey.DocKey), + logging.NewKV("DocID", bp.dsKey.DocID), logging.NewKV("CID", nd.Cid()), ) } @@ -111,7 +111,7 @@ func (bp *blockProcessor) processBlock(ctx context.Context, nd ipld.Node, field ctx, "Failed to process block", err, - logging.NewKV("DocKey", bp.dsKey.DocKey), + logging.NewKV("DocID", bp.dsKey.DocID), logging.NewKV("CID", nd.Cid()), ) } @@ -132,7 +132,7 @@ func initCRDTForType( description := col.Description() if field == "" { // empty field name implies composite type ctype = client.COMPOSITE - key = base.MakeCollectionKey( + key = base.MakeDataStoreKeyWithCollectionDescription( description, ).WithInstanceInfo( dsKey, @@ -155,7 +155,7 @@ func initCRDTForType( } ctype = fd.Typ fieldID := fd.ID.String() - key = base.MakeCollectionKey(description).WithInstanceInfo(dsKey).WithFieldId(fieldID) + key = base.MakeDataStoreKeyWithCollectionDescription(description).WithInstanceInfo(dsKey).WithFieldId(fieldID) log.Debug(ctx, "Got CRDT Type", logging.NewKV("CType", ctype), logging.NewKV("Field", field)) return merklecrdt.NewMerkleLWWRegister( diff --git a/net/server.go b/net/server.go index b770e3cf2b..e93000d1b9 100644 --- a/net/server.go +++ b/net/server.go @@ -94,8 +94,8 @@ func newServer(p *Peer, db client.DB, opts ...grpc.DialOption) (*server, error) return nil, err } - // Get all DocKeys across all collections in the DB - log.Debug(p.ctx, "Getting all existing DocKey...") + // Get all DocIDs across all collections in the DB + log.Debug(p.ctx, "Getting all existing DocIDs...") cols, err := s.db.GetAllCollections(s.peer.ctx) if err != nil { return nil, err @@ -103,28 +103,28 @@ func newServer(p *Peer, db client.DB, opts ...grpc.DialOption) (*server, error) i := 0 for _, col := range cols { - // If we subscribed to the collection, we skip subscribing to the collection's dockeys. + // If we subscribed to the collection, we skip subscribing to the collection's docIDs. if _, ok := colMap[col.SchemaRoot()]; ok { continue } - keyChan, err := col.GetAllDocKeys(p.ctx) + docIDChan, err := col.GetAllDocIDs(p.ctx) if err != nil { return nil, err } - for key := range keyChan { + for docID := range docIDChan { log.Debug( p.ctx, - "Registering existing DocKey pubsub topic", - logging.NewKV("DocKey", key.Key.String()), + "Registering existing DocID pubsub topic", + logging.NewKV("DocID", docID.ID.String()), ) - if err := s.addPubSubTopic(key.Key.String(), true); err != nil { + if err := s.addPubSubTopic(docID.ID.String(), true); err != nil { return nil, err } i++ } } - log.Debug(p.ctx, "Finished registering all DocKey pubsub topics", logging.NewKV("Count", i)) + log.Debug(p.ctx, "Finished registering all DocID pubsub topics", logging.NewKV("Count", i)) } var err error @@ -166,29 +166,29 @@ type docQueue struct { mu sync.Mutex } -// add adds a docKey to the queue. If the docKey is already in the queue, it will -// wait for the docKey to be removed from the queue. For every add call, done must -// be called to remove the docKey from the queue. Otherwise, subsequent add calls will +// add adds a docID to the queue. 
If the docID is already in the queue, it will +// wait for the docID to be removed from the queue. For every add call, done must +// be called to remove the docID from the queue. Otherwise, subsequent add calls will // block forever. -func (dq *docQueue) add(docKey string) { +func (dq *docQueue) add(docID string) { dq.mu.Lock() - done, ok := dq.docs[docKey] + done, ok := dq.docs[docID] if !ok { - dq.docs[docKey] = make(chan struct{}) + dq.docs[docID] = make(chan struct{}) } dq.mu.Unlock() if ok { <-done - dq.add(docKey) + dq.add(docID) } } -func (dq *docQueue) done(docKey string) { +func (dq *docQueue) done(docID string) { dq.mu.Lock() defer dq.mu.Unlock() - done, ok := dq.docs[docKey] + done, ok := dq.docs[docID] if ok { - delete(dq.docs, docKey) + delete(dq.docs, docID) close(done) } } @@ -205,14 +205,14 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL if err != nil { return nil, err } - dockey, err := client.NewDocKeyFromString(string(req.Body.DocKey)) + docID, err := client.NewDocIDFromString(string(req.Body.DocID)) if err != nil { return nil, err } - s.docQueue.add(dockey.String()) + s.docQueue.add(docID.String()) defer func() { - s.docQueue.done(dockey.String()) + s.docQueue.done(docID.String()) if s.pushLogEmitter != nil { byPeer, err := libpeer.Decode(req.Body.Creator) if err != nil { @@ -247,7 +247,7 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL } schemaRoot := string(req.Body.SchemaRoot) - dsKey := core.DataStoreKeyFromDocKey(dockey) + dsKey := core.DataStoreKeyFromDocID(docID) var txnErr error for retry := 0; retry < s.peer.db.MaxTxnRetries(); retry++ { @@ -292,17 +292,17 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL ctx, "Failed to process remote block", err, - logging.NewKV("DocKey", dsKey.DocKey), + logging.NewKV("DocID", dsKey.DocID), logging.NewKV("CID", cid), ) } session.Wait() bp.mergeBlocks(ctx) - // dagWorkers specific to the dockey will have been spawned within handleChildBlocks. + // dagWorkers specific to the DocID will have been spawned within handleChildBlocks. // Once we are done with the dag syncing process, we can get rid of those workers. if s.peer.closeJob != nil { - s.peer.closeJob <- dsKey.DocKey + s.peer.closeJob <- dsKey.DocID } if txnErr = txn.Commit(ctx); txnErr != nil { @@ -312,10 +312,10 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL return &pb.PushLogReply{}, txnErr } - // Once processed, subscribe to the dockey topic on the pubsub network unless we already + // Once processed, subscribe to the DocID topic on the pubsub network unless we already // subscribe to the collection. if !s.hasPubSubTopic(col.SchemaRoot()) { - err = s.addPubSubTopic(dsKey.DocKey, true) + err = s.addPubSubTopic(dsKey.DocID, true) if err != nil { return nil, err } @@ -441,7 +441,7 @@ func (s *server) publishLog(ctx context.Context, topic string, req *pb.PushLogRe ctx, "Published log", logging.NewKV("CID", cid), - logging.NewKV("DocKey", topic), + logging.NewKV("DocID", topic), ) return nil } @@ -470,7 +470,7 @@ func (s *server) pubSubMessageHandler(from libpeer.ID, topic string, msg []byte) return nil, nil } -// pubSubEventHandler logs events from the subscribed dockey topics. +// pubSubEventHandler logs events from the subscribed DocID topics.
func (s *server) pubSubEventHandler(from libpeer.ID, topic string, msg []byte) { log.Info( s.peer.ctx, diff --git a/net/server_test.go b/net/server_test.go index 6b5c3a3e20..521a3b7634 100644 --- a/net/server_test.go +++ b/net/server_test.go @@ -81,11 +81,11 @@ func TestNewServerWithCollectionSubscribed(t *testing.T) { require.NoError(t, err) } -type mockDBDockeysError struct { +type mockDBDocIDsError struct { client.DB } -func (mDB *mockDBDockeysError) GetAllCollections(context.Context) ([]client.Collection, error) { +func (mDB *mockDBDocIDsError) GetAllCollections(context.Context) ([]client.Collection, error) { return []client.Collection{ &mockCollection{}, }, nil @@ -98,11 +98,11 @@ type mockCollection struct { func (mCol *mockCollection) SchemaRoot() string { return "mockColID" } -func (mCol *mockCollection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) { +func (mCol *mockCollection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) { return nil, mockError } -func TestNewServerWithGetAllDockeysError(t *testing.T) { +func TestNewServerWithGetAllDocIDsError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) @@ -112,7 +112,7 @@ func TestNewServerWithGetAllDockeysError(t *testing.T) { }`) require.NoError(t, err) - mDB := mockDBDockeysError{db} + mDB := mockDBDocIDsError{db} _, err = newServer(n.Peer, &mDB) require.ErrorIs(t, err, mockError) @@ -137,7 +137,7 @@ func TestNewServerWithAddTopicError(t *testing.T) { err = col.Create(ctx, doc) require.NoError(t, err) - _, err = rpc.NewTopic(ctx, n.Peer.ps, n.Peer.host.ID(), doc.Key().String(), true) + _, err = rpc.NewTopic(ctx, n.Peer.ps, n.Peer.host.ID(), doc.ID().String(), true) require.NoError(t, err) _, err = newServer(n.Peer, db) @@ -226,20 +226,20 @@ func TestDocQueue(t *testing.T) { docs: make(map[string]chan struct{}), } - testKey := "test" + testDocID := "test" - q.add(testKey) - go q.add(testKey) + q.add(testDocID) + go q.add(testDocID) // give time for the goroutine to block time.Sleep(10 * time.Millisecond) require.Len(t, q.docs, 1) - q.done(testKey) - // give time for the goroutine to add the key + q.done(testDocID) + // give time for the goroutine to add the docID time.Sleep(10 * time.Millisecond) q.mu.Lock() require.Len(t, q.docs, 1) q.mu.Unlock() - q.done(testKey) + q.done(testDocID) q.mu.Lock() require.Len(t, q.docs, 0) q.mu.Unlock() @@ -274,7 +274,7 @@ func TestPushLog(t *testing.T) { _, err = n.server.PushLog(ctx, &net_pb.PushLogRequest{ Body: &net_pb.PushLogRequest_Body{ - DocKey: []byte(doc.Key().String()), + DocID: []byte(doc.ID().String()), Cid: cid.Bytes(), SchemaRoot: []byte(col.SchemaRoot()), Creator: n.PeerID().String(), diff --git a/planner/commit.go b/planner/commit.go index b4fd3ed3c1..3caf6d2f4a 100644 --- a/planner/commit.go +++ b/planner/commit.go @@ -68,15 +68,15 @@ func (n *dagScanNode) Kind() string { func (n *dagScanNode) Init() error { if len(n.spans.Value) == 0 { - if n.commitSelect.DocKey.HasValue() { - key := core.DataStoreKey{}.WithDocKey(n.commitSelect.DocKey.Value()) + if n.commitSelect.DocID.HasValue() { + dsKey := core.DataStoreKey{}.WithDocID(n.commitSelect.DocID.Value()) if n.commitSelect.FieldID.HasValue() { field := n.commitSelect.FieldID.Value() - key = key.WithFieldId(field) + dsKey = dsKey.WithFieldId(field) } - n.spans = core.NewSpans(core.NewSpan(key, key.PrefixEnd())) + n.spans = core.NewSpans(core.NewSpan(dsKey, dsKey.PrefixEnd())) } } @@ -89,9 +89,9 @@ func (n *dagScanNode) Start() error { // Spans needs to parse the given 
span set. dagScanNode only // cares about the first value in the span set. The value is -// either a CID or a DocKey. +// either a CID or a DocID. // If it's a CID, set the node CID val -// if its a DocKey, set the node Key val (headset) +// if it's a DocID, set the node Key val (headset) func (n *dagScanNode) Spans(spans core.Spans) { if len(spans.Value) == 0 { return } @@ -291,7 +291,7 @@ All the dagScanNode endpoints use similar structures func (n *dagScanNode) dagBlockToNodeDoc(block blocks.Block) (core.Doc, []*ipld.Link, error) { commit := n.commitSelect.DocumentMapping.NewDoc() cid := block.Cid() - n.commitSelect.DocumentMapping.SetFirstOfName(&commit, "cid", cid.String()) + n.commitSelect.DocumentMapping.SetFirstOfName(&commit, request.CidFieldName, cid.String()) // decode the delta, get the priority and payload nd, err := dag.DecodeProtobuf(block.RawData()) @@ -305,18 +305,18 @@ func (n *dagScanNode) dagBlockToNodeDoc(block blocks.Block) (core.Doc, []*ipld.L return core.Doc{}, nil, err } - prio, ok := delta["Priority"].(uint64) + prio, ok := delta[request.DeltaArgPriority].(uint64) if !ok { return core.Doc{}, nil, ErrDeltaMissingPriority } - schemaVersionId, ok := delta["SchemaVersionID"].(string) + schemaVersionId, ok := delta[request.DeltaArgSchemaVersionID].(string) if !ok { return core.Doc{}, nil, ErrDeltaMissingSchemaVersionID } n.commitSelect.DocumentMapping.SetFirstOfName(&commit, request.SchemaVersionIDFieldName, schemaVersionId) - fieldName, ok := delta["FieldName"] + fieldName, ok := delta[request.DeltaArgFieldName] if !ok { return core.Doc{}, nil, ErrDeltaMissingFieldName } @@ -346,17 +346,17 @@ func (n *dagScanNode) dagBlockToNodeDoc(block blocks.Block) (core.Doc, []*ipld.L } n.commitSelect.DocumentMapping.SetFirstOfName(&commit, request.HeightFieldName, int64(prio)) - n.commitSelect.DocumentMapping.SetFirstOfName(&commit, request.DeltaFieldName, delta["Data"]) + n.commitSelect.DocumentMapping.SetFirstOfName(&commit, request.DeltaFieldName, delta[request.DeltaArgData]) n.commitSelect.DocumentMapping.SetFirstOfName(&commit, request.FieldNameFieldName, fieldName) n.commitSelect.DocumentMapping.SetFirstOfName(&commit, request.FieldIDFieldName, fieldID) - dockey, ok := delta["DocKey"].([]byte) + docID, ok := delta[request.DeltaArgDocID].([]byte) if !ok { - return core.Doc{}, nil, ErrDeltaMissingDockey + return core.Doc{}, nil, ErrDeltaMissingDocID } n.commitSelect.DocumentMapping.SetFirstOfName(&commit, - request.DockeyFieldName, string(dockey)) + request.DocIDArgName, string(docID)) cols, err := n.planner.db.GetCollectionsByVersionID(n.planner.ctx, schemaVersionId) if err != nil { diff --git a/planner/create.go b/planner/create.go index 618591ccfe..c8c48b658d 100644 --- a/planner/create.go +++ b/planner/create.go @@ -86,7 +86,7 @@ func (n *createNode) Next() (bool, error) { currentValue := n.documentMapping.NewDoc() - currentValue.SetKey(n.doc.Key().String()) + currentValue.SetID(n.doc.ID().String()) for i, value := range n.doc.Values() { if len(n.documentMapping.IndexesByName[i.Name()]) > 0 { n.documentMapping.SetFirstOfName(&currentValue, i.Name(), value.Value()) @@ -101,8 +101,8 @@ func (n *createNode) Next() (bool, error) { n.currentValue = currentValue desc := n.collection.Description() - docKey := base.MakeDocKey(desc, currentValue.GetKey()) - n.results.Spans(core.NewSpans(core.NewSpan(docKey, docKey.PrefixEnd()))) + docID := base.MakeDataStoreKeyWithCollectionAndDocID(desc, currentValue.GetID()) + n.results.Spans(core.NewSpans(core.NewSpan(docID, docID.PrefixEnd()))) err :=
n.results.Init() if err != nil { diff --git a/planner/delete.go b/planner/delete.go index de59cf30b7..b1096ffdb6 100644 --- a/planner/delete.go +++ b/planner/delete.go @@ -27,7 +27,7 @@ type deleteNode struct { source planNode filter *mapper.Filter - ids []string + docIDs []string execInfo deleteExecInfo } @@ -49,11 +49,11 @@ func (n *deleteNode) Next() (bool, error) { } n.currentValue = n.source.Value() - key, err := client.NewDocKeyFromString(n.currentValue.GetKey()) + docID, err := client.NewDocIDFromString(n.currentValue.GetID()) if err != nil { return false, err } - _, err = n.collection.DeleteWithKey(n.p.ctx, key) + _, err = n.collection.DeleteWithDocID(n.p.ctx, docID) if err != nil { return false, err } @@ -88,7 +88,7 @@ func (n *deleteNode) simpleExplain() (map[string]any, error) { simpleExplainMap := map[string]any{} // Add the document id(s) that request wants to delete. - simpleExplainMap[idsLabel] = n.ids + simpleExplainMap[request.DocIDsArgName] = n.docIDs // Add the filter attribute if it exists, otherwise have it nil. if n.filter == nil { @@ -131,7 +131,7 @@ func (p *Planner) DeleteDocs(parsed *mapper.Mutation) (planNode, error) { return &deleteNode{ p: p, filter: parsed.Filter, - ids: parsed.DocKeys.Value(), + docIDs: parsed.DocIDs.Value(), collection: col.WithTxn(p.txn), source: slctNode, docMapper: docMapper{parsed.DocumentMapping}, diff --git a/planner/errors.go b/planner/errors.go index c4856178f3..54db7a7c79 100644 --- a/planner/errors.go +++ b/planner/errors.go @@ -20,9 +20,9 @@ const ( ) var ( - ErrDeltaMissingSchemaVersionID = errors.New("commit Delta missing schema version id") + ErrDeltaMissingSchemaVersionID = errors.New("commit Delta missing schema version ID") ErrDeltaMissingPriority = errors.New("commit Delta missing priority key") - ErrDeltaMissingDockey = errors.New("commit Delta missing dockey") + ErrDeltaMissingDocID = errors.New("commit Delta missing document ID") ErrDeltaMissingFieldName = errors.New("commit Delta missing field name") ErrFailedToFindScanNode = errors.New("failed to find original scan node in plan graph") ErrMissingQueryOrMutation = errors.New("request is missing query or mutation operation statements") @@ -34,9 +34,6 @@ var ( ErrMissingChildValue = errors.New("expected child value, however none was yielded") ErrUnknownRelationType = errors.New("failed sub selection, unknown relation type") ErrUnknownExplainRequestType = errors.New("can not explain request of unknown type") - ErrSubTypeInit = errors.New(errSubTypeInit) - ErrFailedToCollectExecExplainInfo = errors.New(errFailedToCollectExecExplainInfo) - ErrUnknownDependency = errors.New(errUnknownDependency) ) func NewErrUnknownDependency(name string) error { diff --git a/planner/explain.go b/planner/explain.go index 07f96f9b0a..76e562dc94 100644 --- a/planner/explain.go +++ b/planner/explain.go @@ -56,10 +56,8 @@ const ( dataLabel = "data" fieldNameLabel = "fieldName" filterLabel = "filter" - idsLabel = "ids" joinRootLabel = "root" joinSubTypeLabel = "subType" - keysLabel = "_keys" limitLabel = "limit" offsetLabel = "offset" sourcesLabel = "sources" @@ -170,7 +168,7 @@ func buildDebugExplainGraph(source planNode) (map[string]any, error) { // // query @explain { // user { -// _key +// _docID // age // name // } diff --git a/planner/group.go b/planner/group.go index 0890b13d84..ae2a0c8bee 100644 --- a/planner/group.go +++ b/planner/group.go @@ -230,10 +230,10 @@ func (n *groupNode) simpleExplain() (map[string]any, error) { c := child.Targetable // Get targetable attribute(s) of this child. 
- if c.DocKeys.HasValue() { - childExplainGraph["docKeys"] = c.DocKeys.Value() + if c.DocIDs.HasValue() { + childExplainGraph[request.DocIDsArgName] = c.DocIDs.Value() } else { - childExplainGraph["docKeys"] = nil + childExplainGraph[request.DocIDsArgName] = nil } if c.Filter == nil { diff --git a/planner/mapper/commitSelect.go b/planner/mapper/commitSelect.go index c71e4fdc20..969a87e938 100644 --- a/planner/mapper/commitSelect.go +++ b/planner/mapper/commitSelect.go @@ -20,7 +20,7 @@ type CommitSelect struct { Select // The docID of the target document for which to get commits. - DocKey immutable.Option[string] + DocID immutable.Option[string] // The field for which commits have been requested. FieldID immutable.Option[string] @@ -42,7 +42,7 @@ func (s *CommitSelect) CloneTo(index int) Requestable { func (s *CommitSelect) cloneTo(index int) *CommitSelect { return &CommitSelect{ Select: *s.Select.cloneTo(index), - DocKey: s.DocKey, + DocID: s.DocID, FieldID: s.FieldID, Cid: s.Cid, } diff --git a/planner/mapper/mapper.go b/planner/mapper/mapper.go index 3771cb5475..ff7e19ff21 100644 --- a/planner/mapper/mapper.go +++ b/planner/mapper/mapper.go @@ -762,7 +762,7 @@ func getTopLevelInfo( // be fine for now schema = schemas[0] } else { - mapping.Add(core.DocKeyFieldIndex, request.KeyFieldName) + mapping.Add(core.DocIDFieldIndex, request.DocIDFieldName) schema = collection.Schema() } @@ -1035,7 +1035,7 @@ func resolveSecondaryRelationIDs( if !siblingFound { objectFieldName := strings.TrimSuffix(existingField.Name, request.RelatedObjectID) - // We only require the dockey of the related object, so an empty join is all we need. + // We only require the docID of the related object, so an empty join is all we need. join, err := constructEmptyJoin( ctx, store, @@ -1069,7 +1069,7 @@ func ToCommitSelect( } return &CommitSelect{ Select: *underlyingSelect, - DocKey: selectRequest.DocKey, + DocID: selectRequest.DocID, FieldID: selectRequest.FieldID, Depth: selectRequest.Depth, Cid: selectRequest.Cid, @@ -1096,7 +1096,7 @@ func ToMutation(ctx context.Context, store client.Store, mutationRequest *reques func toTargetable(index int, selectRequest *request.Select, docMap *core.DocumentMapping) Targetable { return Targetable{ Field: toField(index, selectRequest), - DocKeys: selectRequest.DocKeys, + DocIDs: selectRequest.DocIDs, Filter: ToFilter(selectRequest.Filter.Value(), docMap), Limit: toLimit(selectRequest.Limit, selectRequest.Offset), GroupBy: toGroupBy(selectRequest.GroupBy, docMap), @@ -1141,7 +1141,7 @@ func toFilterMap( sourceClause any, mapping *core.DocumentMapping, ) (connor.FilterKey, any) { - if strings.HasPrefix(sourceKey, "_") && sourceKey != request.KeyFieldName { + if strings.HasPrefix(sourceKey, "_") && sourceKey != request.DocIDFieldName { key := &Operator{ Operation: sourceKey, } diff --git a/planner/mapper/targetable.go b/planner/mapper/targetable.go index 0b571e6830..ae9d81e29a 100644 --- a/planner/mapper/targetable.go +++ b/planner/mapper/targetable.go @@ -192,9 +192,9 @@ type Targetable struct { // The basic field information of this property. Field - // A optional collection of docKeys that can be specified to restrict results + // An optional collection of docIDs that can be specified to restrict results // to those belonging to this set. - DocKeys immutable.Option[[]string] + DocIDs immutable.Option[[]string] // An optional filter that can be specified to restrict results to documents // that satisfy all of its conditions.
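For reviewers: the `DocKeys` to `DocIDs` rename on `Targetable` above is behavior-preserving. The field is an optional docID set: when unset, every document passes; when set, only documents whose docID is in the set are returned. A minimal runnable sketch of that check, with a hypothetical `Option` type standing in for `immutable.Option[[]string]` (only `HasValue` and `Value` are assumed):

```
package main

import "fmt"

// Option is a hypothetical stand-in for immutable.Option[[]string]; only the
// HasValue/Value methods used by the planner are modelled here.
type Option struct {
	valid  bool
	docIDs []string
}

func Some(docIDs []string) Option { return Option{valid: true, docIDs: docIDs} }
func None() Option                { return Option{} }

func (o Option) HasValue() bool  { return o.valid }
func (o Option) Value() []string { return o.docIDs }

// passes mirrors the renamed check: no DocIDs restriction means every
// document passes; otherwise only documents whose docID is in the set do.
func passes(restriction Option, docID string) bool {
	if !restriction.HasValue() {
		return true
	}
	for _, candidate := range restriction.Value() {
		if candidate == docID {
			return true
		}
	}
	return false
}

func main() {
	restricted := Some([]string{"bae-example-doc-1"}) // hypothetical docID strings
	fmt.Println(passes(restricted, "bae-example-doc-1")) // true
	fmt.Println(passes(restricted, "bae-example-doc-2")) // false
	fmt.Println(passes(None(), "bae-example-doc-2"))     // true (unrestricted)
}
```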
@@ -218,7 +218,7 @@ type Targetable struct { func (t *Targetable) cloneTo(index int) *Targetable { return &Targetable{ Field: *t.Field.cloneTo(index), - DocKeys: t.DocKeys, + DocIDs: t.DocIDs, Filter: t.Filter, Limit: t.Limit, GroupBy: t.GroupBy, diff --git a/planner/multi.go b/planner/multi.go index 02bd4a0fda..30bbc8338c 100644 --- a/planner/multi.go +++ b/planner/multi.go @@ -56,7 +56,7 @@ type appendNode interface { // Eg: // // user { -// _key +// _docID // name // friends { // name @@ -164,7 +164,7 @@ func (p *parallelNode) nextMerge(index int, plan mergeNode) (bool, error) { scan node ========= { - _key: bae-ALICE, + _docID: bae-ALICE, name: Alice, points: 124, verified: false @@ -175,7 +175,7 @@ typeJoin node(merge) { friends: [ { - _key: bae-BOB, + _docID: bae-BOB, name: bob, points: 99.9, verified: true, @@ -187,14 +187,14 @@ output ====== { - _key: bae-ALICE, + _docID: bae-ALICE, name: Alice, points: 124, verified: false, friends: [ { - _key: bae-BOB, + _docID: bae-BOB, name: bob, points: 99.9, verified: true, @@ -205,13 +205,13 @@ output */ func (p *parallelNode) nextAppend(index int, plan appendNode) (bool, error) { - key := p.currentValue.GetKey() + key := p.currentValue.GetID() if key == "" { return false, nil } // pass the doc key as a reference through the spans interface - spans := core.NewSpans(core.NewSpan(core.DataStoreKey{DocKey: key}, core.DataStoreKey{})) + spans := core.NewSpans(core.NewSpan(core.DataStoreKey{DocID: key}, core.DataStoreKey{})) plan.Spans(spans) err := plan.Init() if err != nil { @@ -239,7 +239,7 @@ func (p *parallelNode) nextAppend(index int, plan appendNode) (bool, error) { query { user { - _key + _docID name points verified @@ -253,7 +253,7 @@ query { scan node ========= { - _key: bae-ALICE, + _docID: bae-ALICE, name: Alice, points: 124, verified: false diff --git a/planner/scan.go b/planner/scan.go index 64a534da6d..19ae079f5f 100644 --- a/planner/scan.go +++ b/planner/scan.go @@ -170,7 +170,7 @@ func (n *scanNode) Start() error { func (n *scanNode) initScan() error { if !n.spans.HasValue { - start := base.MakeCollectionKey(n.col.Description()) + start := base.MakeDataStoreKeyWithCollectionDescription(n.col.Description()) n.spans = core.NewSpans(core.NewSpan(start, start.PrefixEnd())) } diff --git a/planner/select.go b/planner/select.go index 11b2ef510b..f1d85de9f3 100644 --- a/planner/select.go +++ b/planner/select.go @@ -114,7 +114,7 @@ type selectNode struct { // are defined in the subtype scan node. filter *mapper.Filter - keys immutable.Option[[]string] + docIDs immutable.Option[[]string] selectReq *mapper.Select groupSelects []*mapper.Select @@ -166,10 +166,10 @@ func (n *selectNode) Next() (bool, error) { n.execInfo.filterMatches++ - if n.keys.HasValue() { - docKey := n.currentValue.GetKey() - for _, key := range n.keys.Value() { - if docKey == key { + if n.docIDs.HasValue() { + docID := n.currentValue.GetID() + for _, docIDValue := range n.docIDs.Value() { + if docID == docIDValue { return true, nil } } @@ -199,11 +199,11 @@ func (n *selectNode) simpleExplain() (map[string]any, error) { simpleExplainMap[filterLabel] = n.filter.ToMap(n.documentMapping) } - // Add the keys attribute if it exists. - if !n.keys.HasValue() { - simpleExplainMap[keysLabel] = nil + // Add the docIDs attribute if it exists. 
+ if !n.docIDs.HasValue() { + simpleExplainMap[request.DocIDsArgName] = nil } else { - simpleExplainMap[keysLabel] = n.keys.Value() + simpleExplainMap[request.DocIDsArgName] = n.docIDs.Value() } return simpleExplainMap, nil @@ -255,7 +255,7 @@ func (n *selectNode) initSource() ([]aggregateNode, error) { origScan.filter = n.filter n.filter = nil - // If we have both a DocKey and a CID, then we need to run + // If we have both a DocID and a CID, then we need to run // a TimeTravel (History-Traversing Versioned) query, which means // we need to propagate the values to the underlying VersionedFetcher if n.selectReq.Cid.HasValue() { @@ -264,21 +264,21 @@ func (n *selectNode) initSource() ([]aggregateNode, error) { return nil, err } spans := fetcher.NewVersionedSpan( - core.DataStoreKey{DocKey: n.selectReq.DocKeys.Value()[0]}, + core.DataStoreKey{DocID: n.selectReq.DocIDs.Value()[0]}, c, ) // @todo check len origScan.Spans(spans) - } else if n.selectReq.DocKeys.HasValue() { - // If we *just* have a DocKey(s), run a FindByDocKey(s) optimization - // if we have a FindByDockey filter, create a span for it + } else if n.selectReq.DocIDs.HasValue() { + // If we *just* have a DocID(s), run a FindByDocID(s) optimization + // if we have a FindByDocID filter, create a span for it // and propagate it to the scanNode // @todo: When running the optimizer, check if the filter object - // contains a _key equality condition, and upgrade it to a point lookup + // contains a _docID equality condition, and upgrade it to a point lookup // instead of a prefix scan + filter via the Primary Index (0), like here: - spans := make([]core.Span, len(n.selectReq.DocKeys.Value())) - for i, docKey := range n.selectReq.DocKeys.Value() { - dockeyIndexKey := base.MakeDocKey(sourcePlan.collection.Description(), docKey) - spans[i] = core.NewSpan(dockeyIndexKey, dockeyIndexKey.PrefixEnd()) + spans := make([]core.Span, len(n.selectReq.DocIDs.Value())) + for i, docID := range n.selectReq.DocIDs.Value() { + docIDIndexKey := base.MakeDataStoreKeyWithCollectionAndDocID(sourcePlan.collection.Description(), docID) + spans[i] = core.NewSpan(docIDIndexKey, docIDIndexKey.PrefixEnd()) } origScan.Spans(core.NewSpans(spans...)) } @@ -352,7 +352,7 @@ func (n *selectNode) initFields(selectReq *mapper.Select) ([]aggregateNode, erro // of that Target version we are querying. // So instead of a LatestCommit subquery, we need // a OneCommit subquery, with the supplied parameters. 
- commitSlct.DocKey = immutable.Some(selectReq.DocKeys.Value()[0]) // @todo check length + commitSlct.DocID = immutable.Some(selectReq.DocIDs.Value()[0]) // @todo check length commitSlct.Cid = selectReq.Cid } @@ -413,7 +413,7 @@ func (p *Planner) SelectFromSource( selectReq: selectReq, docMapper: docMapper{selectReq.DocumentMapping}, filter: selectReq.Filter, - keys: selectReq.DocKeys, + docIDs: selectReq.DocIDs, } limit := selectReq.Limit orderBy := selectReq.OrderBy @@ -468,7 +468,7 @@ func (p *Planner) Select(selectReq *mapper.Select) (planNode, error) { s := &selectNode{ planner: p, filter: selectReq.Filter, - keys: selectReq.DocKeys, + docIDs: selectReq.DocIDs, selectReq: selectReq, docMapper: docMapper{selectReq.DocumentMapping}, } diff --git a/planner/type_join.go b/planner/type_join.go index 47ba07e96b..fc4e6009cf 100644 --- a/planner/type_join.go +++ b/planner/type_join.go @@ -435,7 +435,7 @@ func fetchPrimaryDoc(node, subNode planNode, parentProp string) (bool, error) { subDoc := subNode.Value() ind := subNode.DocumentMap().FirstIndexOfName(parentProp) - docKeyStr, isStr := subDoc.Fields[ind].(string) + docIDStr, isStr := subDoc.Fields[ind].(string) if !isStr { return false, nil } @@ -444,9 +444,9 @@ func fetchPrimaryDoc(node, subNode planNode, parentProp string) (bool, error) { if scan == nil { return false, nil } - rootDocKey := base.MakeDocKey(scan.col.Description(), docKeyStr) + dsKey := base.MakeDataStoreKeyWithCollectionAndDocID(scan.col.Description(), docIDStr) - spans := core.NewSpans(core.NewSpan(rootDocKey, rootDocKey.PrefixEnd())) + spans := core.NewSpans(core.NewSpan(dsKey, dsKey.PrefixEnd())) node.Spans(spans) @@ -543,15 +543,15 @@ func (join *invertibleTypeJoin) processSecondResult(secondDocs []core.Doc) (any, if join.secondaryFetchLimit == 1 { if len(secondDocs) != 0 { secondResult = secondDocs[0] - secondIDResult = secondDocs[0].GetKey() + secondIDResult = secondDocs[0].GetID() } } else { secondResult = secondDocs - secondDocKeys := make([]string, len(secondDocs)) + secondDocIDs := make([]string, len(secondDocs)) for i, doc := range secondDocs { - secondDocKeys[i] = doc.GetKey() + secondDocIDs[i] = doc.GetID() } - secondIDResult = secondDocKeys + secondIDResult = secondDocIDs } join.root.Value().Fields[join.subSelect.Index] = secondResult if join.secondaryFieldIndex.HasValue() { @@ -573,7 +573,7 @@ func (join *invertibleTypeJoin) Next() (bool, error) { secondDocs, err := fetchDocsWithFieldValue( join.dir.secondNode, join.dir.secondaryField, - firstDoc.GetKey(), + firstDoc.GetID(), join.secondaryFetchLimit, ) if err != nil { diff --git a/planner/type_join.md b/planner/type_join.md index cf4573431f..e566168881 100644 --- a/planner/type_join.md +++ b/planner/type_join.md @@ -11,7 +11,7 @@ type User { type Friend { name: String friendsDate: DateTime - user_id: DocKey + user_id: DocID } - > @@ -23,7 +23,7 @@ type Friend { { query { user { selectTopNode -> (source) selectNode -> (source) scanNode(user) -> filter: NIL - [_key] + [_docID] name // key = bae-KHDFLGHJFLDG @@ -39,13 +39,13 @@ selectTopNode - > selectNode -> MultiNode.children: []planNode -> multiScanNode -> TypeJoinNode(merge**) -> TypeJoinOneMany -> (one) multiScanNode(scanNode(user)**) -> } -> scanNode(user).Value() -> doc -> (many) selectNode - > scanNode(friend) -1. NEXT/VALUES MultiNode.doc = {_key: bae-KHDFLGHJFLDG, name: "BOB"} -2. NEXT/VALUES TypeJoinOneMany.one {_key: bae-KHDFLGHJFLDG, name: "BOB"} +1. NEXT/VALUES MultiNode.doc = {_docID: bae-KHDFLGHJFLDG, name: "BOB"} +2. 
NEXT/VALUES TypeJoinOneMany.one {_docID: bae-KHDFLGHJFLDG, name: "BOB"} 3. NEXT/VALUES (many).selectNode.doc = {name: "Eric", date: Oct29} LOOP -4. NEXT/VALUES TypeJoinNode {_key: bae-KHDFLGHJFLDG, name: "BOB"} + {friends: [{{name: "Eric", date: Oct29}}]} +4. NEXT/VALUES TypeJoinNode {_docID: bae-KHDFLGHJFLDG, name: "BOB"} + {friends: [{{name: "Eric", date: Oct29}}]} 5. NEXT/VALUES (many).selectNode.doc = {name: "Jimmy", date: Oct21} -6. NEXT/VALUES TypeJoinNode {_key: bae-KHDFLGHJFLDG, name: "BOB"} + {friends: [{name: "Eric", date: Oct29}, {name: "Jimmy", date: Oct21}]} +6. NEXT/VALUES TypeJoinNode {_docID: bae-KHDFLGHJFLDG, name: "BOB"} + {friends: [{name: "Eric", date: Oct29}, {name: "Jimmy", date: Oct21}]} GOTO LOOP // SPLIT FILTER @@ -65,7 +65,7 @@ query { { data: [ { - _key: bae-ALICE + _docID: bae-ALICE age: 22, name: "Alice", points: 45, @@ -80,7 +80,7 @@ query { }, { - _key: bae-CHARLIE + _docID: bae-CHARLIE age: 22, name: "Charlie", points: 45, @@ -142,7 +142,7 @@ type Address: { ... user: user - # user_id: DocKey + # user_id: DocID } query { diff --git a/planner/update.go b/planner/update.go index 36b5487c5e..78619bd55f 100644 --- a/planner/update.go +++ b/planner/update.go @@ -28,7 +28,8 @@ type updateNode struct { collection client.Collection filter *mapper.Filter - ids []string + + docIDs []string patch string @@ -62,11 +63,11 @@ func (n *updateNode) Next() (bool, error) { } n.currentValue = n.results.Value() - key, err := client.NewDocKeyFromString(n.currentValue.GetKey()) + docID, err := client.NewDocIDFromString(n.currentValue.GetID()) if err != nil { return false, err } - _, err = n.collection.UpdateWithKey(n.p.ctx, key, n.patch) + _, err = n.collection.UpdateWithDocID(n.p.ctx, docID, n.patch) if err != nil { return false, err } @@ -115,7 +116,7 @@ func (n *updateNode) simpleExplain() (map[string]any, error) { simpleExplainMap := map[string]any{} // Add the document id(s) that request wants to update. - simpleExplainMap[idsLabel] = n.ids + simpleExplainMap[request.DocIDsArgName] = n.docIDs // Add the filter attribute if it exists, otherwise have it nil. 
if n.filter == nil { @@ -157,7 +158,7 @@ func (p *Planner) UpdateDocs(parsed *mapper.Mutation) (planNode, error) { update := &updateNode{ p: p, filter: parsed.Filter, - ids: parsed.DocKeys.Value(), + docIDs: parsed.DocIDs.Value(), isUpdating: true, patch: parsed.Data, docMapper: docMapper{parsed.DocumentMapping}, diff --git a/request/graphql/parser/commit.go b/request/graphql/parser/commit.go index 8c9d3e47b5..e4d4c01903 100644 --- a/request/graphql/parser/commit.go +++ b/request/graphql/parser/commit.go @@ -31,9 +31,9 @@ func parseCommitSelect(schema gql.Schema, parent *gql.Object, field *ast.Field) for _, argument := range field.Arguments { prop := argument.Name.Value - if prop == request.DocKey { + if prop == request.DocIDArgName { raw := argument.Value.(*ast.StringValue) - commit.DocKey = immutable.Some(raw.Value) + commit.DocID = immutable.Some(raw.Value) } else if prop == request.Cid { raw := argument.Value.(*ast.StringValue) commit.Cid = immutable.Some(raw.Value) diff --git a/request/graphql/parser/mutation.go b/request/graphql/parser/mutation.go index 37dea7290b..0802c745d6 100644 --- a/request/graphql/parser/mutation.go +++ b/request/graphql/parser/mutation.go @@ -117,10 +117,10 @@ func parseMutation(schema gql.Schema, parent *gql.Object, field *ast.Field) (*re } mut.Filter = filter - } else if prop == request.Id { + } else if prop == request.DocIDArgName { raw := argument.Value.(*ast.StringValue) mut.IDs = immutable.Some([]string{raw.Value}) - } else if prop == request.Ids { + } else if prop == request.DocIDsArgName { raw := argument.Value.(*ast.ListValue) ids := make([]string, len(raw.Values)) for i, val := range raw.Values { diff --git a/request/graphql/parser/query.go b/request/graphql/parser/query.go index c76bde7b32..3213c7489a 100644 --- a/request/graphql/parser/query.go +++ b/request/graphql/parser/query.go @@ -124,16 +124,16 @@ func parseSelect( } slct.Filter = filter - case request.DocKey: // parse single dockey query field - val := astValue.(*ast.StringValue) - slct.DocKeys = immutable.Some([]string{val.Value}) - case request.DocKeys: - docKeyValues := astValue.(*ast.ListValue).Values - docKeys := make([]string, len(docKeyValues)) - for i, value := range docKeyValues { - docKeys[i] = value.(*ast.StringValue).Value + case request.DocIDArgName: // parse single DocID field + docIDValue := astValue.(*ast.StringValue) + slct.DocIDs = immutable.Some([]string{docIDValue.Value}) + case request.DocIDsArgName: + docIDValues := astValue.(*ast.ListValue).Values + docIDs := make([]string, len(docIDValues)) + for i, value := range docIDValues { + docIDs[i] = value.(*ast.StringValue).Value } - slct.DocKeys = immutable.Some(docKeys) + slct.DocIDs = immutable.Some(docIDs) case request.Cid: // parse single CID query field val := astValue.(*ast.StringValue) slct.CID = immutable.Some(val.Value) diff --git a/request/graphql/schema/collection.go b/request/graphql/schema/collection.go index 85f401fd35..bd0934d437 100644 --- a/request/graphql/schema/collection.go +++ b/request/graphql/schema/collection.go @@ -102,8 +102,8 @@ func collectionFromAstDefinition( ) (client.CollectionDefinition, error) { fieldDescriptions := []client.FieldDescription{ { - Name: request.KeyFieldName, - Kind: client.FieldKind_DocKey, + Name: request.DocIDFieldName, + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, } @@ -130,10 +130,10 @@ func collectionFromAstDefinition( // sort the fields lexicographically sort.Slice(fieldDescriptions, func(i, j int) bool { - // make sure that the _key (KeyFieldName) is 
always at the beginning - if fieldDescriptions[i].Name == request.KeyFieldName { + // make sure that the _docID is always at the beginning + if fieldDescriptions[i].Name == request.DocIDFieldName { return true - } else if fieldDescriptions[j].Name == request.KeyFieldName { + } else if fieldDescriptions[j].Name == request.DocIDFieldName { return false } return fieldDescriptions[i].Name < fieldDescriptions[j].Name @@ -336,8 +336,8 @@ func fieldsFromAST(field *ast.FieldDefinition, // An _id field is added for every 1-N relationship from this object. fieldDescriptions = append(fieldDescriptions, client.FieldDescription{ Name: fmt.Sprintf("%s_id", field.Name.Value), - Kind: client.FieldKind_DocKey, - Typ: defaultCRDTForFieldKind[client.FieldKind_DocKey], + Kind: client.FieldKind_DocID, + Typ: defaultCRDTForFieldKind[client.FieldKind_DocID], RelationType: client.Relation_Type_INTERNAL_ID, }) } else if kind == client.FieldKind_FOREIGN_OBJECT_ARRAY { @@ -422,7 +422,7 @@ func astTypeToKind(t ast.Type) (client.FieldKind, error) { case *ast.Named: switch astTypeVal.Name.Value { case typeID: - return client.FieldKind_DocKey, nil + return client.FieldKind_DocID, nil case typeBoolean: return client.FieldKind_BOOL, nil case typeInt: diff --git a/request/graphql/schema/descriptions.go b/request/graphql/schema/descriptions.go index f267ae8ed0..7829d5e450 100644 --- a/request/graphql/schema/descriptions.go +++ b/request/graphql/schema/descriptions.go @@ -24,7 +24,7 @@ var ( //nolint:unused gqlTypeToFieldKindReference = map[gql.Type]client.FieldKind{ - gql.ID: client.FieldKind_DocKey, + gql.ID: client.FieldKind_DocID, gql.Boolean: client.FieldKind_BOOL, gql.Int: client.FieldKind_INT, gql.Float: client.FieldKind_FLOAT, @@ -40,7 +40,7 @@ var ( } fieldKindToGQLType = map[client.FieldKind]gql.Type{ - client.FieldKind_DocKey: gql.ID, + client.FieldKind_DocID: gql.ID, client.FieldKind_BOOL: gql.Boolean, client.FieldKind_BOOL_ARRAY: gql.NewList(gql.NewNonNull(gql.Boolean)), client.FieldKind_NILLABLE_BOOL_ARRAY: gql.NewList(gql.Boolean), @@ -59,7 +59,7 @@ var ( // This map is fine to use defaultCRDTForFieldKind = map[client.FieldKind]client.CType{ - client.FieldKind_DocKey: client.LWW_REGISTER, + client.FieldKind_DocID: client.LWW_REGISTER, client.FieldKind_BOOL: client.LWW_REGISTER, client.FieldKind_BOOL_ARRAY: client.LWW_REGISTER, client.FieldKind_NILLABLE_BOOL_ARRAY: client.LWW_REGISTER, @@ -80,14 +80,17 @@ var ( ) const ( - dockeyArgDescription string = ` -An optional dockey parameter for this field. Only documents with - the given dockey will be returned. If no documents match, the result + docIDFieldDescription string = ` +The immutable identifier/docID (primary key) value for this document. +` + docIDArgDescription string = ` +An optional docID parameter for this field. Only documents with + the given docID will be returned. If no documents match, the result will be null/empty. ` - dockeysArgDescription string = ` -An optional set of dockeys for this field. Only documents with a dockey - matching a dockey in the given set will be returned. If no documents match, + docIDsArgDescription string = ` +An optional set of docIDs for this field. Only documents with a docID + matching a docID in the given set will be returned. If no documents match, the result will be null/empty. If an empty set is provided, this argument will be ignored. ` @@ -132,13 +135,13 @@ Updates documents in this collection using the data provided. Only documents the update will be applied to all documents in the collection. 
` updateIDArgDescription string = ` -An optional dockey value that will limit the update to the document with - a matching dockey. If no matching document is found, the operation will +An optional docID value that will limit the update to the document with + a matching docID. If no matching document is found, the operation will succeed, but no documents will be updated. ` updateIDsArgDescription string = ` -An optional set of dockey values that will limit the update to documents - with a matching dockey. If no matching documents are found, the operation will +An optional set of docID values that will limit the update to documents + with a matching docID. If no matching documents are found, the operation will succeed, but no documents will be updated. ` updateFilterArgDescription string = ` @@ -155,13 +158,13 @@ Deletes documents in this collection matching any provided criteria. If no criteria are provided all documents in the collection will be deleted. ` deleteIDArgDescription string = ` -An optional dockey value that will limit the delete to the document with - a matching dockey. If no matching document is found, the operation will +An optional docID value that will limit the delete to the document with + a matching docID. If no matching document is found, the operation will succeed, but no documents will be deleted. ` deleteIDsArgDescription string = ` -An optional set of dockey values that will limit the delete to documents with - a matching dockey. If no matching documents are found, the operation will +An optional set of docID values that will limit the delete to documents with + a matching docID. If no matching documents are found, the operation will succeed, but no documents will be deleted. If an empty set is provided, no documents will be deleted. ` @@ -169,9 +172,6 @@ An optional set of dockey values that will limit the delete to documents with An optional filter for this delete that will limit the delete to documents matching the given criteria. If no matching documents are found, the operation will succeed, but no documents will be deleted. -` - keyFieldDescription string = ` -The immutable primary key (dockey) value for this document. ` groupFieldDescription string = ` The group field may be used to return a set of records belonging to the group. 
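For reviewers: the update/delete argument descriptions above pair with the renamed collection API that appears later in this patch. A minimal sketch, assuming only the renamed surface visible in this diff (`client.NewDocIDFromString`, `client.DocID`, and `Collection.DeleteWithDocIDs` returning a `*client.DeleteResult`): parse the string IDs, then hand them to the collection. As the descriptions state, IDs that match no document still succeed, just with nothing deleted.

```
package sketch

import (
	"context"

	"github.com/sourcenetwork/defradb/client"
)

// deleteByDocIDs parses string IDs into client.DocID values and hands them to
// DeleteWithDocIDs (formerly DeleteWithKeys). IDs that match no document do
// not error; the operation succeeds with nothing deleted.
func deleteByDocIDs(ctx context.Context, col client.Collection, ids []string) (*client.DeleteResult, error) {
	docIDs := make([]client.DocID, 0, len(ids))
	for _, id := range ids {
		docID, err := client.NewDocIDFromString(id)
		if err != nil {
			return nil, err
		}
		docIDs = append(docIDs, docID)
	}
	return col.DeleteWithDocIDs(ctx, docIDs)
}
```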
diff --git a/request/graphql/schema/descriptions_test.go b/request/graphql/schema/descriptions_test.go index 2368b58c27..397436bca2 100644 --- a/request/graphql/schema/descriptions_test.go +++ b/request/graphql/schema/descriptions_test.go @@ -40,8 +40,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "User", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -89,8 +89,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "User", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -120,8 +120,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "Author", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -169,8 +169,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "Book", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -183,7 +183,7 @@ func TestSingleSimpleType(t *testing.T) { }, { Name: "author_id", - Kind: client.FieldKind_DocKey, + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, RelationType: client.Relation_Type_INTERNAL_ID, }, @@ -209,8 +209,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "Author", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -233,7 +233,7 @@ func TestSingleSimpleType(t *testing.T) { }, { Name: "published_id", - Kind: client.FieldKind_DocKey, + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, RelationType: client.Relation_Type_INTERNAL_ID, }, @@ -267,8 +267,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "User", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -298,8 +298,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "Author", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -347,8 +347,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "Book", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -361,7 +361,7 @@ func TestSingleSimpleType(t *testing.T) { }, { Name: "author_id", - Kind: client.FieldKind_DocKey, + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, RelationType: client.Relation_Type_INTERNAL_ID, }, @@ -387,8 +387,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "Author", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -411,7 +411,7 @@ func TestSingleSimpleType(t *testing.T) { }, { Name: "published_id", - Kind: client.FieldKind_DocKey, + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, RelationType: client.Relation_Type_INTERNAL_ID, }, @@ -445,8 +445,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "Book", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: 
client.NONE_CRDT, }, { @@ -459,7 +459,7 @@ func TestSingleSimpleType(t *testing.T) { }, { Name: "author_id", - Kind: client.FieldKind_DocKey, + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, RelationType: client.Relation_Type_INTERNAL_ID, }, @@ -485,8 +485,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "Author", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -509,7 +509,7 @@ func TestSingleSimpleType(t *testing.T) { }, { Name: "published_id", - Kind: client.FieldKind_DocKey, + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, RelationType: client.Relation_Type_INTERNAL_ID, }, @@ -543,8 +543,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "Book", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -557,7 +557,7 @@ func TestSingleSimpleType(t *testing.T) { }, { Name: "author_id", - Kind: client.FieldKind_DocKey, + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, RelationType: client.Relation_Type_INTERNAL_ID, }, @@ -583,8 +583,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "Author", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { diff --git a/request/graphql/schema/generate.go b/request/graphql/schema/generate.go index f76c5623c6..556700cd7f 100644 --- a/request/graphql/schema/generate.go +++ b/request/graphql/schema/generate.go @@ -366,8 +366,8 @@ func (g *Generator) createExpandedFieldList( Description: f.Description, Type: gql.NewList(t), Args: gql.FieldConfigArgument{ - "dockey": schemaTypes.NewArgConfig(gql.String, dockeyArgDescription), - "dockeys": schemaTypes.NewArgConfig(gql.NewList(gql.NewNonNull(gql.String)), dockeysArgDescription), + request.DocIDArgName: schemaTypes.NewArgConfig(gql.String, docIDArgDescription), + request.DocIDsArgName: schemaTypes.NewArgConfig(gql.NewList(gql.NewNonNull(gql.String)), docIDsArgDescription), "filter": schemaTypes.NewArgConfig( g.manager.schema.TypeMap()[typeName+"FilterArg"], listFieldFilterArgDescription, @@ -435,16 +435,16 @@ func (g *Generator) buildTypes( fields := gql.Fields{} if !isEmbeddedObject { - // automatically add the _key: ID field to the type - fields[request.KeyFieldName] = &gql.Field{ - Description: keyFieldDescription, + // automatically add the _docID: ID field to the type + fields[request.DocIDFieldName] = &gql.Field{ + Description: docIDFieldDescription, Type: gql.ID, } } for _, field := range fieldDescriptions { - if field.Name == request.KeyFieldName { - // The `_key` field is included in the fieldDescriptions, + if field.Name == request.DocIDFieldName { + // The `_docID` field is included in the fieldDescriptions, // but we do not wish to override the standard definition // with the collection held definition (particularly the // description) @@ -999,10 +999,10 @@ func (g *Generator) genTypeMutationUpdateField( Description: updateDocumentsDescription, Type: gql.NewList(obj), Args: gql.FieldConfigArgument{ - "id": schemaTypes.NewArgConfig(gql.ID, updateIDArgDescription), - "ids": schemaTypes.NewArgConfig(gql.NewList(gql.ID), updateIDsArgDescription), - "filter": schemaTypes.NewArgConfig(filter, updateFilterArgDescription), - "data": schemaTypes.NewArgConfig(gql.String, updateDataArgDescription), + request.DocIDArgName: 
schemaTypes.NewArgConfig(gql.ID, updateIDArgDescription), + request.DocIDsArgName: schemaTypes.NewArgConfig(gql.NewList(gql.ID), updateIDsArgDescription), + "filter": schemaTypes.NewArgConfig(filter, updateFilterArgDescription), + "data": schemaTypes.NewArgConfig(gql.String, updateDataArgDescription), }, } return field, nil @@ -1017,9 +1017,9 @@ func (g *Generator) genTypeMutationDeleteField( Description: deleteDocumentsDescription, Type: gql.NewList(obj), Args: gql.FieldConfigArgument{ - "id": schemaTypes.NewArgConfig(gql.ID, deleteIDArgDescription), - "ids": schemaTypes.NewArgConfig(gql.NewList(gql.ID), deleteIDsArgDescription), - "filter": schemaTypes.NewArgConfig(filter, deleteFilterArgDescription), + request.DocIDArgName: schemaTypes.NewArgConfig(gql.ID, deleteIDArgDescription), + request.DocIDsArgName: schemaTypes.NewArgConfig(gql.NewList(gql.ID), deleteIDsArgDescription), + "filter": schemaTypes.NewArgConfig(filter, deleteFilterArgDescription), }, } return field, nil @@ -1065,7 +1065,7 @@ func (g *Generator) genTypeFilterArgInput(obj *gql.Object) *gql.InputObject { // generate basic filter operator blocks // @todo: Extract object field loop into its own utility func for f, field := range obj.Fields() { - if _, ok := request.ReservedFields[f]; ok && f != request.KeyFieldName { + if _, ok := request.ReservedFields[f]; ok && f != request.DocIDFieldName { continue } // scalars (leafs) @@ -1169,7 +1169,7 @@ func (g *Generator) genTypeOrderArgInput(obj *gql.Object) *gql.InputObject { fields := gql.InputObjectConfigFieldMap{} for f, field := range obj.Fields() { - if _, ok := request.ReservedFields[f]; ok && f != request.KeyFieldName { + if _, ok := request.ReservedFields[f]; ok && f != request.DocIDFieldName { continue } typeMap := g.manager.schema.TypeMap() @@ -1216,10 +1216,10 @@ func (g *Generator) genTypeQueryableFieldList( Description: obj.Description(), Type: gql.NewList(obj), Args: gql.FieldConfigArgument{ - "dockey": schemaTypes.NewArgConfig(gql.String, dockeyArgDescription), - "dockeys": schemaTypes.NewArgConfig(gql.NewList(gql.NewNonNull(gql.String)), dockeysArgDescription), - "cid": schemaTypes.NewArgConfig(gql.String, cidArgDescription), - "filter": schemaTypes.NewArgConfig(config.filter, selectFilterArgDescription), + request.DocIDArgName: schemaTypes.NewArgConfig(gql.String, docIDArgDescription), + request.DocIDsArgName: schemaTypes.NewArgConfig(gql.NewList(gql.NewNonNull(gql.String)), docIDsArgDescription), + "cid": schemaTypes.NewArgConfig(gql.String, cidArgDescription), + "filter": schemaTypes.NewArgConfig(config.filter, selectFilterArgDescription), "groupBy": schemaTypes.NewArgConfig( gql.NewList(gql.NewNonNull(config.groupBy)), schemaTypes.GroupByArgDescription, diff --git a/request/graphql/schema/types/commits.go b/request/graphql/schema/types/commits.go index 4da8d2dd3a..1e8d6b5bb4 100644 --- a/request/graphql/schema/types/commits.go +++ b/request/graphql/schema/types/commits.go @@ -33,7 +33,7 @@ var ( // type Commit { // Height: Int // CID: String - // Dockey: String + // DocID: String // CollectionID: Int // SchemaVersionID: String // Delta: String @@ -55,8 +55,8 @@ var ( Description: commitCIDFieldDescription, Type: gql.String, }, - "dockey": &gql.Field{ - Description: commitDockeyFieldDescription, + request.DocIDArgName: &gql.Field{ + Description: commitDocIDFieldDescription, Type: gql.String, }, "collectionID": &gql.Field{ @@ -125,8 +125,8 @@ var ( Description: commitCIDFieldDescription, Type: OrderingEnum, }, - "dockey": &gql.InputObjectFieldConfig{ - 
Description: commitDockeyFieldDescription, + request.DocIDArgName: &gql.InputObjectFieldConfig{ + Description: commitDocIDFieldDescription, Type: OrderingEnum, }, "collectionID": &gql.InputObjectFieldConfig{ @@ -150,9 +150,9 @@ var ( Value: "cid", Description: commitCIDFieldDescription, }, - "dockey": &gql.EnumValueConfig{ - Value: "dockey", - Description: commitDockeyFieldDescription, + request.DocIDArgName: &gql.EnumValueConfig{ + Value: request.DocIDArgName, + Description: commitDocIDFieldDescription, }, "collectionID": &gql.EnumValueConfig{ Value: "collectionID", @@ -175,10 +175,10 @@ var ( Description: commitsQueryDescription, Type: gql.NewList(CommitObject), Args: gql.FieldConfigArgument{ - "dockey": NewArgConfig(gql.ID, commitDockeyArgDescription), - request.FieldIDName: NewArgConfig(gql.String, commitFieldIDArgDescription), - "order": NewArgConfig(CommitsOrderArg, OrderArgDescription), - "cid": NewArgConfig(gql.ID, commitCIDArgDescription), + request.DocIDArgName: NewArgConfig(gql.ID, commitDocIDArgDescription), + request.FieldIDName: NewArgConfig(gql.String, commitFieldIDArgDescription), + "order": NewArgConfig(CommitsOrderArg, OrderArgDescription), + "cid": NewArgConfig(gql.ID, commitCIDArgDescription), "groupBy": NewArgConfig( gql.NewList( gql.NewNonNull( @@ -198,8 +198,8 @@ var ( Description: latestCommitsQueryDescription, Type: gql.NewList(CommitObject), Args: gql.FieldConfigArgument{ - "dockey": NewArgConfig(gql.NewNonNull(gql.ID), commitDockeyArgDescription), - request.FieldIDName: NewArgConfig(gql.String, commitFieldIDArgDescription), + request.DocIDArgName: NewArgConfig(gql.NewNonNull(gql.ID), commitDocIDArgDescription), + request.FieldIDName: NewArgConfig(gql.String, commitFieldIDArgDescription), }, } ) diff --git a/request/graphql/schema/types/descriptions.go b/request/graphql/schema/types/descriptions.go index b60c9f009d..42c1ba956e 100644 --- a/request/graphql/schema/types/descriptions.go +++ b/request/graphql/schema/types/descriptions.go @@ -38,9 +38,9 @@ Commit represents an individual commit to a MerkleCRDT, every mutation to a commit composed of the field level commits and, in the case of an update, the prior composite commit. ` - commitDockeyArgDescription string = ` -An optional dockey parameter for this commit query. Only commits for a document - with a matching dockey will be returned. If no documents match, the result + commitDocIDArgDescription string = ` +An optional docID parameter for this commit query. Only commits for a document + with a matching docID will be returned. If no documents match, the result set will be empty. ` commitFieldIDArgDescription string = ` @@ -71,8 +71,8 @@ Height represents the location of the commit in the DAG. All commits (composite, The unique CID of this commit, and the primary means through which to safely identify a specific commit. ` - commitDockeyFieldDescription string = ` -The dockey of the document that this commit is for. + commitDocIDFieldDescription string = ` +The docID of the document that this commit is for. ` commitCollectionIDFieldDescription string = ` The ID of the collection that this commit was committed against. 
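For reviewers: the commit `docID` field described above, and the bench and test changes that follow, all lean on the renamed document accessor: `doc.ID()` (returning a `client.DocID`) replaces `doc.Key()`. A minimal sketch using only calls visible in this patch (`client.NewDocFromJSON` and `ID().String()`); the JSON fields are illustrative only:

```
package sketch

import (
	"fmt"

	"github.com/sourcenetwork/defradb/client"
)

// printDocID builds a document and reads its identifier through the renamed
// accessor: doc.ID() (a client.DocID) replaces doc.Key().
func printDocID() error {
	doc, err := client.NewDocFromJSON([]byte(`{"Name": "Alice", "Points": 124}`))
	if err != nil {
		return err
	}
	fmt.Println(doc.ID().String()) // a content-derived "bae-..." identifier
	return nil
}
```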
diff --git a/tests/bench/bench_util.go b/tests/bench/bench_util.go index 0a9127d816..fda850e9a9 100644 --- a/tests/bench/bench_util.go +++ b/tests/bench/bench_util.go @@ -126,7 +126,7 @@ func BackfillBenchmarkDB( fixture fixtures.Generator, docCount, opCount int, doSync bool, -) ([][]client.DocKey, error) { +) ([][]client.DocID, error) { numTypes := len(fixture.Types()) // load fixtures @@ -134,7 +134,7 @@ func BackfillBenchmarkDB( wg.Add(docCount) errCh := make(chan error) waitCh := make(chan struct{}) - dockeys := make([][]client.DocKey, docCount) + listOfDocIDs := make([][]client.DocID, docCount) go func() { // Cut up the job from into writeBatchGroup size grouped jobs. @@ -159,7 +159,7 @@ func BackfillBenchmarkDB( } // create the documents - keys := make([]client.DocKey, numTypes) + docIDs := make([]client.DocID, numTypes) for j := 0; j < numTypes; j++ { doc, err := client.NewDocFromJSON([]byte(docs[j])) if err != nil { @@ -177,17 +177,17 @@ func BackfillBenchmarkDB( log.Info( ctx, "Failed to commit TX for doc %s, retrying...\n", - logging.NewKV("DocKey", doc.Key()), + logging.NewKV("DocID", doc.ID()), ) continue } else if err != nil { errCh <- errors.Wrap("failed to create document", err) } - keys[j] = doc.Key() + docIDs[j] = doc.ID() break } } - dockeys[index] = keys + listOfDocIDs[index] = docIDs wg.Done() batchWg.Done() @@ -205,7 +205,7 @@ func BackfillBenchmarkDB( // finish or err select { case <-waitCh: - return dockeys, nil + return listOfDocIDs, nil case err := <-errCh: return nil, err } diff --git a/tests/bench/collection/utils.go b/tests/bench/collection/utils.go index dfb63fc86b..2ef7123493 100644 --- a/tests/bench/collection/utils.go +++ b/tests/bench/collection/utils.go @@ -40,7 +40,7 @@ func runCollectionBenchGet( } defer db.Close() - dockeys, err := benchutils.BackfillBenchmarkDB( + listOfDocIDs, err := benchutils.BackfillBenchmarkDB( b, ctx, collections, @@ -55,9 +55,9 @@ func runCollectionBenchGet( // run benchmark if doSync { - return runCollectionBenchGetSync(b, ctx, collections, fixture, docCount, opCount, dockeys) + return runCollectionBenchGetSync(b, ctx, collections, fixture, docCount, opCount, listOfDocIDs) } - return runCollectionBenchGetAsync(b, ctx, collections, fixture, docCount, opCount, dockeys) + return runCollectionBenchGetAsync(b, ctx, collections, fixture, docCount, opCount, listOfDocIDs) } func runCollectionBenchGetSync(b *testing.B, @@ -65,14 +65,14 @@ func runCollectionBenchGetSync(b *testing.B, collections []client.Collection, fixture fixtures.Generator, docCount, opCount int, - dockeys [][]client.DocKey, + listOfDocIDs [][]client.DocID, ) error { numTypes := len(fixture.Types()) b.ResetTimer() for i := 0; i < b.N; i++ { // outer benchmark loop for j := 0; j < opCount/numTypes; j++ { // number of Get operations we want to execute for k := 0; k < numTypes; k++ { // apply op to all the related types - collections[k].Get(ctx, dockeys[j][k], false) //nolint:errcheck + collections[k].Get(ctx, listOfDocIDs[j][k], false) //nolint:errcheck } } } @@ -88,7 +88,7 @@ func runCollectionBenchGetAsync(b *testing.B, collections []client.Collection, fixture fixtures.Generator, docCount, opCount int, - dockeys [][]client.DocKey, + listOfDocIDs [][]client.DocID, ) error { var wg sync.WaitGroup numTypes := len(fixture.Types()) @@ -97,10 +97,10 @@ func runCollectionBenchGetAsync(b *testing.B, for j := 0; j < opCount/numTypes; j++ { // number of Get operations we want to execute for k := 0; k < numTypes; k++ { // apply op to all the related types wg.Add(1) - go func(ctx 
context.Context, col client.Collection, dockey client.DocKey) { - col.Get(ctx, dockey, false) //nolint:errcheck + go func(ctx context.Context, col client.Collection, docID client.DocID) { + col.Get(ctx, docID, false) //nolint:errcheck wg.Done() - }(ctx, collections[k], dockeys[j][k]) + }(ctx, collections[k], listOfDocIDs[j][k]) } } diff --git a/tests/bench/query/index/simple_test.go b/tests/bench/query/index/simple_test.go index e675086a2a..2f15aff59c 100644 --- a/tests/bench/query/index/simple_test.go +++ b/tests/bench/query/index/simple_test.go @@ -22,7 +22,7 @@ var ( userSimpleWithFilterQuery = ` query { User(filter: { Age: { _eq: 30 } }) { - _key + _docID Name Age Points diff --git a/tests/bench/query/planner/simple_test.go b/tests/bench/query/planner/simple_test.go index e911002911..b6bdedac8e 100644 --- a/tests/bench/query/planner/simple_test.go +++ b/tests/bench/query/planner/simple_test.go @@ -21,7 +21,7 @@ var ( userSimpleQuery = ` query { User { - _key + _docID Name Age Points diff --git a/tests/bench/query/simple/simple_test.go b/tests/bench/query/simple/simple_test.go index a9791bcbc7..14f2591d89 100644 --- a/tests/bench/query/simple/simple_test.go +++ b/tests/bench/query/simple/simple_test.go @@ -21,7 +21,7 @@ var ( userSimpleQuery = ` query { User { - _key + _docID Name Age Points diff --git a/tests/bench/query/simple/utils.go b/tests/bench/query/simple/utils.go index 8c6f82579b..14752e7ae2 100644 --- a/tests/bench/query/simple/utils.go +++ b/tests/bench/query/simple/utils.go @@ -41,7 +41,7 @@ func RunQueryBenchGet( } defer db.Close() - dockeys, err := benchutils.BackfillBenchmarkDB( + listOfDocIDs, err := benchutils.BackfillBenchmarkDB( b, ctx, collections, @@ -54,7 +54,7 @@ return err } - return runQueryBenchGetSync(b, ctx, db, docCount, dockeys, query) + return runQueryBenchGetSync(b, ctx, db, docCount, listOfDocIDs, query) } func runQueryBenchGetSync( b *testing.B, ctx context.Context, db client.DB, docCount int, - dockeys [][]client.DocKey, + listOfDocIDs [][]client.DocID, query string, ) error { - // run any preprocessing on the query before execution (mostly just dockey insertion if needed) - query = formatQuery(b, query, dockeys) + // run any preprocessing on the query before execution (mostly just docID insertion if needed) + query = formatQuery(b, query, listOfDocIDs) b.ResetTimer() for i := 0; i < b.N; i++ { @@ -89,37 +89,37 @@ return nil } -func formatQuery(b *testing.B, query string, dockeys [][]client.DocKey) string { - numPlaceholders := strings.Count(query, "{{dockey}}") +func formatQuery(b *testing.B, query string, listOfDocIDs [][]client.DocID) string { + numPlaceholders := strings.Count(query, "{{docID}}") if numPlaceholders == 0 { return query } - // create a copy of dockeys since we'll be mutating it - dockeysCopy := dockeys[:] + // create a copy of docIDs since we'll be mutating it + docIDsCopy := listOfDocIDs[:] // b.Logf("formatting query, replacing %v instances", numPlaceholders) // b.Logf("Query before: %s", query) - if len(dockeysCopy) < numPlaceholders { + if len(docIDsCopy) < numPlaceholders { b.Fatalf( "Invalid number of query placeholders, max is %v requested is %v", - len(dockeys), + len(listOfDocIDs), numPlaceholders, ) } for i := 0; i < numPlaceholders; i++ { - // pick a random dockey, needs to be unique accross all + // pick a random docID, needs to be unique across all // loop iterations, so remove the selected one so the next // iteration can't potentially
pick it. - rIndex := rand.Intn(len(dockeysCopy)) - key := dockeysCopy[rIndex][0] + rIndex := rand.Intn(len(docIDsCopy)) + docID := docIDsCopy[rIndex][0] - // remove selected key - dockeysCopy = append(dockeysCopy[:rIndex], dockeysCopy[rIndex+1:]...) + // remove selected docID + docIDsCopy = append(docIDsCopy[:rIndex], docIDsCopy[rIndex+1:]...) // replace - query = strings.Replace(query, "{{dockey}}", key.String(), 1) + query = strings.Replace(query, "{{docID}}", docID.String(), 1) } // b.Logf("Query After: %s", query) diff --git a/tests/bench/query/simple/with_filter_test.go b/tests/bench/query/simple/with_filter_test.go index 60081167a3..7fbc15989b 100644 --- a/tests/bench/query/simple/with_filter_test.go +++ b/tests/bench/query/simple/with_filter_test.go @@ -21,7 +21,7 @@ var ( userSimpleWithFilterQuery = ` query { User(filter: {Age: {_gt: 10}}) { - _key + _docID Name Age Points diff --git a/tests/bench/query/simple/with_limit_offset_test.go b/tests/bench/query/simple/with_limit_offset_test.go index e47d8f347e..d770302cd0 100644 --- a/tests/bench/query/simple/with_limit_offset_test.go +++ b/tests/bench/query/simple/with_limit_offset_test.go @@ -21,7 +21,7 @@ var ( userSimpleWithLimitOffsetQuery = ` query { User(limit: 10, offset: 5) { - _key + _docID Name Age Points diff --git a/tests/bench/query/simple/with_multi_lookup_test.go b/tests/bench/query/simple/with_multi_lookup_test.go index 2c744319a3..f862095189 100644 --- a/tests/bench/query/simple/with_multi_lookup_test.go +++ b/tests/bench/query/simple/with_multi_lookup_test.go @@ -18,11 +18,11 @@ import ( ) var ( - // 10x dockey will be replaced in the bench runner func + // 10x `docID`s will be replaced in the bench runner func userSimpleWithMultiLookupQuery = ` query { - User(dockeys: ["{{dockey}}", "{{dockey}}", "{{dockey}}", "{{dockey}}", "{{dockey}}", "{{dockey}}", "{{dockey}}", "{{dockey}}", "{{dockey}}", "{{dockey}}"]) { - _key + User(docIDs: ["{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}"]) { + _docID Name Age Points diff --git a/tests/bench/query/simple/with_order_test.go b/tests/bench/query/simple/with_order_test.go index 2b12817713..981e4a699d 100644 --- a/tests/bench/query/simple/with_order_test.go +++ b/tests/bench/query/simple/with_order_test.go @@ -21,7 +21,7 @@ var ( userSimpleWithSortQuery = ` query { User(order: {Age: ASC}) { - _key + _docID Name Age Points diff --git a/tests/bench/query/simple/with_single_lookup_test.go b/tests/bench/query/simple/with_single_lookup_test.go index d432f730be..088fbd22d0 100644 --- a/tests/bench/query/simple/with_single_lookup_test.go +++ b/tests/bench/query/simple/with_single_lookup_test.go @@ -18,11 +18,11 @@ import ( ) var ( - // dockey will be replaced in the bench runner func + // The `docID` will be replaced in the bench runner func userSimpleWithSingleLookupQuery = ` query { - User(dockey: "{{dockey}}") { - _key + User(docID: "{{docID}}") { + _docID Name Age Points diff --git a/tests/clients/cli/wrapper_collection.go b/tests/clients/cli/wrapper_collection.go index f29135d201..abef339cfd 100644 --- a/tests/clients/cli/wrapper_collection.go +++ b/tests/clients/cli/wrapper_collection.go @@ -58,9 +58,9 @@ func (c *Collection) Create(ctx context.Context, doc *client.Document) error { args := []string{"client", "collection", "create"} args = append(args, "--name", c.Description().Name) - // We must call this here, else the doc key on the given object will not match + // We must call this here, else the 
docID on the given object will not match // that of the document saved in the database - err := doc.RemapAliasFieldsAndDockey(c.Schema().Fields) + err := doc.RemapAliasFieldsAndDocID(c.Schema().Fields) if err != nil { return err } @@ -84,9 +84,9 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er docMapList := make([]map[string]any, len(docs)) for i, doc := range docs { - // We must call this here, else the doc key on the given object will not match + // We must call this here, else the docID on the given object will not match // that of the document saved in the database - err := doc.RemapAliasFieldsAndDockey(c.Schema().Fields) + err := doc.RemapAliasFieldsAndDocID(c.Schema().Fields) if err != nil { return err } @@ -115,7 +115,7 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er func (c *Collection) Update(ctx context.Context, doc *client.Document) error { args := []string{"client", "collection", "update"} args = append(args, "--name", c.Description().Name) - args = append(args, "--key", doc.Key().String()) + args = append(args, "--docID", doc.ID().String()) document, err := doc.ToJSONPatch() if err != nil { @@ -132,7 +132,7 @@ func (c *Collection) Update(ctx context.Context, doc *client.Document) error { } func (c *Collection) Save(ctx context.Context, doc *client.Document) error { - _, err := c.Get(ctx, doc.Key(), true) + _, err := c.Get(ctx, doc.ID(), true) if err == nil { return c.Update(ctx, doc) } @@ -142,16 +142,16 @@ func (c *Collection) Save(ctx context.Context, doc *client.Document) error { return err } -func (c *Collection) Delete(ctx context.Context, docKey client.DocKey) (bool, error) { - res, err := c.DeleteWithKey(ctx, docKey) +func (c *Collection) Delete(ctx context.Context, docID client.DocID) (bool, error) { + res, err := c.DeleteWithDocID(ctx, docID) if err != nil { return false, err } return res.Count == 1, nil } -func (c *Collection) Exists(ctx context.Context, docKey client.DocKey) (bool, error) { - _, err := c.Get(ctx, docKey, false) +func (c *Collection) Exists(ctx context.Context, docID client.DocID) (bool, error) { + _, err := c.Get(ctx, docID, false) if err != nil { return false, err } @@ -162,10 +162,10 @@ func (c *Collection) UpdateWith(ctx context.Context, target any, updater string) switch t := target.(type) { case string, map[string]any, *request.Filter: return c.UpdateWithFilter(ctx, t, updater) - case client.DocKey: - return c.UpdateWithKey(ctx, t, updater) - case []client.DocKey: - return c.UpdateWithKeys(ctx, t, updater) + case client.DocID: + return c.UpdateWithDocID(ctx, t, updater) + case []client.DocID: + return c.UpdateWithDocIDs(ctx, t, updater) default: return nil, client.ErrInvalidUpdateTarget } @@ -204,33 +204,33 @@ func (c *Collection) UpdateWithFilter( return c.updateWith(ctx, args) } -func (c *Collection) UpdateWithKey( +func (c *Collection) UpdateWithDocID( ctx context.Context, - key client.DocKey, + docID client.DocID, updater string, ) (*client.UpdateResult, error) { args := []string{"client", "collection", "update"} args = append(args, "--name", c.Description().Name) - args = append(args, "--key", key.String()) + args = append(args, "--docID", docID.String()) args = append(args, "--updater", updater) return c.updateWith(ctx, args) } -func (c *Collection) UpdateWithKeys( +func (c *Collection) UpdateWithDocIDs( ctx context.Context, - docKeys []client.DocKey, + docIDs []client.DocID, updater string, ) (*client.UpdateResult, error) { args := []string{"client", "collection", 
"update"} args = append(args, "--name", c.Description().Name) args = append(args, "--updater", updater) - keys := make([]string, len(docKeys)) - for i, v := range docKeys { - keys[i] = v.String() + strDocIDs := make([]string, len(docIDs)) + for i, v := range docIDs { + strDocIDs[i] = v.String() } - args = append(args, "--key", strings.Join(keys, ",")) + args = append(args, "--docID", strings.Join(strDocIDs, ",")) return c.updateWith(ctx, args) } @@ -239,10 +239,10 @@ func (c *Collection) DeleteWith(ctx context.Context, target any) (*client.Delete switch t := target.(type) { case string, map[string]any, *request.Filter: return c.DeleteWithFilter(ctx, t) - case client.DocKey: - return c.DeleteWithKey(ctx, t) - case []client.DocKey: - return c.DeleteWithKeys(ctx, t) + case client.DocID: + return c.DeleteWithDocID(ctx, t) + case []client.DocID: + return c.DeleteWithDocIDs(ctx, t) default: return nil, client.ErrInvalidDeleteTarget } @@ -276,31 +276,31 @@ func (c *Collection) DeleteWithFilter(ctx context.Context, filter any) (*client. return c.deleteWith(ctx, args) } -func (c *Collection) DeleteWithKey(ctx context.Context, docKey client.DocKey) (*client.DeleteResult, error) { +func (c *Collection) DeleteWithDocID(ctx context.Context, docID client.DocID) (*client.DeleteResult, error) { args := []string{"client", "collection", "delete"} args = append(args, "--name", c.Description().Name) - args = append(args, "--key", docKey.String()) + args = append(args, "--docID", docID.String()) return c.deleteWith(ctx, args) } -func (c *Collection) DeleteWithKeys(ctx context.Context, docKeys []client.DocKey) (*client.DeleteResult, error) { +func (c *Collection) DeleteWithDocIDs(ctx context.Context, docIDs []client.DocID) (*client.DeleteResult, error) { args := []string{"client", "collection", "delete"} args = append(args, "--name", c.Description().Name) - keys := make([]string, len(docKeys)) - for i, v := range docKeys { - keys[i] = v.String() + strDocIDs := make([]string, len(docIDs)) + for i, v := range docIDs { + strDocIDs[i] = v.String() } - args = append(args, "--key", strings.Join(keys, ",")) + args = append(args, "--docID", strings.Join(strDocIDs, ",")) return c.deleteWith(ctx, args) } -func (c *Collection) Get(ctx context.Context, key client.DocKey, showDeleted bool) (*client.Document, error) { +func (c *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bool) (*client.Document, error) { args := []string{"client", "collection", "get"} args = append(args, "--name", c.Description().Name) - args = append(args, key.String()) + args = append(args, docID.String()) if showDeleted { args = append(args, "--show-deleted") @@ -324,40 +324,40 @@ func (c *Collection) WithTxn(tx datastore.Txn) client.Collection { } } -func (c *Collection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) { - args := []string{"client", "collection", "keys"} +func (c *Collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) { + args := []string{"client", "collection", "docIDs"} args = append(args, "--name", c.Description().Name) stdOut, _, err := c.cmd.executeStream(ctx, args) if err != nil { return nil, err } - docKeyCh := make(chan client.DocKeysResult) + docIDCh := make(chan client.DocIDResult) go func() { dec := json.NewDecoder(stdOut) - defer close(docKeyCh) + defer close(docIDCh) for { - var res http.DocKeyResult + var res http.DocIDResult if err := dec.Decode(&res); err != nil { return } - key, err := client.NewDocKeyFromString(res.Key) + docID, err := 
client.NewDocIDFromString(res.DocID) if err != nil { return } - docKey := client.DocKeysResult{ - Key: key, + docIDResult := client.DocIDResult{ + ID: docID, } if res.Error != "" { - docKey.Err = fmt.Errorf(res.Error) + docIDResult.Err = fmt.Errorf(res.Error) } - docKeyCh <- docKey + docIDCh <- docIDResult } }() - return docKeyCh, nil + return docIDCh, nil } func (c *Collection) CreateIndex( diff --git a/tests/gen/gen_auto.go b/tests/gen/gen_auto.go index 52ea3148e5..c425c8de8f 100644 --- a/tests/gen/gen_auto.go +++ b/tests/gen/gen_auto.go @@ -71,10 +71,10 @@ func newRandomDocGenerator(types map[string]client.CollectionDefinition, config } type genDoc struct { - // the dockey of the document. Its cached value from doc.Key().String() just to avoid + // the docID of the document. Its cached value from doc.ID().String() just to avoid // calculating it multiple times. - docKey string - doc *client.Document + docID string + doc *client.Document } type randomDocGenerator struct { @@ -117,10 +117,10 @@ func (g *randomDocGenerator) getMaxTotalDemand() int { return totalDemand } -// getNextPrimaryDocKey returns the key of the next primary document to be used as a relation. -func (g *randomDocGenerator) getNextPrimaryDocKey(secondaryType string, field *client.FieldDescription) string { +// getNextPrimaryDocID returns the docID of the next primary document to be used as a relation. +func (g *randomDocGenerator) getNextPrimaryDocID(secondaryType string, field *client.FieldDescription) string { ind := g.configurator.usageCounter.getNextTypeIndForField(secondaryType, field) - return g.generatedDocs[field.Schema][ind].docKey + return g.generatedDocs[field.Schema][ind].docID } func (g *randomDocGenerator) generateRandomDocs(order []string) error { @@ -134,12 +134,12 @@ func (g *randomDocGenerator) generateRandomDocs(order []string) error { for i := 0; i < totalDemand; i++ { newDoc := make(map[string]any) for _, field := range typeDef.Schema.Fields { - if field.Name == request.KeyFieldName { + if field.Name == request.DocIDFieldName { continue } if field.IsRelation() { if field.IsPrimaryRelation() { - newDoc[field.Name+request.RelatedObjectID] = g.getNextPrimaryDocKey(typeName, &field) + newDoc[field.Name+request.RelatedObjectID] = g.getNextPrimaryDocID(typeName, &field) } } else { fieldConf := g.configurator.config.ForField(typeName, field.Name) @@ -151,7 +151,7 @@ func (g *randomDocGenerator) generateRandomDocs(order []string) error { return err } g.generatedDocs[typeName] = append(g.generatedDocs[typeName], - genDoc{docKey: doc.Key().String(), doc: doc}) + genDoc{docID: doc.ID().String(), doc: doc}) } } return nil diff --git a/tests/gen/gen_auto_configurator.go b/tests/gen/gen_auto_configurator.go index 55a15737ea..4049e7ba4d 100644 --- a/tests/gen/gen_auto_configurator.go +++ b/tests/gen/gen_auto_configurator.go @@ -83,7 +83,7 @@ func (c *typeUsageCounters) addRelationUsage( // getNextTypeIndForField returns the next index to be used for a foreign field. func (c *typeUsageCounters) getNextTypeIndForField(secondaryType string, field *client.FieldDescription) int { current := c.m[field.Schema][secondaryType][field.Name] - return current.useNextDocKey() + return current.useNextDocIDIndex() } type relationUsage struct { @@ -93,9 +93,9 @@ type relationUsage struct { minSecDocsPerPrimary int // maxSecDocsPerPrimary is the maximum number of primary documents that should be used for the relation. 
diff --git a/tests/gen/gen_auto_configurator.go b/tests/gen/gen_auto_configurator.go
index 55a15737ea..4049e7ba4d 100644
--- a/tests/gen/gen_auto_configurator.go
+++ b/tests/gen/gen_auto_configurator.go
@@ -83,7 +83,7 @@ func (c *typeUsageCounters) addRelationUsage(
 // getNextTypeIndForField returns the next index to be used for a foreign field.
 func (c *typeUsageCounters) getNextTypeIndForField(secondaryType string, field *client.FieldDescription) int {
 	current := c.m[field.Schema][secondaryType][field.Name]
-	return current.useNextDocKey()
+	return current.useNextDocIDIndex()
 }
 
 type relationUsage struct {
@@ -93,9 +93,9 @@
 	minSecDocsPerPrimary int
 	// maxSecDocsPerPrimary is the maximum number of primary documents that should be used for the relation.
 	maxSecDocsPerPrimary int
-	// docKeysCounter is a slice of structs that keep track of the number of times
+	// docIDsCounter is a slice of structs that keep track of the number of times
 	// each primary document has been used for the relation.
-	docKeysCounter []struct {
+	docIDsCounter []struct {
 		// ind is the index of the primary document.
 		ind int
 		// count is the number of times the primary document has been used for the relation.
@@ -116,27 +116,27 @@ func newRelationUsage(minSecDocPerPrim, maxSecDocPerPrim, numDocs int, random *r
 	}
 }
 
-// useNextDocKey determines the next primary document to be used for the relation, tracks
+// useNextDocIDIndex determines the next primary document to be used for the relation, tracks
 // it and returns its index.
-func (u *relationUsage) useNextDocKey() int {
-	docKeyCounterInd := 0
+func (u *relationUsage) useNextDocIDIndex() int {
+	docIDCounterInd := 0
 	// if a primary document has a minimum number of secondary documents that should be
 	// generated for it, then it should be used until that minimum is reached.
 	// After that, we can pick a random primary document to use.
 	if u.counter >= u.minSecDocsPerPrimary*u.numAvailablePrimaryDocs {
-		docKeyCounterInd = u.random.Intn(len(u.docKeysCounter))
+		docIDCounterInd = u.random.Intn(len(u.docIDsCounter))
 	} else {
-		docKeyCounterInd = u.counter % len(u.docKeysCounter)
+		docIDCounterInd = u.counter % len(u.docIDsCounter)
 	}
-	currentInd := u.docKeysCounter[docKeyCounterInd].ind
-	docCounter := &u.docKeysCounter[docKeyCounterInd]
+	currentInd := u.docIDsCounter[docIDCounterInd].ind
+	docCounter := &u.docIDsCounter[docIDCounterInd]
 	docCounter.count++
 	// if the primary document reached max number of secondary documents, we can remove it
 	// from the slice of primary documents that are available for the relation.
 	if docCounter.count >= u.maxSecDocsPerPrimary {
-		lastCounterInd := len(u.docKeysCounter) - 1
-		*docCounter = u.docKeysCounter[lastCounterInd]
-		u.docKeysCounter = u.docKeysCounter[:lastCounterInd]
+		lastCounterInd := len(u.docIDsCounter) - 1
+		*docCounter = u.docIDsCounter[lastCounterInd]
+		u.docIDsCounter = u.docIDsCounter[:lastCounterInd]
 	}
 
 	u.counter++
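The branch above is what the distribution assertions further down rely on: while minimums are unsatisfied, primaries are used round-robin; afterwards the pick is uniformly random, with maxed-out primaries swap-removed from the pool. A standalone re-implementation of just the pick rule, under assumed parameters and without the removal bookkeeping:

```go
// Sketch only: the pick policy in isolation (assumes math/rand is imported).
func pickPrimaryIndex(counter, minSecDocsPerPrimary, numPrimaries int, rng *rand.Rand) int {
	if counter >= minSecDocsPerPrimary*numPrimaries {
		// Every primary has met its minimum: free random choice.
		return rng.Intn(numPrimaries)
	}
	// Still satisfying minimums: deterministic round-robin.
	return counter % numPrimaries
}
```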
@@ -145,14 +145,14 @@
 // allocateIndexes allocates the indexes for the relation usage tracker.
 func (u *relationUsage) allocateIndexes() {
-	docKeysCounter := make([]struct {
+	docIDsCounter := make([]struct {
 		ind   int
 		count int
 	}, u.numAvailablePrimaryDocs)
-	for i := range docKeysCounter {
-		docKeysCounter[i].ind = i
+	for i := range docIDsCounter {
+		docIDsCounter[i].ind = i
 	}
-	u.docKeysCounter = docKeysCounter
+	u.docIDsCounter = docIDsCounter
 }
 
 func newDocGenConfigurator(types map[string]client.CollectionDefinition, config configsMap) docsGenConfigurator {
diff --git a/tests/gen/gen_auto_test.go b/tests/gen/gen_auto_test.go
index 5e4c62a0a2..a9a8d81136 100644
--- a/tests/gen/gen_auto_test.go
+++ b/tests/gen/gen_auto_test.go
@@ -60,10 +60,10 @@ func getBooleanField(t *testing.T, doc *client.Document, fieldName string) bool
 	return val
 }
 
-func getDocKeysFromDocs(docs []*client.Document) []string {
+func getDocIDsFromDocs(docs []*client.Document) []string {
 	result := make([]string, 0, len(docs))
 	for _, doc := range docs {
-		result = append(result, doc.Key().String())
+		result = append(result, doc.ID().String())
 	}
 	return result
 }
@@ -90,7 +90,7 @@ func removeDuplicateStr(strSlice []string) []string {
 	return list
 }
 
-func assertDocKeysMatch(
+func assertDocIDsMatch(
 	t *testing.T,
 	docs []GeneratedDoc,
 	primaryCol, secondaryCol string,
@@ -100,7 +100,7 @@ func assertDocKeysMatch(
 	primaryDocs := filterByCollection(docs, primaryCol)
 	secondaryDocs := filterByCollection(docs, secondaryCol)
 
-	docKeys := getDocKeysFromDocs(primaryDocs)
+	docIDs := getDocIDsFromDocs(primaryDocs)
 	foreignValues := make([]string, 0, len(secondaryDocs))
 	for _, secDoc := range secondaryDocs {
 		foreignValues = append(foreignValues, getStringField(t, secDoc, foreignField))
 	}
@@ -111,7 +111,7 @@ func assertDocKeysMatch(
 		foreignValues = newValues
 	}
 
-	assert.ElementsMatch(t, docKeys, foreignValues)
+	assert.ElementsMatch(t, docIDs, foreignValues)
 }
 
 func assertUniformlyDistributedIntFieldRange(t *testing.T, docs []GeneratedDoc, fieldName string, minVal, maxVal int) {
@@ -211,8 +211,8 @@ func assertUniformRelationDistribution(
 	secondaryPerPrimary := make(map[string]int)
 	for _, d := range secondaryCol {
-		docKey := getStringField(t, d, foreignField)
-		secondaryPerPrimary[docKey]++
+		docID := getStringField(t, d, foreignField)
+		secondaryPerPrimary[docID]++
 	}
 	minDocsPerPrimary := math.MaxInt
 	maxDocsPerPrimary := math.MinInt
@@ -347,7 +347,7 @@ func TestAutoGenerateFromSchema_RelationOneToOne(t *testing.T) {
 	assert.Len(t, filterByCollection(docs, "User"), numUsers)
 	assert.Len(t, filterByCollection(docs, "Device"), numUsers)
 
-	assertDocKeysMatch(t, docs, "User", "Device", "owner_id", false)
+	assertDocIDsMatch(t, docs, "User", "Device", "owner_id", false)
 }
 
 func TestAutoGenerateFromSchema_RelationOneToMany(t *testing.T) {
@@ -369,7 +369,7 @@ func TestAutoGenerateFromSchema_RelationOneToMany(t *testing.T) {
 	assert.Len(t, filterByCollection(docs, "User"), numUsers)
 	assert.Len(t, filterByCollection(docs, "Device"), numUsers*2)
 
-	assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true)
+	assertDocIDsMatch(t, docs, "User", "Device", "owner_id", true)
 }
 
 func TestAutoGenerateFromSchema_RelationOneToManyWithConfiguredNumberOfElements(t *testing.T) {
@@ -396,7 +396,7 @@ func TestAutoGenerateFromSchema_RelationOneToManyWithConfiguredNumberOfElements(
 	assertUniformRelationDistribution(t, docs, "User", "Device", "owner_id", minDevicesPerUser, maxDevicesPerUser)
 
-	assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true)
+	assertDocIDsMatch(t, docs, "User", "Device", "owner_id", true)
 }
 
 func
TestAutoGenerateFromSchema_RelationOneToManyToOneWithConfiguredNumberOfElements(t *testing.T) { @@ -430,8 +430,8 @@ func TestAutoGenerateFromSchema_RelationOneToManyToOneWithConfiguredNumberOfElem assertUniformRelationDistribution(t, docs, "User", "Device", "owner_id", devicesPerUser, devicesPerUser) - assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) - assertDocKeysMatch(t, docs, "Device", "Specs", "device_id", false) + assertDocIDsMatch(t, docs, "User", "Device", "owner_id", true) + assertDocIDsMatch(t, docs, "Device", "Specs", "device_id", false) } func TestAutoGenerateFromSchema_RelationOneToManyToOnePrimaryWithConfiguredNumberOfElements(t *testing.T) { @@ -465,8 +465,8 @@ func TestAutoGenerateFromSchema_RelationOneToManyToOnePrimaryWithConfiguredNumbe assertUniformRelationDistribution(t, docs, "User", "Device", "owner_id", devicesPerUser, devicesPerUser) - assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) - assertDocKeysMatch(t, docs, "Specs", "Device", "specs_id", false) + assertDocIDsMatch(t, docs, "User", "Device", "owner_id", true) + assertDocIDsMatch(t, docs, "Specs", "Device", "specs_id", false) } func TestAutoGenerateFromSchema_RelationOneToManyToManyWithNumDocsForSecondaryType(t *testing.T) { @@ -510,9 +510,9 @@ func TestAutoGenerateFromSchema_RelationOneToManyToManyWithNumDocsForSecondaryTy assertUniformRelationDistribution(t, docs, "Device", "Specs", "device_id", 1, 1) assertUniformRelationDistribution(t, docs, "Device", "Component", "device_id", componentsPerDevice, componentsPerDevice) - assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) - assertDocKeysMatch(t, docs, "Device", "Specs", "device_id", false) - assertDocKeysMatch(t, docs, "Device", "Component", "device_id", true) + assertDocIDsMatch(t, docs, "User", "Device", "owner_id", true) + assertDocIDsMatch(t, docs, "Device", "Specs", "device_id", false) + assertDocIDsMatch(t, docs, "Device", "Component", "device_id", true) } func TestAutoGenerateFromSchema_DemandsForDifferentRelationTrees(t *testing.T) { @@ -549,7 +549,7 @@ func TestAutoGenerateFromSchema_DemandsForDifferentRelationTrees(t *testing.T) { assertUniformRelationDistribution(t, docs, "Device", "Component", "device_id", componentsPerDevice, componentsPerDevice) - assertDocKeysMatch(t, docs, "Device", "Component", "device_id", true) + assertDocIDsMatch(t, docs, "Device", "Component", "device_id", true) } func TestAutoGenerateFromSchema_IfTypeDemandedForSameTreeAddsUp_ShouldGenerate(t *testing.T) { @@ -638,7 +638,7 @@ func TestAutoGenerateFromSchema_IfDemand2TypesWithOptions_ShouldAdjust(t *testin assert.Len(t, filterByCollection(docs, "User"), numUsers) assert.Len(t, filterByCollection(docs, "Device"), numDevices) - assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) + assertDocIDsMatch(t, docs, "User", "Device", "owner_id", true) } func TestAutoGenerateFromSchema_IfDemand2TypesWithOptionsAndFieldDemand_ShouldAdjust(t *testing.T) { @@ -668,7 +668,7 @@ func TestAutoGenerateFromSchema_IfDemand2TypesWithOptionsAndFieldDemand_ShouldAd assertUniformRelationDistribution(t, docs, "User", "Device", "owner_id", 1, 5) - assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) + assertDocIDsMatch(t, docs, "User", "Device", "owner_id", true) } func TestAutoGenerateFromSchema_IfDemand2TypesWithRangeOptions_ShouldAdjust(t *testing.T) { @@ -699,7 +699,7 @@ func TestAutoGenerateFromSchema_IfDemand2TypesWithRangeOptions_ShouldAdjust(t *t assertUniformRelationDistribution(t, docs, "User", "Device", "owner_id", 1, 5) - 
assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) + assertDocIDsMatch(t, docs, "User", "Device", "owner_id", true) } func TestAutoGenerateFromSchema_ConfigThatCanNotBySupplied(t *testing.T) { diff --git a/tests/integration/backup/one_to_many/export_test.go b/tests/integration/backup/one_to_many/export_test.go index 328d48bd6d..3626535b9d 100644 --- a/tests/integration/backup/one_to_many/export_test.go +++ b/tests/integration/backup/one_to_many/export_test.go @@ -28,7 +28,7 @@ func TestBackupExport_JustUserCollection_NoError(t *testing.T) { Config: client.BackupConfig{ Collections: []string{"User"}, }, - ExpectedContent: `{"User":[{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, }, }, } @@ -57,7 +57,7 @@ func TestBackupExport_AllCollectionsMultipleDocsAndDocUpdate_NoError(t *testing. Doc: `{"age": 31}`, }, testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_key":"bae-0648f44e-74e8-593b-a662-3310ec278927","_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}],"Book":[{"_key":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da","_newKey":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927","_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}],"Book":[{"_docID":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da","_docIDNew":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"}]}`, }, }, } @@ -90,7 +90,7 @@ func TestBackupExport_AllCollectionsMultipleDocsAndMultipleDocUpdate_NoError(t * Doc: `{"age": 31}`, }, testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_key":"bae-0648f44e-74e8-593b-a662-3310ec278927","_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}],"Book":[{"_key":"bae-4399f189-138d-5d49-9e25-82e78463677b","_newKey":"bae-78a40f28-a4b8-5dca-be44-392b0f96d0ff","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"Game of chains"},{"_key":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da","_newKey":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927","_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}],"Book":[{"_docID":"bae-4399f189-138d-5d49-9e25-82e78463677b","_docIDNew":"bae-78a40f28-a4b8-5dca-be44-392b0f96d0ff","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"Game of 
chains"},{"_docID":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da","_docIDNew":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"}]}`, }, }, } diff --git a/tests/integration/backup/one_to_many/import_test.go b/tests/integration/backup/one_to_many/import_test.go index f3c189365d..193867cc8d 100644 --- a/tests/integration/backup/one_to_many/import_test.go +++ b/tests/integration/backup/one_to_many/import_test.go @@ -84,28 +84,28 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndUpdatedDocs_NoEr ImportContent: `{ "Book":[ { - "_key":"bae-4399f189-138d-5d49-9e25-82e78463677b", - "_newKey":"bae-78a40f28-a4b8-5dca-be44-392b0f96d0ff", + "_docID":"bae-4399f189-138d-5d49-9e25-82e78463677b", + "_docIDNew":"bae-78a40f28-a4b8-5dca-be44-392b0f96d0ff", "author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", "name":"Game of chains" }, { - "_key":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da", - "_newKey":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5", + "_docID":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da", + "_docIDNew":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5", "author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", "name":"John and the sourcerers' stone" } ], "User":[ { - "_key":"bae-0648f44e-74e8-593b-a662-3310ec278927", - "_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927", + "_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927", + "_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927", "age":31, "name":"Bob" }, { - "_key":"bae-e933420a-988a-56f8-8952-6c245aebd519", - "_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", + "_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519", + "_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", "age":31, "name":"John" } @@ -137,7 +137,7 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndUpdatedDocs_NoEr Book { name author { - _key + _docID } } }`, @@ -145,13 +145,13 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndUpdatedDocs_NoEr { "name": "Game of chains", "author": map[string]any{ - "_key": "bae-807ea028-6c13-5f86-a72b-46e8b715a162", + "_docID": "bae-807ea028-6c13-5f86-a72b-46e8b715a162", }, }, { "name": "John and the sourcerers' stone", "author": map[string]any{ - "_key": "bae-807ea028-6c13-5f86-a72b-46e8b715a162", + "_docID": "bae-807ea028-6c13-5f86-a72b-46e8b715a162", }, }, }, diff --git a/tests/integration/backup/one_to_one/export_test.go b/tests/integration/backup/one_to_one/export_test.go index c5bb798643..4ae32cbebc 100644 --- a/tests/integration/backup/one_to_one/export_test.go +++ b/tests/integration/backup/one_to_one/export_test.go @@ -28,7 +28,7 @@ func TestBackupExport_JustUserCollection_NoError(t *testing.T) { Config: client.BackupConfig{ Collections: []string{"User"}, }, - ExpectedContent: `{"User":[{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, }, }, } @@ -57,7 +57,7 @@ func TestBackupExport_AllCollectionsMultipleDocsAndDocUpdate_NoError(t *testing. 
Doc: `{"age": 31}`, }, testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_key":"bae-0648f44e-74e8-593b-a662-3310ec278927","_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}],"Book":[{"_key":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da","_newKey":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927","_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}],"Book":[{"_docID":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da","_docIDNew":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"}]}`, }, }, } @@ -101,7 +101,7 @@ func TestBackupExport_DoubleReletionship_NoError(t *testing.T) { Doc: `{"age": 31}`, }, testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_key":"bae-0648f44e-74e8-593b-a662-3310ec278927","_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}],"Book":[{"_key":"bae-45b1def4-4e63-5a93-a1b8-f7b08e682164","_newKey":"bae-add2ccfe-84a1-519c-ab7d-c54b43909532","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","favourite_id":"bae-0648f44e-74e8-593b-a662-3310ec278927","name":"John and the sourcerers' stone"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927","_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}],"Book":[{"_docID":"bae-45b1def4-4e63-5a93-a1b8-f7b08e682164","_docIDNew":"bae-add2ccfe-84a1-519c-ab7d-c54b43909532","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","favourite_id":"bae-0648f44e-74e8-593b-a662-3310ec278927","name":"John and the sourcerers' stone"}]}`, }, }, } @@ -149,7 +149,7 @@ func TestBackupExport_DoubleReletionshipWithUpdate_NoError(t *testing.T) { Doc: `{"age": 31}`, }, testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_key":"bae-0648f44e-74e8-593b-a662-3310ec278927","_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}],"Book":[{"_key":"bae-45b1def4-4e63-5a93-a1b8-f7b08e682164","_newKey":"bae-add2ccfe-84a1-519c-ab7d-c54b43909532","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","favourite_id":"bae-0648f44e-74e8-593b-a662-3310ec278927","name":"John and the sourcerers' stone"},{"_key":"bae-da7f2d88-05c4-528a-846a-0d18ab26603b","_newKey":"bae-da7f2d88-05c4-528a-846a-0d18ab26603b","name":"Game of chains"}]}`, + ExpectedContent: 
`{"User":[{"_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927","_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}],"Book":[{"_docID":"bae-45b1def4-4e63-5a93-a1b8-f7b08e682164","_docIDNew":"bae-add2ccfe-84a1-519c-ab7d-c54b43909532","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","favourite_id":"bae-0648f44e-74e8-593b-a662-3310ec278927","name":"John and the sourcerers' stone"},{"_docID":"bae-da7f2d88-05c4-528a-846a-0d18ab26603b","_docIDNew":"bae-da7f2d88-05c4-528a-846a-0d18ab26603b","name":"Game of chains"}]}`, }, }, } diff --git a/tests/integration/backup/one_to_one/import_test.go b/tests/integration/backup/one_to_one/import_test.go index f827c81670..5405dd4225 100644 --- a/tests/integration/backup/one_to_one/import_test.go +++ b/tests/integration/backup/one_to_one/import_test.go @@ -84,22 +84,22 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndUpdatedDocs_NoEr ImportContent: `{ "Book":[ { - "_key":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da", - "_newKey":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5", + "_docID":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da", + "_docIDNew":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5", "author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", "name":"John and the sourcerers' stone" } ], "User":[ { - "_key":"bae-0648f44e-74e8-593b-a662-3310ec278927", - "_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927", + "_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927", + "_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927", "age":31, "name":"Bob" }, { - "_key":"bae-e933420a-988a-56f8-8952-6c245aebd519", - "_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", + "_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519", + "_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", "age":31, "name":"John" } @@ -131,7 +131,7 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndUpdatedDocs_NoEr Book { name author { - _key + _docID } } }`, @@ -139,7 +139,7 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndUpdatedDocs_NoEr { "name": "John and the sourcerers' stone", "author": map[string]any{ - "_key": "bae-807ea028-6c13-5f86-a72b-46e8b715a162", + "_docID": "bae-807ea028-6c13-5f86-a72b-46e8b715a162", }, }, }, @@ -157,28 +157,28 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndMultipleUpdatedD ImportContent: `{ "Book":[ { - "_key":"bae-4399f189-138d-5d49-9e25-82e78463677b", - "_newKey":"bae-78a40f28-a4b8-5dca-be44-392b0f96d0ff", + "_docID":"bae-4399f189-138d-5d49-9e25-82e78463677b", + "_docIDNew":"bae-78a40f28-a4b8-5dca-be44-392b0f96d0ff", "author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", "name":"Game of chains" }, { - "_key":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da", - "_newKey":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5", + "_docID":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da", + "_docIDNew":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5", "author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", "name":"John and the sourcerers' stone" } ], "User":[ { - "_key":"bae-0648f44e-74e8-593b-a662-3310ec278927", - "_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927", + "_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927", + "_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927", "age":31, "name":"Bob" }, { - "_key":"bae-e933420a-988a-56f8-8952-6c245aebd519", - "_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", + "_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519", + 
"_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", "age":31, "name":"John" } @@ -211,7 +211,7 @@ func TestBackupImport_DoubleRelationshipWithUpdate_NoError(t *testing.T) { `, }, testUtils.BackupImport{ - ImportContent: `{"Book":[{"_key":"bae-236c14bd-4621-5d43-bc03-4442f3b8719e","_newKey":"bae-6dbb3738-d3db-5121-acee-6fbdd97ff7a8","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","favourite_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"},{"_key":"bae-da7f2d88-05c4-528a-846a-0d18ab26603b","_newKey":"bae-da7f2d88-05c4-528a-846a-0d18ab26603b","name":"Game of chains"}],"User":[{"_key":"bae-0648f44e-74e8-593b-a662-3310ec278927","_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}]}`, + ImportContent: `{"Book":[{"_docID":"bae-236c14bd-4621-5d43-bc03-4442f3b8719e","_docIDNew":"bae-6dbb3738-d3db-5121-acee-6fbdd97ff7a8","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","favourite_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"},{"_docID":"bae-da7f2d88-05c4-528a-846a-0d18ab26603b","_docIDNew":"bae-da7f2d88-05c4-528a-846a-0d18ab26603b","name":"Game of chains"}],"User":[{"_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927","_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}]}`, }, testUtils.Request{ Request: ` diff --git a/tests/integration/backup/self_reference/export_test.go b/tests/integration/backup/self_reference/export_test.go index e0d0c606cf..9a0c73a8d1 100644 --- a/tests/integration/backup/self_reference/export_test.go +++ b/tests/integration/backup/self_reference/export_test.go @@ -32,7 +32,7 @@ func TestBackupExport_Simple_NoError(t *testing.T) { Config: client.BackupConfig{ Collections: []string{"User"}, }, - ExpectedContent: `{"User":[{"_key":"bae-790e7e49-f2e3-5ad6-83d9-5dfb6d8ba81d","_newKey":"bae-790e7e49-f2e3-5ad6-83d9-5dfb6d8ba81d","age":31,"boss_id":"bae-e933420a-988a-56f8-8952-6c245aebd519","name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-790e7e49-f2e3-5ad6-83d9-5dfb6d8ba81d","_docIDNew":"bae-790e7e49-f2e3-5ad6-83d9-5dfb6d8ba81d","age":31,"boss_id":"bae-e933420a-988a-56f8-8952-6c245aebd519","name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, }, }, } @@ -57,7 +57,7 @@ func TestBackupExport_MultipleDocsAndDocUpdate_NoError(t *testing.T) { Doc: `{"age": 31}`, }, testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_key":"bae-790e7e49-f2e3-5ad6-83d9-5dfb6d8ba81d","_newKey":"bae-067fd15e-32a1-5681-8f41-c423f563e21b","age":31,"boss_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}]}`, + ExpectedContent: 
`{"User":[{"_docID":"bae-790e7e49-f2e3-5ad6-83d9-5dfb6d8ba81d","_docIDNew":"bae-067fd15e-32a1-5681-8f41-c423f563e21b","age":31,"boss_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}]}`, }, }, } diff --git a/tests/integration/backup/self_reference/import_test.go b/tests/integration/backup/self_reference/import_test.go index 71c44361a1..0a68a66d85 100644 --- a/tests/integration/backup/self_reference/import_test.go +++ b/tests/integration/backup/self_reference/import_test.go @@ -25,13 +25,13 @@ func TestBackupSelfRefImport_Simple_NoError(t *testing.T) { ImportContent: `{ "User":[ { - "_key":"bae-790e7e49-f2e3-5ad6-83d9-5dfb6d8ba81d", + "_docID":"bae-790e7e49-f2e3-5ad6-83d9-5dfb6d8ba81d", "age":31, "boss_id":"bae-e933420a-988a-56f8-8952-6c245aebd519", "name":"Bob" }, { - "_key":"bae-e933420a-988a-56f8-8952-6c245aebd519", + "_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519", "age":30, "name":"John" } @@ -71,8 +71,8 @@ func TestBackupSelfRefImport_SelfRef_NoError(t *testing.T) { expectedExportData := `{` + `"User":[` + `{` + - `"_key":"bae-0648f44e-74e8-593b-a662-3310ec278927",` + - `"_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927",` + + `"_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927",` + + `"_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927",` + `"age":31,` + `"boss_id":"bae-0648f44e-74e8-593b-a662-3310ec278927",` + `"name":"Bob"` + @@ -269,16 +269,16 @@ func TestBackupSelfRefImport_SplitPrimaryRelationWithSecondCollection_NoError(t expectedExportData := `{` + `"Author":[` + `{` + - `"_key":"bae-d760e445-22ef-5956-9947-26de226891f6",` + - `"_newKey":"bae-e3a6ff01-33ff-55f4-88f9-d13db26274c8",` + + `"_docID":"bae-d760e445-22ef-5956-9947-26de226891f6",` + + `"_docIDNew":"bae-e3a6ff01-33ff-55f4-88f9-d13db26274c8",` + `"book_id":"bae-c821a0a9-7afc-583b-accb-dc99a09c1ff8",` + `"name":"John"` + `}` + `],` + `"Book":[` + `{` + - `"_key":"bae-4059cb15-2b30-5049-b0df-64cc7ad9b5e4",` + - `"_newKey":"bae-c821a0a9-7afc-583b-accb-dc99a09c1ff8",` + + `"_docID":"bae-4059cb15-2b30-5049-b0df-64cc7ad9b5e4",` + + `"_docIDNew":"bae-c821a0a9-7afc-583b-accb-dc99a09c1ff8",` + `"name":"John and the sourcerers' stone",` + `"reviewedBy_id":"bae-e3a6ff01-33ff-55f4-88f9-d13db26274c8"` + `}` + diff --git a/tests/integration/backup/simple/export_test.go b/tests/integration/backup/simple/export_test.go index 7ee2e65bd5..d7397b13cb 100644 --- a/tests/integration/backup/simple/export_test.go +++ b/tests/integration/backup/simple/export_test.go @@ -25,7 +25,7 @@ func TestBackupExport_Simple_NoError(t *testing.T) { Doc: `{"name": "John", "age": 30}`, }, testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, }, }, } @@ -41,7 +41,7 @@ func TestBackupExport_Empty_NoError(t *testing.T) { Doc: `{}`, }, testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_key":"bae-524bfa06-849c-5daf-b6df-05c2da80844d","_newKey":"bae-524bfa06-849c-5daf-b6df-05c2da80844d"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-524bfa06-849c-5daf-b6df-05c2da80844d","_docIDNew":"bae-524bfa06-849c-5daf-b6df-05c2da80844d"}]}`, }, }, } @@ -98,7 +98,7 @@ func TestBackupExport_JustUserCollection_NoError(t *testing.T) { 
Config: client.BackupConfig{ Collections: []string{"User"}, }, - ExpectedContent: `{"User":[{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, }, }, } diff --git a/tests/integration/backup/simple/import_test.go b/tests/integration/backup/simple/import_test.go index a53760fa3e..cdfadc6e61 100644 --- a/tests/integration/backup/simple/import_test.go +++ b/tests/integration/backup/simple/import_test.go @@ -20,7 +20,7 @@ func TestBackupImport_Simple_NoError(t *testing.T) { test := testUtils.TestCase{ Actions: []any{ testUtils.BackupImport{ - ImportContent: `{"User":[{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, + ImportContent: `{"User":[{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, }, testUtils.Request{ Request: ` @@ -60,7 +60,7 @@ func TestBackupImport_WithInvalidCollection_ReturnError(t *testing.T) { test := testUtils.TestCase{ Actions: []any{ testUtils.BackupImport{ - ImportContent: `{"Invalid":[{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, + ImportContent: `{"Invalid":[{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, ExpectedError: "failed to get collection: datastore: key not found. Name: Invalid", }, }, @@ -77,8 +77,8 @@ func TestBackupImport_WithDocAlreadyExists_ReturnError(t *testing.T) { Doc: `{"name": "John", "age": 30}`, }, testUtils.BackupImport{ - ImportContent: `{"User":[{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, - ExpectedError: "a document with the given dockey already exists", + ImportContent: `{"User":[{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, + ExpectedError: "a document with the given ID already exists", }, }, } diff --git a/tests/integration/collection/update/simple/with_key_test.go b/tests/integration/collection/update/simple/with_doc_id_test.go similarity index 81% rename from tests/integration/collection/update/simple/with_key_test.go rename to tests/integration/collection/update/simple/with_doc_id_test.go index b3a269271f..228438b58b 100644 --- a/tests/integration/collection/update/simple/with_key_test.go +++ b/tests/integration/collection/update/simple/with_doc_id_test.go @@ -20,7 +20,7 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration/collection" ) -func TestUpdateWithKey(t *testing.T) { +func TestUpdateWithDocID(t *testing.T) { docStr := `{ "name": "John", "age": 21 @@ -33,7 +33,7 @@ func TestUpdateWithKey(t *testing.T) { tests := []testUtils.TestCase{ { - Description: "Test update users with key and invalid JSON", + Description: "Test update users with docID and invalid JSON", Docs: map[string][]string{ "Users": {docStr}, }, @@ -41,7 +41,7 @@ func TestUpdateWithKey(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithKey(ctx, doc.Key(), `{ + _, err := c.UpdateWithDocID(ctx, doc.ID(), `{ 
name: "Eric" }`) return err @@ -50,7 +50,7 @@ func TestUpdateWithKey(t *testing.T) { }, ExpectedError: "cannot parse JSON: cannot parse object", }, { - Description: "Test update users with key and invalid updator", + Description: "Test update users with docID and invalid updator", Docs: map[string][]string{ "Users": {docStr}, }, @@ -58,14 +58,14 @@ func TestUpdateWithKey(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithKey(ctx, doc.Key(), `"name: Eric"`) + _, err := c.UpdateWithDocID(ctx, doc.ID(), `"name: Eric"`) return err }, }, }, ExpectedError: "the updater of a document is of invalid type", }, { - Description: "Test update users with key and patch updator (not implemented so no change)", + Description: "Test update users with docID and patch updator (not implemented so no change)", Docs: map[string][]string{ "Users": {docStr}, }, @@ -73,7 +73,7 @@ func TestUpdateWithKey(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithKey(ctx, doc.Key(), `[ + _, err := c.UpdateWithDocID(ctx, doc.ID(), `[ { "name": "Eric" }, { @@ -84,7 +84,7 @@ func TestUpdateWithKey(t *testing.T) { return err } - d, err := c.Get(ctx, doc.Key(), false) + d, err := c.Get(ctx, doc.ID(), false) if err != nil { return err } @@ -101,7 +101,7 @@ func TestUpdateWithKey(t *testing.T) { }, }, }, { - Description: "Test update users with key", + Description: "Test update users with docID", Docs: map[string][]string{ "Users": {docStr}, }, @@ -109,14 +109,14 @@ func TestUpdateWithKey(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithKey(ctx, doc.Key(), `{ + _, err := c.UpdateWithDocID(ctx, doc.ID(), `{ "name": "Eric" }`) if err != nil { return err } - d, err := c.Get(ctx, doc.Key(), false) + d, err := c.Get(ctx, doc.ID(), false) if err != nil { return err } diff --git a/tests/integration/collection/update/simple/with_keys_test.go b/tests/integration/collection/update/simple/with_doc_ids_test.go similarity index 79% rename from tests/integration/collection/update/simple/with_keys_test.go rename to tests/integration/collection/update/simple/with_doc_ids_test.go index d36e140852..f32818db39 100644 --- a/tests/integration/collection/update/simple/with_keys_test.go +++ b/tests/integration/collection/update/simple/with_doc_ids_test.go @@ -20,7 +20,7 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration/collection" ) -func TestUpdateWithKeys(t *testing.T) { +func TestUpdateWithDocIDs(t *testing.T) { docStr1 := `{ "name": "John", "age": 21 @@ -43,7 +43,7 @@ func TestUpdateWithKeys(t *testing.T) { tests := []testUtils.TestCase{ { - Description: "Test update users with keys and invalid JSON", + Description: "Test update users with docIDs and invalid JSON", Docs: map[string][]string{ "Users": { docStr1, @@ -54,7 +54,7 @@ func TestUpdateWithKeys(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithKeys(ctx, []client.DocKey{doc1.Key(), doc2.Key()}, `{ + _, err := c.UpdateWithDocIDs(ctx, []client.DocID{doc1.ID(), doc2.ID()}, `{ name: "Eric" }`) return err @@ -63,7 +63,7 @@ func TestUpdateWithKeys(t *testing.T) { }, ExpectedError: "cannot parse JSON: cannot parse object", }, { - Description: "Test update users with keys and invalid updator", + 
Description: "Test update users with docIDs and invalid updator", Docs: map[string][]string{ "Users": { docStr1, @@ -74,14 +74,14 @@ func TestUpdateWithKeys(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithKeys(ctx, []client.DocKey{doc1.Key(), doc2.Key()}, `"name: Eric"`) + _, err := c.UpdateWithDocIDs(ctx, []client.DocID{doc1.ID(), doc2.ID()}, `"name: Eric"`) return err }, }, }, ExpectedError: "the updater of a document is of invalid type", }, { - Description: "Test update users with keys and patch updator (not implemented so no change)", + Description: "Test update users with docIDs and patch updator (not implemented so no change)", Docs: map[string][]string{ "Users": { docStr1, @@ -92,7 +92,7 @@ func TestUpdateWithKeys(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithKeys(ctx, []client.DocKey{doc1.Key(), doc2.Key()}, `[ + _, err := c.UpdateWithDocIDs(ctx, []client.DocID{doc1.ID(), doc2.ID()}, `[ { "name": "Eric" }, { @@ -103,7 +103,7 @@ func TestUpdateWithKeys(t *testing.T) { return err } - d, err := c.Get(ctx, doc1.Key(), false) + d, err := c.Get(ctx, doc1.ID(), false) if err != nil { return err } @@ -115,7 +115,7 @@ func TestUpdateWithKeys(t *testing.T) { assert.Equal(t, "John", name) - d2, err := c.Get(ctx, doc2.Key(), false) + d2, err := c.Get(ctx, doc2.ID(), false) if err != nil { return err } @@ -132,7 +132,7 @@ func TestUpdateWithKeys(t *testing.T) { }, }, }, { - Description: "Test update users with keys", + Description: "Test update users with docIDs", Docs: map[string][]string{ "Users": { docStr1, @@ -143,14 +143,14 @@ func TestUpdateWithKeys(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithKeys(ctx, []client.DocKey{doc1.Key(), doc2.Key()}, `{ + _, err := c.UpdateWithDocIDs(ctx, []client.DocID{doc1.ID(), doc2.ID()}, `{ "age": 40 }`) if err != nil { return err } - d, err := c.Get(ctx, doc1.Key(), false) + d, err := c.Get(ctx, doc1.ID(), false) if err != nil { return err } @@ -162,7 +162,7 @@ func TestUpdateWithKeys(t *testing.T) { assert.Equal(t, int64(40), name) - d2, err := c.Get(ctx, doc2.Key(), false) + d2, err := c.Get(ctx, doc2.ID(), false) if err != nil { return err } diff --git a/tests/integration/collection/update/simple/with_filter_test.go b/tests/integration/collection/update/simple/with_filter_test.go index 7d04c0ec04..de2d24f8e2 100644 --- a/tests/integration/collection/update/simple/with_filter_test.go +++ b/tests/integration/collection/update/simple/with_filter_test.go @@ -130,7 +130,7 @@ func TestUpdateWithFilter(t *testing.T) { return err } - d, err := c.Get(ctx, doc.Key(), false) + d, err := c.Get(ctx, doc.ID(), false) if err != nil { return err } @@ -162,7 +162,7 @@ func TestUpdateWithFilter(t *testing.T) { return err } - d, err := c.Get(ctx, doc.Key(), false) + d, err := c.Get(ctx, doc.ID(), false) if err != nil { return err } diff --git a/tests/integration/events/simple/with_create_test.go b/tests/integration/events/simple/with_create_test.go index d93aafc31a..0c780c8fde 100644 --- a/tests/integration/events/simple/with_create_test.go +++ b/tests/integration/events/simple/with_create_test.go @@ -30,7 +30,7 @@ func TestEventsSimpleWithCreate(t *testing.T) { ), ) assert.Nil(t, err) - docKey1 := doc1.Key().String() + docID1 := doc1.ID().String() doc2, err := 
client.NewDocFromJSON( []byte( @@ -40,7 +40,7 @@ func TestEventsSimpleWithCreate(t *testing.T) { ), ) assert.Nil(t, err) - docKey2 := doc2.Key().String() + docID2 := doc2.ID().String() test := testUtils.TestCase{ CollectionCalls: map[string][]func(client.Collection){ @@ -57,10 +57,10 @@ func TestEventsSimpleWithCreate(t *testing.T) { }, ExpectedUpdates: []testUtils.ExpectedUpdate{ { - DocKey: immutable.Some(docKey1), + DocID: immutable.Some(docID1), }, { - DocKey: immutable.Some(docKey2), + DocID: immutable.Some(docID2), }, }, } diff --git a/tests/integration/events/simple/with_create_txn_test.go b/tests/integration/events/simple/with_create_txn_test.go index 40b9cef428..962a16e39a 100644 --- a/tests/integration/events/simple/with_create_txn_test.go +++ b/tests/integration/events/simple/with_create_txn_test.go @@ -29,7 +29,7 @@ func TestEventsSimpleWithCreateWithTxnDiscarded(t *testing.T) { ctx, `mutation { create_Users(data: "{\"name\": \"John\"}") { - _key + _docID } }`, ) @@ -44,7 +44,7 @@ func TestEventsSimpleWithCreateWithTxnDiscarded(t *testing.T) { ctx, `mutation { create_Users(data: "{\"name\": \"Shahzad\"}") { - _key + _docID } }`, ) @@ -56,7 +56,7 @@ func TestEventsSimpleWithCreateWithTxnDiscarded(t *testing.T) { }, ExpectedUpdates: []testUtils.ExpectedUpdate{ { - DocKey: immutable.Some("bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad"), + DocID: immutable.Some("bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad"), }, // No event should be received for Shahzad, as the transaction was discarded. }, diff --git a/tests/integration/events/simple/with_delete_test.go b/tests/integration/events/simple/with_delete_test.go index f84e42f672..df811cd648 100644 --- a/tests/integration/events/simple/with_delete_test.go +++ b/tests/integration/events/simple/with_delete_test.go @@ -30,7 +30,7 @@ func TestEventsSimpleWithDelete(t *testing.T) { ), ) assert.Nil(t, err) - docKey1 := doc1.Key().String() + docID1 := doc1.ID().String() test := testUtils.TestCase{ CollectionCalls: map[string][]func(client.Collection){ @@ -40,7 +40,7 @@ func TestEventsSimpleWithDelete(t *testing.T) { assert.Nil(t, err) }, func(c client.Collection) { - wasDeleted, err := c.Delete(context.Background(), doc1.Key()) + wasDeleted, err := c.Delete(context.Background(), doc1.ID()) assert.Nil(t, err) assert.True(t, wasDeleted) }, @@ -48,10 +48,10 @@ func TestEventsSimpleWithDelete(t *testing.T) { }, ExpectedUpdates: []testUtils.ExpectedUpdate{ { - DocKey: immutable.Some(docKey1), + DocID: immutable.Some(docID1), }, { - DocKey: immutable.Some(docKey1), + DocID: immutable.Some(docID1), }, }, } diff --git a/tests/integration/events/simple/with_update_test.go b/tests/integration/events/simple/with_update_test.go index f496678e28..30b8cab9a4 100644 --- a/tests/integration/events/simple/with_update_test.go +++ b/tests/integration/events/simple/with_update_test.go @@ -30,7 +30,7 @@ func TestEventsSimpleWithUpdate(t *testing.T) { ), ) assert.Nil(t, err) - docKey1 := doc1.Key().String() + docID1 := doc1.ID().String() doc2, err := client.NewDocFromJSON( []byte( @@ -40,7 +40,7 @@ func TestEventsSimpleWithUpdate(t *testing.T) { ), ) assert.Nil(t, err) - docKey2 := doc2.Key().String() + docID2 := doc2.ID().String() test := testUtils.TestCase{ CollectionCalls: map[string][]func(client.Collection){ @@ -63,15 +63,15 @@ func TestEventsSimpleWithUpdate(t *testing.T) { }, ExpectedUpdates: []testUtils.ExpectedUpdate{ { - DocKey: immutable.Some(docKey1), - Cid: immutable.Some("bafybeifwfw3g4q6tagffdwq4orrouoosdlsc5rb67q2uj7oplkq7ax5ysm"), + DocID: 
immutable.Some(docID1), + Cid: immutable.Some("bafybeicbv34oa4hfcnqbka3jqnby4g75ttlj4wfvc7zhvat5xca45ggq2u"), }, { - DocKey: immutable.Some(docKey2), + DocID: immutable.Some(docID2), }, { - DocKey: immutable.Some(docKey1), - Cid: immutable.Some("bafybeihdhik6m5o7cxei7f7ie6lnnbwnjsn42ne6cxab6g7dgi7k2uiiu4"), + DocID: immutable.Some(docID1), + Cid: immutable.Some("bafybeiep6f7sls7z325oqd5oddigxq3fkxwpp5b7um47yz5erxfybjd6ra"), }, }, } diff --git a/tests/integration/events/utils.go b/tests/integration/events/utils.go index c461ed5cc3..30b65bc189 100644 --- a/tests/integration/events/utils.go +++ b/tests/integration/events/utils.go @@ -54,7 +54,7 @@ type TestCase struct { // are `None` the Update event will still be expected and will contribute // to the asserted count. type ExpectedUpdate struct { - DocKey immutable.Option[string] + DocID immutable.Option[string] // The expected Cid, as a string (results in much more readable errors) Cid immutable.Option[string] SchemaRoot immutable.Option[string] @@ -96,7 +96,7 @@ func ExecuteRequestTestCase( expectedEvent := testCase.ExpectedUpdates[indexOfNextExpectedUpdate] assertIfExpected(t, expectedEvent.Cid, update.Cid.String()) - assertIfExpected(t, expectedEvent.DocKey, update.DocKey) + assertIfExpected(t, expectedEvent.DocID, update.DocID) assertIfExpected(t, expectedEvent.Priority, update.Priority) assertIfExpected(t, expectedEvent.SchemaRoot, update.SchemaRoot) diff --git a/tests/integration/explain/debug/dagscan_test.go b/tests/integration/explain/debug/dagscan_test.go index 647d378907..010f866dd7 100644 --- a/tests/integration/explain/debug/dagscan_test.go +++ b/tests/integration/explain/debug/dagscan_test.go @@ -38,7 +38,7 @@ func TestDebugExplainCommitsDagScanQueryOp(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain(type: debug) { - commits (dockey: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", fieldId: "1") { + commits (docID: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", fieldId: "1") { links { cid } @@ -56,7 +56,7 @@ func TestDebugExplainCommitsDagScanQueryOp(t *testing.T) { func TestDebugExplainCommitsDagScanQueryOpWithoutField(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) commits query-op with only dockey (no field).", + Description: "Explain (debug) commits query-op with only docID (no field).", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -64,7 +64,7 @@ func TestDebugExplainCommitsDagScanQueryOpWithoutField(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain(type: debug) { - commits (dockey: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3") { + commits (docID: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3") { links { cid } @@ -90,7 +90,7 @@ func TestDebugExplainLatestCommitsDagScanQueryOp(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain(type: debug) { - latestCommits(dockey: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", fieldId: "1") { + latestCommits(docID: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", fieldId: "1") { cid links { cid @@ -109,7 +109,7 @@ func TestDebugExplainLatestCommitsDagScanQueryOp(t *testing.T) { func TestDebugExplainLatestCommitsDagScanQueryOpWithoutField(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) latestCommits query-op with only dockey (no field).", + Description: "Explain (debug) latestCommits query-op with only docID (no field).", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -117,7 +117,7 @@ func TestDebugExplainLatestCommitsDagScanQueryOpWithoutField(t *testing.T) { testUtils.ExplainRequest{ 
Request: `query @explain(type: debug) { - latestCommits(dockey: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3") { + latestCommits(docID: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3") { cid links { cid @@ -133,10 +133,10 @@ func TestDebugExplainLatestCommitsDagScanQueryOpWithoutField(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } -func TestDebugExplainLatestCommitsDagScanWithoutDocKey_Failure(t *testing.T) { +func TestDebugExplainLatestCommitsDagScanWithoutDocID_Failure(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) latestCommits query without DocKey.", + Description: "Explain (debug) latestCommits query without docID.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -152,7 +152,7 @@ func TestDebugExplainLatestCommitsDagScanWithoutDocKey_Failure(t *testing.T) { } }`, - ExpectedError: "Field \"latestCommits\" argument \"dockey\" of type \"ID!\" is required but not provided.", + ExpectedError: "Field \"latestCommits\" argument \"docID\" of type \"ID!\" is required but not provided.", }, }, } @@ -179,7 +179,7 @@ func TestDebugExplainLatestCommitsDagScanWithoutAnyArguments_Failure(t *testing. } }`, - ExpectedError: "Field \"latestCommits\" argument \"dockey\" of type \"ID!\" is required but not provided.", + ExpectedError: "Field \"latestCommits\" argument \"docID\" of type \"ID!\" is required but not provided.", }, }, } diff --git a/tests/integration/explain/debug/delete_test.go b/tests/integration/explain/debug/delete_test.go index 083c6163c2..88159152e0 100644 --- a/tests/integration/explain/debug/delete_test.go +++ b/tests/integration/explain/debug/delete_test.go @@ -41,7 +41,7 @@ func TestDebugExplainMutationRequestWithDeleteUsingFilter(t *testing.T) { Request: `mutation @explain(type: debug) { delete_Author(filter: {name: {_eq: "Shahzad"}}) { - _key + _docID } }`, @@ -65,7 +65,7 @@ func TestDebugExplainMutationRequestWithDeleteUsingFilterToMatchEverything(t *te Request: `mutation @explain(type: debug) { delete_Author(filter: {}) { - DeletedKeyByFilter: _key + DeletedKeyByFilter: _docID } }`, @@ -80,7 +80,7 @@ func TestDebugExplainMutationRequestWithDeleteUsingFilterToMatchEverything(t *te func TestDebugExplainMutationRequestWithDeleteUsingId(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) mutation request with delete using id.", + Description: "Explain (debug) mutation request with delete using document id.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -88,8 +88,8 @@ func TestDebugExplainMutationRequestWithDeleteUsingId(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain(type: debug) { - delete_Author(id: "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d") { - _key + delete_Author(docID: "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d") { + _docID } }`, @@ -112,11 +112,11 @@ func TestDebugExplainMutationRequestWithDeleteUsingIds(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain(type: debug) { - delete_Author(ids: [ + delete_Author(docIDs: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f" ]) { - AliasKey: _key + AliasKey: _docID } }`, @@ -139,8 +139,8 @@ func TestDebugExplainMutationRequestWithDeleteUsingNoIds(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain(type: debug) { - delete_Author(ids: []) { - _key + delete_Author(docIDs: []) { + _docID } }`, @@ -164,7 +164,7 @@ func TestDebugExplainMutationRequestWithDeleteUsingFilterAndIds(t *testing.T) { Request: `mutation @explain(type: debug) { delete_Author( - ids: 
["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "test"], + docIDs: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "test"], filter: { _and: [ {age: {_lt: 26}}, @@ -172,7 +172,7 @@ func TestDebugExplainMutationRequestWithDeleteUsingFilterAndIds(t *testing.T) { ] } ) { - _key + _docID } }`, diff --git a/tests/integration/explain/debug/delete_with_error_test.go b/tests/integration/explain/debug/delete_with_error_test.go index 208106e098..a7c5fc1d3a 100644 --- a/tests/integration/explain/debug/delete_with_error_test.go +++ b/tests/integration/explain/debug/delete_with_error_test.go @@ -29,7 +29,7 @@ func TestDebugExplainMutationRequestWithDeleteHavingNoSubSelection(t *testing.T) Request: `mutation @explain(type: debug) { delete_Author( - ids: [ + docIDs: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f" ] diff --git a/tests/integration/explain/debug/group_with_dockey_child_test.go b/tests/integration/explain/debug/group_with_doc_id_child_test.go similarity index 80% rename from tests/integration/explain/debug/group_with_dockey_child_test.go rename to tests/integration/explain/debug/group_with_doc_id_child_test.go index bc6555b961..43301f1fac 100644 --- a/tests/integration/explain/debug/group_with_dockey_child_test.go +++ b/tests/integration/explain/debug/group_with_doc_id_child_test.go @@ -17,10 +17,10 @@ import ( explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) -func TestDebugExplainRequestWithDockeysOnInnerGroupSelection(t *testing.T) { +func TestDebugExplainRequestWithDocIDsOnInnerGroupSelection(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) request with dockeys on inner _group.", + Description: "Explain (debug) request with docIDs on inner _group.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -32,7 +32,7 @@ func TestDebugExplainRequestWithDockeysOnInnerGroupSelection(t *testing.T) { groupBy: [age] ) { age - _group(dockeys: ["bae-6a4c5bc5-b044-5a03-a868-8260af6f2254"]) { + _group(docIDs: ["bae-6a4c5bc5-b044-5a03-a868-8260af6f2254"]) { name } } diff --git a/tests/integration/explain/debug/group_with_dockey_test.go b/tests/integration/explain/debug/group_with_doc_id_test.go similarity index 78% rename from tests/integration/explain/debug/group_with_dockey_test.go rename to tests/integration/explain/debug/group_with_doc_id_test.go index fc53731c6a..ebbfbdb3c9 100644 --- a/tests/integration/explain/debug/group_with_dockey_test.go +++ b/tests/integration/explain/debug/group_with_doc_id_test.go @@ -17,10 +17,10 @@ import ( explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) -func TestDebugExplainRequestWithDockeyOnParentGroupBy(t *testing.T) { +func TestDebugExplainRequestWithDocIDOnParentGroupBy(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) request with a dockey on parent groupBy.", + Description: "Explain (debug) request with a document ID on parent groupBy.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -30,7 +30,7 @@ func TestDebugExplainRequestWithDockeyOnParentGroupBy(t *testing.T) { Request: `query @explain(type: debug) { Author( groupBy: [age], - dockey: "bae-6a4c5bc5-b044-5a03-a868-8260af6f2254" + docID: "bae-6a4c5bc5-b044-5a03-a868-8260af6f2254" ) { age _group { @@ -47,10 +47,10 @@ func TestDebugExplainRequestWithDockeyOnParentGroupBy(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } -func TestDebugExplainRequestWithDockeysAndFilterOnParentGroupBy(t *testing.T) { +func 
TestDebugExplainRequestWithDocIDsAndFilterOnParentGroupBy(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) request with dockeys and filter on parent groupBy.", + Description: "Explain (debug) request with document IDs and filter on parent groupBy.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -61,7 +61,7 @@ func TestDebugExplainRequestWithDockeysAndFilterOnParentGroupBy(t *testing.T) { Author( groupBy: [age], filter: {age: {_eq: 20}}, - dockeys: [ + docIDs: [ "bae-6a4c5bc5-b044-5a03-a868-8260af6f2254", "bae-4ea9d148-13f3-5a48-a0ef-9ffd344caeed" ] diff --git a/tests/integration/explain/debug/type_join_with_filter_and_key_test.go b/tests/integration/explain/debug/type_join_with_filter_doc_id_test.go similarity index 90% rename from tests/integration/explain/debug/type_join_with_filter_and_key_test.go rename to tests/integration/explain/debug/type_join_with_filter_doc_id_test.go index 5219c5c874..5a8f2c5ba2 100644 --- a/tests/integration/explain/debug/type_join_with_filter_and_key_test.go +++ b/tests/integration/explain/debug/type_join_with_filter_doc_id_test.go @@ -17,10 +17,10 @@ import ( explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) -func TestDebugExplainRequestWithRelatedAndRegularFilterAndKeys(t *testing.T) { +func TestDebugExplainRequestWithRelatedAndRegularFilterAndDocIDs(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) request with related and regular filter + keys.", + Description: "Explain (debug) request with related and regular filter + docIDs.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -33,7 +33,7 @@ func TestDebugExplainRequestWithRelatedAndRegularFilterAndKeys(t *testing.T) { name: {_eq: "John Grisham"}, books: {name: {_eq: "Painted House"}} }, - dockeys: [ + docIDs: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f8e" ] @@ -63,10 +63,10 @@ func TestDebugExplainRequestWithRelatedAndRegularFilterAndKeys(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } -func TestDebugExplainRequestWithManyRelatedFiltersAndKey(t *testing.T) { +func TestDebugExplainRequestWithManyRelatedFiltersAndDocID(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) request with many related filters + key.", + Description: "Explain (debug) request with many related filters + docID.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -80,7 +80,7 @@ func TestDebugExplainRequestWithManyRelatedFiltersAndKey(t *testing.T) { articles: {name: {_eq: "To my dear readers"}}, books: {name: {_eq: "Theif Lord"}} }, - dockeys: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"] + docIDs: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"] ) { name age diff --git a/tests/integration/explain/debug/update_test.go b/tests/integration/explain/debug/update_test.go index 8a479837d6..8c8ed82f0b 100644 --- a/tests/integration/explain/debug/update_test.go +++ b/tests/integration/explain/debug/update_test.go @@ -48,7 +48,7 @@ func TestDebugExplainMutationRequestWithUpdateUsingBooleanFilter(t *testing.T) { }, data: "{\"age\": 59}" ) { - _key + _docID name age } @@ -74,13 +74,13 @@ func TestDebugExplainMutationRequestWithUpdateUsingIds(t *testing.T) { Request: `mutation @explain(type: debug) { update_Author( - ids: [ + docIDs: [ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ], data: "{\"age\": 59}" ) { - _key + _docID name age } @@ -97,7 +97,7 @@ func TestDebugExplainMutationRequestWithUpdateUsingIds(t *testing.T) { func 
TestDebugExplainMutationRequestWithUpdateUsingId(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) mutation request with update using id.", + Description: "Explain (debug) mutation request with update using document ID.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -106,10 +106,10 @@ func TestDebugExplainMutationRequestWithUpdateUsingId(t *testing.T) { Request: `mutation @explain(type: debug) { update_Author( - id: "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", + docID: "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", data: "{\"age\": 59}" ) { - _key + _docID name age } @@ -140,13 +140,13 @@ func TestDebugExplainMutationRequestWithUpdateUsingIdsAndFilter(t *testing.T) { _eq: true } }, - ids: [ + docIDs: [ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ], data: "{\"age\": 59}" ) { - _key + _docID name age } diff --git a/tests/integration/explain/debug/with_filter_key_test.go b/tests/integration/explain/debug/with_filter_doc_id_test.go similarity index 72% rename from tests/integration/explain/debug/with_filter_key_test.go rename to tests/integration/explain/debug/with_filter_doc_id_test.go index 5ca0939150..89bf3f35aa 100644 --- a/tests/integration/explain/debug/with_filter_key_test.go +++ b/tests/integration/explain/debug/with_filter_doc_id_test.go @@ -17,10 +17,10 @@ import ( explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) -func TestDebugExplainRequestWithDocKeyFilter(t *testing.T) { +func TestDebugExplainRequestWithDocIDFilter(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) request with dockey filter.", + Description: "Explain (debug) request with docID filter.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -28,7 +28,7 @@ func TestDebugExplainRequestWithDocKeyFilter(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain(type: debug) { - Author(dockey: "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d") { + Author(docID: "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d") { name age } @@ -42,10 +42,10 @@ func TestDebugExplainRequestWithDocKeyFilter(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } -func TestDebugExplainRequestWithDocKeysFilterUsingOneKey(t *testing.T) { +func TestDebugExplainRequestWithDocIDsFilterUsingOneID(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) request with dockeys filter using one key.", + Description: "Explain (debug) request with docIDs filter using one ID.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -53,7 +53,7 @@ func TestDebugExplainRequestWithDocKeysFilterUsingOneKey(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain(type: debug) { - Author(dockeys: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"]) { + Author(docIDs: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"]) { name age } @@ -67,10 +67,10 @@ func TestDebugExplainRequestWithDocKeysFilterUsingOneKey(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } -func TestDebugExplainRequestWithDocKeysFilterUsingMultipleButDuplicateKeys(t *testing.T) { +func TestDebugExplainRequestWithDocIDsFilterUsingMultipleButDuplicateIDs(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) request with dockeys filter using multiple but duplicate keys.", + Description: "Explain (debug) request with docIDs filter using multiple but duplicate IDs.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -79,7 +79,7 @@ func TestDebugExplainRequestWithDocKeysFilterUsingMultipleButDuplicateKeys(t *te Request: `query 
@explain(type: debug) { Author( - dockeys: [ + docIDs: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ] @@ -97,10 +97,10 @@ func TestDebugExplainRequestWithDocKeysFilterUsingMultipleButDuplicateKeys(t *te explainUtils.ExecuteTestCase(t, test) } -func TestDebugExplainRequestWithDocKeysFilterUsingMultipleUniqueKeys(t *testing.T) { +func TestDebugExplainRequestWithDocIDsFilterUsingMultipleUniqueIDs(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) request with dockeys filter using multiple unique keys.", + Description: "Explain (debug) request with docIDs filter using multiple unique IDs.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -109,7 +109,7 @@ func TestDebugExplainRequestWithDocKeysFilterUsingMultipleUniqueKeys(t *testing. Request: `query @explain(type: debug) { Author( - dockeys: [ + docIDs: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f" ] @@ -127,10 +127,10 @@ func TestDebugExplainRequestWithDocKeysFilterUsingMultipleUniqueKeys(t *testing. explainUtils.ExecuteTestCase(t, test) } -func TestDebugExplainRequestWithMatchingKeyFilter(t *testing.T) { +func TestDebugExplainRequestWithMatchingIDFilter(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) request with a filter to match key.", + Description: "Explain (debug) request with a filter to match ID.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -140,7 +140,7 @@ func TestDebugExplainRequestWithMatchingKeyFilter(t *testing.T) { Request: `query @explain(type: debug) { Author( filter: { - _key: { + _docID: { _eq: "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" } } diff --git a/tests/integration/explain/debug/with_sum_join_test.go b/tests/integration/explain/debug/with_sum_join_test.go index 66c68be8ed..d098eec2f1 100644 --- a/tests/integration/explain/debug/with_sum_join_test.go +++ b/tests/integration/explain/debug/with_sum_join_test.go @@ -44,7 +44,7 @@ func TestDebugExplainRequestWithSumOnOneToManyJoinedField(t *testing.T) { Request: `query @explain(type: debug) { Author { name - _key + _docID TotalPages: _sum( books: {field: pages} ) diff --git a/tests/integration/explain/default/basic_test.go b/tests/integration/explain/default/basic_test.go index 2d7f515d9e..30a5810de6 100644 --- a/tests/integration/explain/default/basic_test.go +++ b/tests/integration/explain/default/basic_test.go @@ -64,7 +64,7 @@ func TestDefaultExplainRequestWithFullBasicGraph(t *testing.T) { "explain": dataMap{ "selectTopNode": dataMap{ "selectNode": dataMap{ - "_keys": nil, + "docIDs": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, diff --git a/tests/integration/explain/default/dagscan_test.go b/tests/integration/explain/default/dagscan_test.go index a83402bb67..c18f365f82 100644 --- a/tests/integration/explain/default/dagscan_test.go +++ b/tests/integration/explain/default/dagscan_test.go @@ -38,7 +38,7 @@ func TestDefaultExplainCommitsDagScanQueryOp(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain { - commits (dockey: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", fieldId: "1") { + commits (docID: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", fieldId: "1") { links { cid } @@ -73,7 +73,7 @@ func TestDefaultExplainCommitsDagScanQueryOp(t *testing.T) { func TestDefaultExplainCommitsDagScanQueryOpWithoutField(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) commits query-op with only dockey (no field).", + Description: "Explain (default) commits query-op with only 
docID (no field).", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -81,7 +81,7 @@ func TestDefaultExplainCommitsDagScanQueryOpWithoutField(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain { - commits (dockey: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3") { + commits (docID: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3") { links { cid } @@ -124,7 +124,7 @@ func TestDefaultExplainLatestCommitsDagScanQueryOp(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain { - latestCommits(dockey: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", fieldId: "1") { + latestCommits(docID: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", fieldId: "1") { cid links { cid @@ -160,7 +160,7 @@ func TestDefaultExplainLatestCommitsDagScanQueryOp(t *testing.T) { func TestDefaultExplainLatestCommitsDagScanQueryOpWithoutField(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) latestCommits query-op with only dockey (no field).", + Description: "Explain (default) latestCommits query-op with only docID (no field).", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -168,7 +168,7 @@ func TestDefaultExplainLatestCommitsDagScanQueryOpWithoutField(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain { - latestCommits(dockey: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3") { + latestCommits(docID: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3") { cid links { cid @@ -201,10 +201,10 @@ func TestDefaultExplainLatestCommitsDagScanQueryOpWithoutField(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } -func TestDefaultExplainLatestCommitsDagScanWithoutDocKey_Failure(t *testing.T) { +func TestDefaultExplainLatestCommitsDagScanWithoutDocID_Failure(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) latestCommits query without DocKey.", + Description: "Explain (default) latestCommits query without docID.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -220,7 +220,7 @@ func TestDefaultExplainLatestCommitsDagScanWithoutDocKey_Failure(t *testing.T) { } }`, - ExpectedError: "Field \"latestCommits\" argument \"dockey\" of type \"ID!\" is required but not provided.", + ExpectedError: "Field \"latestCommits\" argument \"docID\" of type \"ID!\" is required but not provided.", }, }, } @@ -247,7 +247,7 @@ func TestDefaultExplainLatestCommitsDagScanWithoutAnyArguments_Failure(t *testin } }`, - ExpectedError: "Field \"latestCommits\" argument \"dockey\" of type \"ID!\" is required but not provided.", + ExpectedError: "Field \"latestCommits\" argument \"docID\" of type \"ID!\" is required but not provided.", }, }, } diff --git a/tests/integration/explain/default/delete_test.go b/tests/integration/explain/default/delete_test.go index 71f454b6e7..660785f6a4 100644 --- a/tests/integration/explain/default/delete_test.go +++ b/tests/integration/explain/default/delete_test.go @@ -41,7 +41,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilter(t *testing.T) { Request: `mutation @explain { delete_Author(filter: {name: {_eq: "Shahzad"}}) { - _key + _docID } }`, @@ -57,7 +57,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilter(t *testing.T) { "_eq": "Shahzad", }, }, - "ids": []string(nil), + "docIDs": []string(nil), }, }, @@ -100,7 +100,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilterToMatchEverything(t * Request: `mutation @explain { delete_Author(filter: {}) { - DeletedKeyByFilter: _key + DeletedKeyByFilter: _docID } }`, @@ -112,7 +112,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilterToMatchEverything(t 
* IncludeChildNodes: false, ExpectedAttributes: dataMap{ "filter": nil, - "ids": []string(nil), + "docIDs": []string(nil), }, }, @@ -142,7 +142,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilterToMatchEverything(t * func TestDefaultExplainMutationRequestWithDeleteUsingId(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) mutation request with delete using id.", + Description: "Explain (default) mutation request with delete using document ID.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -150,8 +150,8 @@ func TestDefaultExplainMutationRequestWithDeleteUsingId(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain { - delete_Author(id: "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d") { - _key + delete_Author(docID: "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d") { + _docID } }`, @@ -163,7 +163,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingId(t *testing.T) { IncludeChildNodes: false, ExpectedAttributes: dataMap{ "filter": nil, - "ids": []string{ + "docIDs": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, }, @@ -203,11 +203,11 @@ func TestDefaultExplainMutationRequestWithDeleteUsingIds(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain { - delete_Author(ids: [ + delete_Author(docIDs: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f" ]) { - AliasKey: _key + AliasKey: _docID } }`, @@ -219,7 +219,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingIds(t *testing.T) { IncludeChildNodes: false, ExpectedAttributes: dataMap{ "filter": nil, - "ids": []string{ + "docIDs": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", }, @@ -264,8 +264,8 @@ func TestDefaultExplainMutationRequestWithDeleteUsingNoIds(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain { - delete_Author(ids: []) { - _key + delete_Author(docIDs: []) { + _docID } }`, @@ -277,7 +277,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingNoIds(t *testing.T) { IncludeChildNodes: false, ExpectedAttributes: dataMap{ "filter": nil, - "ids": []string{}, + "docIDs": []string{}, }, }, @@ -311,7 +311,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilterAndIds(t *testing.T) Request: `mutation @explain { delete_Author( - ids: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "test"], + docIDs: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "test"], filter: { _and: [ {age: {_lt: 26}}, @@ -319,7 +319,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilterAndIds(t *testing.T) ] } ) { - _key + _docID } }`, @@ -344,7 +344,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilterAndIds(t *testing.T) }, }, }, - "ids": []string{ + "docIDs": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "test", }, diff --git a/tests/integration/explain/default/delete_with_error_test.go b/tests/integration/explain/default/delete_with_error_test.go index cbe11c2591..14ef207451 100644 --- a/tests/integration/explain/default/delete_with_error_test.go +++ b/tests/integration/explain/default/delete_with_error_test.go @@ -29,7 +29,7 @@ func TestDefaultExplainMutationRequestWithDeleteHavingNoSubSelection(t *testing. 
Request: `mutation @explain { delete_Author( - ids: [ + docIDs: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f" ] diff --git a/tests/integration/explain/default/fixture.go b/tests/integration/explain/default/fixture.go index 306e3e133f..399a59ac20 100644 --- a/tests/integration/explain/default/fixture.go +++ b/tests/integration/explain/default/fixture.go @@ -24,7 +24,7 @@ var basicPattern = dataMap{ var emptyChildSelectsAttributeForAuthor = dataMap{ "collectionName": "Author", - "docKeys": nil, + "docIDs": nil, "filter": nil, "groupBy": nil, "limit": nil, diff --git a/tests/integration/explain/default/group_with_average_test.go b/tests/integration/explain/default/group_with_average_test.go index 32f935785d..4346ec6a9a 100644 --- a/tests/integration/explain/default/group_with_average_test.go +++ b/tests/integration/explain/default/group_with_average_test.go @@ -63,7 +63,7 @@ func TestDefaultExplainRequestWithGroupByWithAverageOnAnInnerField(t *testing.T) "childSelects": []dataMap{ { "collectionName": "Author", - "docKeys": nil, + "docIDs": nil, "groupBy": nil, "limit": nil, "orderBy": nil, @@ -155,7 +155,7 @@ func TestDefaultExplainRequestWithAverageInsideTheInnerGroupOnAField(t *testing. { "collectionName": "Author", "groupBy": []string{"verified", "name"}, - "docKeys": nil, + "docIDs": nil, "limit": nil, "orderBy": nil, "filter": nil, @@ -237,7 +237,7 @@ func TestDefaultExplainRequestWithAverageInsideTheInnerGroupOnAFieldAndNestedGro { "collectionName": "Author", "groupBy": []string{"verified", "name"}, - "docKeys": nil, + "docIDs": nil, "limit": nil, "orderBy": nil, "filter": nil, @@ -320,7 +320,7 @@ func TestDefaultExplainRequestWithAverageInsideTheInnerGroupAndNestedGroupByWith { "collectionName": "Author", "groupBy": []string{"verified", "name"}, - "docKeys": nil, + "docIDs": nil, "limit": nil, "orderBy": nil, "filter": nil, diff --git a/tests/integration/explain/default/group_with_dockey_child_test.go b/tests/integration/explain/default/group_with_doc_id_child_test.go similarity index 86% rename from tests/integration/explain/default/group_with_dockey_child_test.go rename to tests/integration/explain/default/group_with_doc_id_child_test.go index 35726cda32..6ce3b8c041 100644 --- a/tests/integration/explain/default/group_with_dockey_child_test.go +++ b/tests/integration/explain/default/group_with_doc_id_child_test.go @@ -17,10 +17,10 @@ import ( explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) -func TestDefaultExplainRequestWithDockeysOnInnerGroupSelection(t *testing.T) { +func TestDefaultExplainRequestWithDocIDsOnInnerGroupSelection(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) request with dockeys on inner _group.", + Description: "Explain (default) request with docIDs on inner _group.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -32,7 +32,7 @@ func TestDefaultExplainRequestWithDockeysOnInnerGroupSelection(t *testing.T) { groupBy: [age] ) { age - _group(dockeys: ["bae-6a4c5bc5-b044-5a03-a868-8260af6f2254"]) { + _group(docIDs: ["bae-6a4c5bc5-b044-5a03-a868-8260af6f2254"]) { name } } @@ -49,7 +49,7 @@ func TestDefaultExplainRequestWithDockeysOnInnerGroupSelection(t *testing.T) { "childSelects": []dataMap{ { "collectionName": "Author", - "docKeys": []string{"bae-6a4c5bc5-b044-5a03-a868-8260af6f2254"}, + "docIDs": []string{"bae-6a4c5bc5-b044-5a03-a868-8260af6f2254"}, "filter": nil, "groupBy": nil, "limit": nil, diff --git 
a/tests/integration/explain/default/group_with_dockey_test.go b/tests/integration/explain/default/group_with_doc_id_test.go similarity index 89% rename from tests/integration/explain/default/group_with_dockey_test.go rename to tests/integration/explain/default/group_with_doc_id_test.go index 31555bc94a..d458ad9015 100644 --- a/tests/integration/explain/default/group_with_dockey_test.go +++ b/tests/integration/explain/default/group_with_doc_id_test.go @@ -17,10 +17,10 @@ import ( explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) -func TestDefaultExplainRequestWithDockeyOnParentGroupBy(t *testing.T) { +func TestDefaultExplainRequestWithDocIDOnParentGroupBy(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) request with a dockey on parent groupBy.", + Description: "Explain (default) request with a docID on parent groupBy.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -30,7 +30,7 @@ func TestDefaultExplainRequestWithDockeyOnParentGroupBy(t *testing.T) { Request: `query @explain { Author( groupBy: [age], - dockey: "bae-6a4c5bc5-b044-5a03-a868-8260af6f2254" + docID: "bae-6a4c5bc5-b044-5a03-a868-8260af6f2254" ) { age _group { @@ -75,10 +75,10 @@ func TestDefaultExplainRequestWithDockeyOnParentGroupBy(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } -func TestDefaultExplainRequestWithDockeysAndFilterOnParentGroupBy(t *testing.T) { +func TestDefaultExplainRequestWithDocIDsAndFilterOnParentGroupBy(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) request with dockeys and filter on parent groupBy.", + Description: "Explain (default) request with docIDs and filter on parent groupBy.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -89,7 +89,7 @@ func TestDefaultExplainRequestWithDockeysAndFilterOnParentGroupBy(t *testing.T) Author( groupBy: [age], filter: {age: {_eq: 20}}, - dockeys: [ + docIDs: [ "bae-6a4c5bc5-b044-5a03-a868-8260af6f2254", "bae-4ea9d148-13f3-5a48-a0ef-9ffd344caeed" ] diff --git a/tests/integration/explain/default/group_with_filter_child_test.go b/tests/integration/explain/default/group_with_filter_child_test.go index bcb53e0e86..a8522962eb 100644 --- a/tests/integration/explain/default/group_with_filter_child_test.go +++ b/tests/integration/explain/default/group_with_filter_child_test.go @@ -47,7 +47,7 @@ func TestDefaultExplainRequestWithFilterOnInnerGroupSelection(t *testing.T) { "childSelects": []dataMap{ { "collectionName": "Author", - "docKeys": nil, + "docIDs": nil, "filter": dataMap{ "age": dataMap{ "_gt": int32(63), @@ -116,7 +116,7 @@ func TestDefaultExplainRequestWithFilterOnParentGroupByAndInnerGroupSelection(t "childSelects": []dataMap{ { "collectionName": "Author", - "docKeys": nil, + "docIDs": nil, "filter": dataMap{ "age": dataMap{ "_gt": int32(63), diff --git a/tests/integration/explain/default/group_with_limit_child_test.go b/tests/integration/explain/default/group_with_limit_child_test.go index 13d4730638..fb6dc83f77 100644 --- a/tests/integration/explain/default/group_with_limit_child_test.go +++ b/tests/integration/explain/default/group_with_limit_child_test.go @@ -51,7 +51,7 @@ func TestDefaultExplainRequestWithLimitAndOffsetOnInnerGroupSelection(t *testing "limit": uint64(2), "offset": uint64(1), }, - "docKeys": nil, + "docIDs": nil, "filter": nil, "groupBy": nil, "orderBy": nil, @@ -104,7 +104,7 @@ func TestDefaultExplainRequestWithLimitAndOffsetOnMultipleInnerGroupSelections(t "limit": uint64(1), "offset": uint64(2), }, - "docKeys": nil, + "docIDs": nil, 
"filter": nil, "groupBy": nil, "orderBy": nil, @@ -115,7 +115,7 @@ func TestDefaultExplainRequestWithLimitAndOffsetOnMultipleInnerGroupSelections(t "limit": uint64(2), "offset": uint64(0), }, - "docKeys": nil, + "docIDs": nil, "filter": nil, "groupBy": nil, "orderBy": nil, diff --git a/tests/integration/explain/default/group_with_limit_test.go b/tests/integration/explain/default/group_with_limit_test.go index 967cda469c..b88496c7dd 100644 --- a/tests/integration/explain/default/group_with_limit_test.go +++ b/tests/integration/explain/default/group_with_limit_test.go @@ -121,7 +121,7 @@ func TestDefaultExplainRequestWithLimitOnParentGroupByAndInnerGroupSelection(t * "offset": uint64(0), }, "orderBy": nil, - "docKeys": nil, + "docIDs": nil, "groupBy": nil, "filter": nil, }, diff --git a/tests/integration/explain/default/group_with_order_child_test.go b/tests/integration/explain/default/group_with_order_child_test.go index 55d14ef469..e8ba14d697 100644 --- a/tests/integration/explain/default/group_with_order_child_test.go +++ b/tests/integration/explain/default/group_with_order_child_test.go @@ -53,7 +53,7 @@ func TestDefaultExplainRequestWithDescendingOrderOnInnerGroupSelection(t *testin "fields": []string{"age"}, }, }, - "docKeys": nil, + "docIDs": nil, "groupBy": nil, "limit": nil, "filter": nil, @@ -105,7 +105,7 @@ func TestDefaultExplainRequestWithAscendingOrderOnInnerGroupSelection(t *testing "fields": []string{"age"}, }, }, - "docKeys": nil, + "docIDs": nil, "groupBy": nil, "limit": nil, "filter": nil, @@ -164,7 +164,7 @@ func TestDefaultExplainRequestWithOrderOnNestedParentGroupByAndOnNestedParentsIn }, }, "groupBy": []string{"verified", "name"}, - "docKeys": nil, + "docIDs": nil, "limit": nil, "filter": nil, }, diff --git a/tests/integration/explain/default/group_with_order_test.go b/tests/integration/explain/default/group_with_order_test.go index 7de88087df..43e6b7ba05 100644 --- a/tests/integration/explain/default/group_with_order_test.go +++ b/tests/integration/explain/default/group_with_order_test.go @@ -180,7 +180,7 @@ func TestDefaultExplainRequestWithOrderOnParentGroupByAndOnInnerGroupSelection(t "fields": []string{"age"}, }, }, - "docKeys": nil, + "docIDs": nil, "groupBy": nil, "limit": nil, "filter": nil, diff --git a/tests/integration/explain/default/invalid_type_arg_test.go b/tests/integration/explain/default/invalid_type_arg_test.go index 391d56492a..2759eb8bd0 100644 --- a/tests/integration/explain/default/invalid_type_arg_test.go +++ b/tests/integration/explain/default/invalid_type_arg_test.go @@ -28,7 +28,7 @@ func TestInvalidExplainRequestTypeReturnsError(t *testing.T) { Request: `query @explain(type: invalid) { Author { - _key + _docID name age } diff --git a/tests/integration/explain/default/type_join_many_test.go b/tests/integration/explain/default/type_join_many_test.go index 9fa66bf76a..3b700b132b 100644 --- a/tests/integration/explain/default/type_join_many_test.go +++ b/tests/integration/explain/default/type_join_many_test.go @@ -84,7 +84,7 @@ func TestDefaultExplainRequestWithAOneToManyJoin(t *testing.T) { ExpectedAttributes: dataMap{ "selectTopNode": dataMap{ "selectNode": dataMap{ - "_keys": nil, + "docIDs": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, diff --git a/tests/integration/explain/default/type_join_one_test.go b/tests/integration/explain/default/type_join_one_test.go index 472a6f2164..8a7fac0925 100644 --- a/tests/integration/explain/default/type_join_one_test.go +++ b/tests/integration/explain/default/type_join_one_test.go @@ -85,7 
+85,7 @@ func TestDefaultExplainRequestWithAOneToOneJoin(t *testing.T) { ExpectedAttributes: dataMap{ "selectTopNode": dataMap{ "selectNode": dataMap{ - "_keys": nil, + "docIDs": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, @@ -225,7 +225,7 @@ func TestDefaultExplainRequestWithTwoLevelDeepNestedJoins(t *testing.T) { ExpectedAttributes: dataMap{ "selectTopNode": dataMap{ "selectNode": dataMap{ - "_keys": nil, + "docIDs": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, diff --git a/tests/integration/explain/default/type_join_test.go b/tests/integration/explain/default/type_join_test.go index c3ca250565..fd1676aed9 100644 --- a/tests/integration/explain/default/type_join_test.go +++ b/tests/integration/explain/default/type_join_test.go @@ -119,7 +119,7 @@ func TestDefaultExplainRequestWith2SingleJoinsAnd1ManyJoin(t *testing.T) { ExpectedAttributes: dataMap{ "selectTopNode": dataMap{ "selectNode": dataMap{ - "_keys": nil, + "docIDs": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, @@ -177,7 +177,7 @@ func TestDefaultExplainRequestWith2SingleJoinsAnd1ManyJoin(t *testing.T) { ExpectedAttributes: dataMap{ "selectTopNode": dataMap{ "selectNode": dataMap{ - "_keys": nil, + "docIDs": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, diff --git a/tests/integration/explain/default/type_join_with_filter_and_key_test.go b/tests/integration/explain/default/type_join_with_filter_doc_id_test.go similarity index 92% rename from tests/integration/explain/default/type_join_with_filter_and_key_test.go rename to tests/integration/explain/default/type_join_with_filter_doc_id_test.go index 2290de03e6..7b320b01b7 100644 --- a/tests/integration/explain/default/type_join_with_filter_and_key_test.go +++ b/tests/integration/explain/default/type_join_with_filter_doc_id_test.go @@ -17,10 +17,10 @@ import ( explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) -func TestDefaultExplainRequestWithRelatedAndRegularFilterAndKeys(t *testing.T) { +func TestDefaultExplainRequestWithRelatedAndRegularFilterAndDocIDs(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) request with related and regular filter + keys.", + Description: "Explain (default) request with related and regular filter + docIDs.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -33,7 +33,7 @@ func TestDefaultExplainRequestWithRelatedAndRegularFilterAndKeys(t *testing.T) { name: {_eq: "John Grisham"}, books: {name: {_eq: "Painted House"}} }, - dockeys: [ + docIDs: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f8e" ] @@ -59,7 +59,7 @@ func TestDefaultExplainRequestWithRelatedAndRegularFilterAndKeys(t *testing.T) { { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "_keys": []string{ + "docIDs": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f8e", }, @@ -103,10 +103,10 @@ func TestDefaultExplainRequestWithRelatedAndRegularFilterAndKeys(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } -func TestDefaultExplainRequestWithManyRelatedFiltersAndKey(t *testing.T) { +func TestDefaultExplainRequestWithManyRelatedFiltersAndDocID(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) request with many related filters + key.", + Description: "Explain (default) request with many related filters + docID.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -120,7 +120,7 @@ func TestDefaultExplainRequestWithManyRelatedFiltersAndKey(t *testing.T) { articles: {name: 
{_eq: "To my dear readers"}}, books: {name: {_eq: "Theif Lord"}} }, - dockeys: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"] + docIDs: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"] ) { name age @@ -150,7 +150,7 @@ func TestDefaultExplainRequestWithManyRelatedFiltersAndKey(t *testing.T) { { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "_keys": []string{ + "docIDs": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, "filter": dataMap{ diff --git a/tests/integration/explain/default/type_join_with_filter_test.go b/tests/integration/explain/default/type_join_with_filter_test.go index 799ad2677d..78ed484b0c 100644 --- a/tests/integration/explain/default/type_join_with_filter_test.go +++ b/tests/integration/explain/default/type_join_with_filter_test.go @@ -55,7 +55,7 @@ func TestDefaultExplainRequestWithRelatedAndRegularFilter(t *testing.T) { { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "_keys": nil, + "docIDs": nil, "filter": dataMap{ "books": dataMap{ "name": dataMap{ @@ -138,7 +138,7 @@ func TestDefaultExplainRequestWithManyRelatedFilters(t *testing.T) { { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "_keys": nil, + "docIDs": nil, "filter": dataMap{ "articles": dataMap{ "name": dataMap{ diff --git a/tests/integration/explain/default/update_test.go b/tests/integration/explain/default/update_test.go index 0b5ee28920..cd2af141c3 100644 --- a/tests/integration/explain/default/update_test.go +++ b/tests/integration/explain/default/update_test.go @@ -48,7 +48,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingBooleanFilter(t *testing.T) }, data: "{\"age\": 59}" ) { - _key + _docID name age } @@ -69,7 +69,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingBooleanFilter(t *testing.T) "_eq": true, }, }, - "ids": []string(nil), + "docIDs": []string(nil), }, }, { @@ -111,13 +111,13 @@ func TestDefaultExplainMutationRequestWithUpdateUsingIds(t *testing.T) { Request: `mutation @explain { update_Author( - ids: [ + docIDs: [ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ], data: "{\"age\": 59}" ) { - _key + _docID name age } @@ -134,7 +134,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingIds(t *testing.T) { "age": float64(59), }, "filter": nil, - "ids": []string{ + "docIDs": []string{ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, @@ -170,7 +170,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingIds(t *testing.T) { func TestDefaultExplainMutationRequestWithUpdateUsingId(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) mutation request with update using id.", + Description: "Explain (default) mutation request with update using document id.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -179,10 +179,10 @@ func TestDefaultExplainMutationRequestWithUpdateUsingId(t *testing.T) { Request: `mutation @explain { update_Author( - id: "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", + docID: "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", data: "{\"age\": 59}" ) { - _key + _docID name age } @@ -199,7 +199,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingId(t *testing.T) { "age": float64(59), }, "filter": nil, - "ids": []string{ + "docIDs": []string{ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", }, }, @@ -244,13 +244,13 @@ func TestDefaultExplainMutationRequestWithUpdateUsingIdsAndFilter(t *testing.T) _eq: true } }, - ids: [ + docIDs: [ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", 
"bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ], data: "{\"age\": 59}" ) { - _key + _docID name age } @@ -271,7 +271,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingIdsAndFilter(t *testing.T) "_eq": true, }, }, - "ids": []string{ + "docIDs": []string{ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, diff --git a/tests/integration/explain/default/with_filter_key_test.go b/tests/integration/explain/default/with_filter_doc_id_test.go similarity index 85% rename from tests/integration/explain/default/with_filter_key_test.go rename to tests/integration/explain/default/with_filter_doc_id_test.go index 7f181a07f5..a5807d1da7 100644 --- a/tests/integration/explain/default/with_filter_key_test.go +++ b/tests/integration/explain/default/with_filter_doc_id_test.go @@ -17,10 +17,10 @@ import ( explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) -func TestDefaultExplainRequestWithDocKeyFilter(t *testing.T) { +func TestDefaultExplainRequestWithDocIDFilter(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) request with dockey filter.", + Description: "Explain (default) request with docID filter.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -28,7 +28,7 @@ func TestDefaultExplainRequestWithDocKeyFilter(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain { - Author(dockey: "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d") { + Author(docID: "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d") { name age } @@ -40,7 +40,7 @@ func TestDefaultExplainRequestWithDocKeyFilter(t *testing.T) { { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "_keys": []string{ + "docIDs": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, "filter": nil, @@ -69,10 +69,10 @@ func TestDefaultExplainRequestWithDocKeyFilter(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } -func TestDefaultExplainRequestWithDocKeysFilterUsingOneKey(t *testing.T) { +func TestDefaultExplainRequestWithDocIDsFilterUsingOneID(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) request with dockeys filter using one key.", + Description: "Explain (default) request with docIDs filter using one ID.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -80,7 +80,7 @@ func TestDefaultExplainRequestWithDocKeysFilterUsingOneKey(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain { - Author(dockeys: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"]) { + Author(docIDs: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"]) { name age } @@ -92,7 +92,7 @@ func TestDefaultExplainRequestWithDocKeysFilterUsingOneKey(t *testing.T) { { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "_keys": []string{ + "docIDs": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, "filter": nil, @@ -121,10 +121,10 @@ func TestDefaultExplainRequestWithDocKeysFilterUsingOneKey(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } -func TestDefaultExplainRequestWithDocKeysFilterUsingMultipleButDuplicateKeys(t *testing.T) { +func TestDefaultExplainRequestWithDocIDsFilterUsingMultipleButDuplicateIDs(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) request with dockeys filter using multiple but duplicate keys.", + Description: "Explain (default) request with docIDs filter using multiple but duplicate IDs.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -133,7 +133,7 @@ func TestDefaultExplainRequestWithDocKeysFilterUsingMultipleButDuplicateKeys(t * Request: `query @explain { 
Author( - dockeys: [ + docIDs: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ] @@ -149,7 +149,7 @@ func TestDefaultExplainRequestWithDocKeysFilterUsingMultipleButDuplicateKeys(t * { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "_keys": []string{ + "docIDs": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, @@ -183,10 +183,10 @@ func TestDefaultExplainRequestWithDocKeysFilterUsingMultipleButDuplicateKeys(t * explainUtils.ExecuteTestCase(t, test) } -func TestDefaultExplainRequestWithDocKeysFilterUsingMultipleUniqueKeys(t *testing.T) { +func TestDefaultExplainRequestWithDocIDsFilterUsingMultipleUniqueIDs(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) request with dockeys filter using multiple unique keys.", + Description: "Explain (default) request with docIDs filter using multiple unique IDs.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -195,7 +195,7 @@ func TestDefaultExplainRequestWithDocKeysFilterUsingMultipleUniqueKeys(t *testin Request: `query @explain { Author( - dockeys: [ + docIDs: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f" ] @@ -211,7 +211,7 @@ func TestDefaultExplainRequestWithDocKeysFilterUsingMultipleUniqueKeys(t *testin { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "_keys": []string{ + "docIDs": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", }, @@ -245,10 +245,10 @@ func TestDefaultExplainRequestWithDocKeysFilterUsingMultipleUniqueKeys(t *testin explainUtils.ExecuteTestCase(t, test) } -func TestDefaultExplainRequestWithMatchingKeyFilter(t *testing.T) { +func TestDefaultExplainRequestWithMatchingIDFilter(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) request with a filter to match key.", + Description: "Explain (default) request with a filter to match ID.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -258,7 +258,7 @@ func TestDefaultExplainRequestWithMatchingKeyFilter(t *testing.T) { Request: `query @explain { Author( filter: { - _key: { + _docID: { _eq: "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" } } @@ -274,7 +274,7 @@ func TestDefaultExplainRequestWithMatchingKeyFilter(t *testing.T) { { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "_keys": nil, + "docIDs": nil, "filter": nil, }, }, @@ -285,7 +285,7 @@ func TestDefaultExplainRequestWithMatchingKeyFilter(t *testing.T) { "collectionID": "3", "collectionName": "Author", "filter": dataMap{ - "_key": dataMap{ + "_docID": dataMap{ "_eq": "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, }, diff --git a/tests/integration/explain/default/with_sum_join_test.go b/tests/integration/explain/default/with_sum_join_test.go index 74d330fefd..5117031959 100644 --- a/tests/integration/explain/default/with_sum_join_test.go +++ b/tests/integration/explain/default/with_sum_join_test.go @@ -42,7 +42,7 @@ func TestDefaultExplainRequestWithSumOnOneToManyJoinedField(t *testing.T) { Request: `query @explain { Author { name - _key + _docID TotalPages: _sum( books: {field: pages} ) diff --git a/tests/integration/explain/execute/dagscan_test.go b/tests/integration/explain/execute/dagscan_test.go index 9b91ff5003..3edc6e71f0 100644 --- a/tests/integration/explain/execute/dagscan_test.go +++ b/tests/integration/explain/execute/dagscan_test.go @@ -30,7 +30,7 @@ func TestExecuteExplainCommitsDagScan(t *testing.T) { testUtils.ExplainRequest{ 
Request: `query @explain(type: execute) { - commits (dockey: "bae-7f54d9e0-cbde-5320-aa6c-5c8895a89138") { + commits (docID: "bae-7f54d9e0-cbde-5320-aa6c-5c8895a89138") { links { cid } @@ -75,7 +75,7 @@ func TestExecuteExplainLatestCommitsDagScan(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain(type: execute) { - latestCommits(dockey: "bae-7f54d9e0-cbde-5320-aa6c-5c8895a89138") { + latestCommits(docID: "bae-7f54d9e0-cbde-5320-aa6c-5c8895a89138") { cid links { cid diff --git a/tests/integration/explain/execute/delete_test.go b/tests/integration/explain/execute/delete_test.go index e924ce334c..75aa515c1a 100644 --- a/tests/integration/explain/execute/delete_test.go +++ b/tests/integration/explain/execute/delete_test.go @@ -20,7 +20,7 @@ import ( func TestExecuteExplainMutationRequestWithDeleteUsingID(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (execute) mutation request with deletion using id.", + Description: "Explain (execute) mutation request with deletion using document ID.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -30,7 +30,7 @@ func TestExecuteExplainMutationRequestWithDeleteUsingID(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain(type: execute) { - delete_ContactAddress(ids: ["bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692"]) { + delete_ContactAddress(docIDs: ["bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692"]) { city } }`, diff --git a/tests/integration/explain/execute/fixture.go b/tests/integration/explain/execute/fixture.go index ec83514778..7de5e6a959 100644 --- a/tests/integration/explain/execute/fixture.go +++ b/tests/integration/explain/execute/fixture.go @@ -81,7 +81,7 @@ func create2AuthorDocuments() []testUtils.CreateDoc { return []testUtils.CreateDoc{ { CollectionID: 2, - // _key: "bae-7f54d9e0-cbde-5320-aa6c-5c8895a89138" + // _docID: "bae-7f54d9e0-cbde-5320-aa6c-5c8895a89138" Doc: `{ "name": "John Grisham", "age": 65, @@ -91,7 +91,7 @@ func create2AuthorDocuments() []testUtils.CreateDoc { }, { CollectionID: 2, - // _key: "bae-68cb395d-df73-5bcb-b623-615a140dee12" + // _docID: "bae-68cb395d-df73-5bcb-b623-615a140dee12" Doc: `{ "name": "Cornelia Funke", "age": 62, @@ -107,7 +107,7 @@ func create2AuthorContactDocuments() []testUtils.CreateDoc { { CollectionID: 3, // "author_id": "bae-7f54d9e0-cbde-5320-aa6c-5c8895a89138" - // _key: "bae-4db5359b-7dbe-5778-b96f-d71d1e6d0871" + // _docID: "bae-4db5359b-7dbe-5778-b96f-d71d1e6d0871" Doc: `{ "cell": "5197212301", "email": "john_grisham@example.com", @@ -117,7 +117,7 @@ func create2AuthorContactDocuments() []testUtils.CreateDoc { { CollectionID: 3, // "author_id": "bae-68cb395d-df73-5bcb-b623-615a140dee12", - // _key: "bae-1f19fc5d-de4d-59a5-bbde-492be1757d65" + // _docID: "bae-1f19fc5d-de4d-59a5-bbde-492be1757d65" Doc: `{ "cell": "5197212302", "email": "cornelia_funke@example.com", @@ -132,7 +132,7 @@ func create2AddressDocuments() []testUtils.CreateDoc { { CollectionID: 4, // "contact_id": "bae-4db5359b-7dbe-5778-b96f-d71d1e6d0871" - // _key: bae-c8448e47-6cd1-571f-90bd-364acb80da7b + // _docID: bae-c8448e47-6cd1-571f-90bd-364acb80da7b Doc: `{ "city": "Waterloo", "country": "Canada" @@ -141,7 +141,7 @@ func create2AddressDocuments() []testUtils.CreateDoc { { CollectionID: 4, // "contact_id": ""bae-1f19fc5d-de4d-59a5-bbde-492be1757d65"" - // _key: bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692 + // _docID: bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692 Doc: `{ "city": "Brampton", "country": "Canada" diff --git a/tests/integration/explain/execute/query_deleted_docs_test.go 
b/tests/integration/explain/execute/query_deleted_docs_test.go index cb1ebbcaa7..7872eb4847 100644 --- a/tests/integration/explain/execute/query_deleted_docs_test.go +++ b/tests/integration/explain/execute/query_deleted_docs_test.go @@ -26,12 +26,12 @@ func TestExecuteExplainQueryDeletedDocs(t *testing.T) { create2AddressDocuments(), testUtils.Request{ Request: `mutation { - delete_ContactAddress(ids: ["bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692"]) { - _key + delete_ContactAddress(docIDs: ["bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692"]) { + _docID } }`, Results: []map[string]any{ - {"_key": "bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692"}, + {"_docID": "bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692"}, }, }, testUtils.ExplainRequest{ diff --git a/tests/integration/explain/execute/update_test.go b/tests/integration/explain/execute/update_test.go index a1fa92b091..fa54f7f331 100644 --- a/tests/integration/explain/execute/update_test.go +++ b/tests/integration/explain/execute/update_test.go @@ -20,7 +20,7 @@ import ( func TestExecuteExplainMutationRequestWithUpdateUsingIDs(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (execute) mutation request with update using ids.", + Description: "Explain (execute) mutation request with update using document IDs.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -31,7 +31,7 @@ func TestExecuteExplainMutationRequestWithUpdateUsingIDs(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain(type: execute) { update_ContactAddress( - ids: [ + docIDs: [ "bae-c8448e47-6cd1-571f-90bd-364acb80da7b", "bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692" ], diff --git a/tests/integration/explain/simple/basic_test.go b/tests/integration/explain/simple/basic_test.go index 9920458952..d94deb01a9 100644 --- a/tests/integration/explain/simple/basic_test.go +++ b/tests/integration/explain/simple/basic_test.go @@ -29,7 +29,7 @@ func TestSimpleExplainRequest(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain(type: simple) { Author { - _key + _docID name age } @@ -40,7 +40,7 @@ func TestSimpleExplainRequest(t *testing.T) { "explain": dataMap{ "selectTopNode": dataMap{ "selectNode": dataMap{ - "_keys": nil, + "docIDs": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, diff --git a/tests/integration/index/create_unique_test.go b/tests/integration/index/create_unique_test.go index 0cea5023e6..69731e7b46 100644 --- a/tests/integration/index/create_unique_test.go +++ b/tests/integration/index/create_unique_test.go @@ -18,7 +18,7 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -const johnDockey = "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7" +const johnDocID = "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7" func TestCreateUniqueIndex_IfFieldValuesAreNotUnique_ReturnError(t *testing.T) { test := testUtils.TestCase{ @@ -60,7 +60,7 @@ func TestCreateUniqueIndex_IfFieldValuesAreNotUnique_ReturnError(t *testing.T) { CollectionID: 0, FieldName: "age", Unique: true, - ExpectedError: db.NewErrCanNotIndexNonUniqueField(johnDockey, "age", 21).Error(), + ExpectedError: db.NewErrCanNotIndexNonUniqueField(johnDocID, "age", 21).Error(), }, testUtils.GetIndexes{ CollectionID: 0, @@ -99,7 +99,7 @@ func TestUniqueIndexCreate_UponAddingDocWithExistingFieldValue_ReturnError(t *te "name": "John", "age": 21 }`, - ExpectedError: db.NewErrCanNotIndexNonUniqueField(johnDockey, "age", 21).Error(), + ExpectedError: db.NewErrCanNotIndexNonUniqueField(johnDocID, "age", 21).Error(), }, testUtils.Request{ Request: `query { diff --git 
a/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go b/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go index 6fadbc5d85..3b37756b6c 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go @@ -108,7 +108,7 @@ func TestMutationCreateOneToMany_AliasedRelationNameInvalidIDManySide_CreatedDoc } func TestMutationCreateOneToMany_AliasedRelationNameToLinkFromManySide(t *testing.T) { - authorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + authorID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" test := testUtils.TestCase{ Description: "One to many create mutation using relation id from many side, with alias.", @@ -126,7 +126,7 @@ func TestMutationCreateOneToMany_AliasedRelationNameToLinkFromManySide(t *testin "name": "Painted House", "author": "%s" }`, - authorKey, + authorID, ), }, testUtils.Request{ @@ -174,9 +174,9 @@ func TestMutationCreateOneToMany_AliasedRelationNameToLinkFromManySide(t *testin } func TestMutationUpdateOneToMany_AliasRelationNameAndInternalIDBothProduceSameDocID(t *testing.T) { - // These keys MUST be shared by both tests below. - authorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" + // These IDs MUST be shared by both tests below. + authorID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + bookID := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" nonAliasedTest := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from single side (wrong)", @@ -194,18 +194,18 @@ func TestMutationUpdateOneToMany_AliasRelationNameAndInternalIDBothProduceSameDo "name": "Painted House", "author_id": "%s" }`, - authorKey, + authorID, ), }, testUtils.Request{ Request: `query { Book { - _key + _docID } }`, Results: []map[string]any{ { - "_key": bookKey, // Must be same as below. + "_docID": bookID, // Must be same as below. }, }, }, @@ -213,7 +213,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameAndInternalIDBothProduceSameDo } executeTestCase(t, nonAliasedTest) - // Check that `bookKey` is same in both above and the alised version below. + // Check that `bookID` is same in both above and the aliased version below. // Note: Everything should be same, only diff should be the use of alias. aliasedTest := testUtils.TestCase{ @@ -232,18 +232,18 @@ func TestMutationUpdateOneToMany_AliasRelationNameAndInternalIDBothProduceSameDo "name": "Painted House", "author": "%s" }`, - authorKey, + authorID, ), }, testUtils.Request{ Request: `query { Book { - _key + _docID } }`, Results: []map[string]any{ { - "_key": bookKey, // Must be same as above. + "_docID": bookID, // Must be same as above. 
}, }, }, diff --git a/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go b/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go index 1e491e3626..da8bd1b7b0 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go @@ -74,7 +74,7 @@ func TestMutationCreateOneToOne_UseAliasWithNonExistingRelationSecondarySide_Err "name": "Painted House", "author": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, - ExpectedError: "no document for the given key exists", + ExpectedError: "no document for the given ID exists", }, }, } @@ -82,7 +82,7 @@ func TestMutationCreateOneToOne_UseAliasWithNonExistingRelationSecondarySide_Err } func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_QueryFromPrimarySide(t *testing.T) { - bookKey := "bae-3d236f89-6a31-5add-a36a-27971a2eac76" + bookID := "bae-3d236f89-6a31-5add-a36a-27971a2eac76" test := testUtils.TestCase{ Description: "One to one create mutation with an alias relation.", @@ -100,7 +100,7 @@ func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_QueryFromPrimarySid "name": "John Grisham", "published": "%s" }`, - bookKey, + bookID, ), }, testUtils.Request{ @@ -146,7 +146,7 @@ func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_QueryFromPrimarySid } func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_QueryFromSecondarySide(t *testing.T) { - authorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + authorID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" test := testUtils.TestCase{ Description: "One to one create mutation from secondary side with alias relation.", @@ -164,7 +164,7 @@ func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_QueryFromSecondaryS "name": "Painted House", "author": "%s" }`, - authorKey, + authorID, ), }, testUtils.Request{ diff --git a/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go b/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go index d22a77de6c..cf985bfa18 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go @@ -74,7 +74,7 @@ func TestMutationCreateOneToOne_NonExistingRelationSecondarySide_Error(t *testin "name": "Painted House", "author_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, - ExpectedError: "no document for the given key exists", + ExpectedError: "no document for the given ID exists", }, }, } @@ -82,7 +82,7 @@ func TestMutationCreateOneToOne_NonExistingRelationSecondarySide_Error(t *testin } func TestMutationCreateOneToOne(t *testing.T) { - bookKey := "bae-3d236f89-6a31-5add-a36a-27971a2eac76" + bookID := "bae-3d236f89-6a31-5add-a36a-27971a2eac76" test := testUtils.TestCase{ Description: "One to one create mutation", @@ -100,7 +100,7 @@ func TestMutationCreateOneToOne(t *testing.T) { "name": "John Grisham", "published_id": "%s" }`, - bookKey, + bookID, ), }, testUtils.Request{ @@ -148,7 +148,7 @@ func TestMutationCreateOneToOne(t *testing.T) { } func TestMutationCreateOneToOneSecondarySide(t *testing.T) { - authorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + authorID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" test := testUtils.TestCase{ Description: "One to one create mutation from secondary side", @@ -166,7 +166,7 @@ func TestMutationCreateOneToOneSecondarySide(t *testing.T) { "name": "Painted House", "author_id": "%s" }`, - authorKey, + 
authorID, ), }, testUtils.Request{ @@ -214,7 +214,7 @@ func TestMutationCreateOneToOneSecondarySide(t *testing.T) { } func TestMutationCreateOneToOne_ErrorsGivenRelationAlreadyEstablishedViaPrimary(t *testing.T) { - bookKey := "bae-3d236f89-6a31-5add-a36a-27971a2eac76" + bookID := "bae-3d236f89-6a31-5add-a36a-27971a2eac76" test := testUtils.TestCase{ Description: "One to one create mutation, errors due to link already existing, primary side", @@ -231,7 +231,7 @@ func TestMutationCreateOneToOne_ErrorsGivenRelationAlreadyEstablishedViaPrimary( "name": "John Grisham", "published_id": "%s" }`, - bookKey, + bookID, ), }, testUtils.CreateDoc{ @@ -240,7 +240,7 @@ func TestMutationCreateOneToOne_ErrorsGivenRelationAlreadyEstablishedViaPrimary( "name": "Saadi Shirazi", "published_id": "%s" }`, - bookKey, + bookID, ), ExpectedError: "target document is already linked to another document.", }, @@ -251,7 +251,7 @@ func TestMutationCreateOneToOne_ErrorsGivenRelationAlreadyEstablishedViaPrimary( } func TestMutationCreateOneToOne_ErrorsGivenRelationAlreadyEstablishedViaSecondary(t *testing.T) { - authorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + authorID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" test := testUtils.TestCase{ Description: "One to one create mutation, errors due to link already existing, secondary side", @@ -268,7 +268,7 @@ func TestMutationCreateOneToOne_ErrorsGivenRelationAlreadyEstablishedViaSecondar "name": "Painted House", "author_id": "%s" }`, - authorKey, + authorID, ), }, testUtils.CreateDoc{ @@ -277,7 +277,7 @@ func TestMutationCreateOneToOne_ErrorsGivenRelationAlreadyEstablishedViaSecondar "name": "Golestan", "author_id": "%s" }`, - authorKey, + authorID, ), ExpectedError: "target document is already linked to another document.", }, diff --git a/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go b/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go index 0cc3807ddc..946c081929 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go @@ -24,7 +24,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. Actions: []any{ testUtils.CreateDoc{ CollectionID: 2, - // "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + // "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", Doc: `{ "name": "Website", "address": "Manning Publications" @@ -32,7 +32,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. }, testUtils.CreateDoc{ CollectionID: 2, - // "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + // "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", Doc: `{ "name": "Online", "address": "Manning Early Access Program (MEAP)" @@ -43,12 +43,12 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. TransactionID: immutable.Some(0), Request: `mutation { create_Book(data: "{\"name\": \"Book By Website\",\"rating\": 4.0, \"publisher_id\": \"bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4\"}") { - _key + _docID } }`, Results: []map[string]any{ { - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", }, }, }, @@ -56,12 +56,12 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. 
TransactionID: immutable.Some(1), Request: `mutation { create_Book(data: "{\"name\": \"Book By Online\",\"rating\": 4.0, \"publisher_id\": \"bae-8a381044-9206-51e7-8bc8-dc683d5f2523\"}") { - _key + _docID } }`, Results: []map[string]any{ { - "_key": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", }, }, }, @@ -70,26 +70,26 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. TransactionID: immutable.Some(0), Request: `query { Publisher { - _key + _docID name published { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", - "name": "Website", + "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + "name": "Website", "published": map[string]any{ - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", - "name": "Book By Website", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "name": "Book By Website", }, }, { - "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", "name": "Online", "published": nil, }, @@ -100,27 +100,27 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. TransactionID: immutable.Some(1), Request: `query { Publisher { - _key + _docID name published { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", "name": "Website", "published": nil, }, { - "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", - "name": "Online", + "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + "name": "Online", "published": map[string]any{ - "_key": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", - "name": "Book By Online", + "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "name": "Book By Online", }, }, }, @@ -136,30 +136,30 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. // Assert books -> publisher direction outside the transactions. 
Request: `query { Book { - _key + _docID name publisher { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", - "name": "Book By Website", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "name": "Book By Website", "publisher": map[string]any{ - "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", - "name": "Website", + "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + "name": "Website", }, }, { - "_key": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", - "name": "Book By Online", + "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "name": "Book By Online", "publisher": map[string]any{ - "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", - "name": "Online", + "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + "name": "Online", }, }, }, @@ -176,7 +176,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing Actions: []any{ testUtils.CreateDoc{ CollectionID: 2, - // "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + // "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", Doc: `{ "name": "Website", "address": "Manning Publications" @@ -184,7 +184,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing }, testUtils.CreateDoc{ CollectionID: 2, - // "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + // "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", Doc: `{ "name": "Online", "address": "Manning Early Access Program (MEAP)" @@ -195,12 +195,12 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing TransactionID: immutable.Some(0), Request: `mutation { create_Book(data: "{\"name\": \"Book By Website\",\"rating\": 4.0, \"publisher_id\": \"bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4\"}") { - _key + _docID } }`, Results: []map[string]any{ { - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", }, }, }, @@ -208,12 +208,12 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing TransactionID: immutable.Some(1), Request: `mutation { create_Book(data: "{\"name\": \"Book By Online\",\"rating\": 4.0, \"publisher_id\": \"bae-8a381044-9206-51e7-8bc8-dc683d5f2523\"}") { - _key + _docID } }`, Results: []map[string]any{ { - "_key": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", }, }, }, @@ -222,21 +222,21 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing TransactionID: immutable.Some(0), Request: `query { Book { - _key + _docID name publisher { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", - "name": "Book By Website", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "name": "Book By Website", "publisher": map[string]any{ - "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", - "name": "Website", + "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + "name": "Website", }, }, }, @@ -246,21 +246,21 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing TransactionID: immutable.Some(1), Request: `query { Book { - _key + _docID name publisher { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", - "name": "Book By Online", + "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "name": "Book By Online", "publisher": map[string]any{ - "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", - "name": "Online", + 
"_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + "name": "Online", }, }, }, @@ -276,30 +276,30 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing // Assert publishers -> books direction outside the transactions. Request: `query { Publisher { - _key + _docID name published { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", - "name": "Website", + "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + "name": "Website", "published": map[string]any{ - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", - "name": "Book By Website", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "name": "Book By Website", }, }, { - "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", - "name": "Online", + "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + "name": "Online", "published": map[string]any{ - "_key": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", - "name": "Book By Online", + "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "name": "Book By Online", }, }, }, diff --git a/tests/integration/mutation/create/simple_test.go b/tests/integration/mutation/create/simple_test.go index 54f3de9536..cedac8c58e 100644 --- a/tests/integration/mutation/create/simple_test.go +++ b/tests/integration/mutation/create/simple_test.go @@ -75,7 +75,7 @@ func TestMutationCreate(t *testing.T) { Request: ` query { Users { - _key + _docID name age } @@ -83,9 +83,9 @@ func TestMutationCreate(t *testing.T) { `, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", - "name": "John", - "age": int64(27), + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "name": "John", + "age": int64(27), }, }, }, @@ -124,7 +124,7 @@ func TestMutationCreate_GivenDuplicate_Errors(t *testing.T) { "name": "John", "age": 27 }`, - ExpectedError: "a document with the given dockey already exists.", + ExpectedError: "a document with the given ID already exists", }, }, } @@ -146,7 +146,7 @@ func TestMutationCreate_GivenEmptyData_Errors(t *testing.T) { testUtils.Request{ Request: `mutation { create_Users(data: "") { - _key + _docID } }`, ExpectedError: "given data payload is empty", diff --git a/tests/integration/mutation/create/with_version_test.go b/tests/integration/mutation/create/with_version_test.go index 7cf879737e..1d98ead005 100644 --- a/tests/integration/mutation/create/with_version_test.go +++ b/tests/integration/mutation/create/with_version_test.go @@ -39,7 +39,7 @@ func TestMutationCreate_ReturnsVersionCID(t *testing.T) { { "_version": []map[string]any{ { - "cid": "bafybeifwfw3g4q6tagffdwq4orrouoosdlsc5rb67q2uj7oplkq7ax5ysm", + "cid": "bafybeicbv34oa4hfcnqbka3jqnby4g75ttlj4wfvc7zhvat5xca45ggq2u", }, }, }, diff --git a/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go b/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go index a30cf60050..4d75d3b916 100644 --- a/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go +++ b/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go @@ -20,7 +20,7 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestDeletionOfADocumentUsingSingleKeyWithShowDeletedDocumentQuery(t *testing.T) { +func TestDeletionOfADocumentUsingSingleDocIDWithShowDeletedDocumentQuery(t *testing.T) { jsonString1 := `{ "name": "John", "age": 30 @@ -32,7 +32,7 @@ func 
TestDeletionOfADocumentUsingSingleKeyWithShowDeletedDocumentQuery(t *testin "name": "John and the philosopher are stoned", "rating": 9.9, "author_id": "%s" - }`, doc1.Key()) + }`, doc1.ID()) doc2, err := client.NewDocFromJSON([]byte(jsonString2)) require.NoError(t, err) @@ -40,12 +40,12 @@ func TestDeletionOfADocumentUsingSingleKeyWithShowDeletedDocumentQuery(t *testin "name": "John has a chamber of secrets", "rating": 9.9, "author_id": "%s" - }`, doc1.Key()) + }`, doc1.ID()) // doc3, err := client.NewDocFromJSON([]byte(jsonString1)) // require.NoError(t, err) test := testUtils.TestCase{ - Description: "One to many delete document using single key show deleted.", + Description: "One to many delete document using single document id, show deleted.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -75,13 +75,13 @@ func TestDeletionOfADocumentUsingSingleKeyWithShowDeletedDocumentQuery(t *testin }, testUtils.Request{ Request: fmt.Sprintf(`mutation { - delete_Book(id: "%s") { - _key + delete_Book(docID: "%s") { + _docID } - }`, doc2.Key()), + }`, doc2.ID()), Results: []map[string]any{ { - "_key": doc2.Key().String(), + "_docID": doc2.ID().String(), }, }, }, diff --git a/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_id_test.go b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_id_test.go index 851ec73da0..2efa3ef960 100644 --- a/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_id_test.go +++ b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_id_test.go @@ -53,13 +53,13 @@ func TestRelationalDeletionOfADocumentUsingSingleKey_Success(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_Author(id: "bae-2f80f359-535d-508e-ba58-088a309ce3c3") { - _key + delete_Author(docID: "bae-2f80f359-535d-508e-ba58-088a309ce3c3") { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-2f80f359-535d-508e-ba58-088a309ce3c3", + "_docID": "bae-2f80f359-535d-508e-ba58-088a309ce3c3", }, }, }, @@ -67,7 +67,7 @@ func TestRelationalDeletionOfADocumentUsingSingleKey_Success(t *testing.T) { }, { - Description: "Relational delete mutation with an aliased _key name.", + Description: "Relational delete mutation with an aliased _docID name.", Actions: []any{ testUtils.CreateDoc{ // Books @@ -101,8 +101,8 @@ func TestRelationalDeletionOfADocumentUsingSingleKey_Success(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_Author(id: "bae-2f80f359-535d-508e-ba58-088a309ce3c3") { - AliasOfKey: _key + delete_Author(docID: "bae-2f80f359-535d-508e-ba58-088a309ce3c3") { + AliasOfKey: _docID } }`, Results: []map[string]any{ @@ -115,7 +115,7 @@ func TestRelationalDeletionOfADocumentUsingSingleKey_Success(t *testing.T) { }, { - Description: "Relational Delete of an updated document and an aliased _key name.", + Description: "Relational Delete of an updated document and an aliased _docID name.", Actions: []any{ testUtils.CreateDoc{ // Books @@ -166,8 +166,8 @@ func TestRelationalDeletionOfADocumentUsingSingleKey_Success(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_Author(id: "bae-2f80f359-535d-508e-ba58-088a309ce3c3") { - Key: _key + delete_Author(docID: "bae-2f80f359-535d-508e-ba58-088a309ce3c3") { + Key: _docID } }`, Results: []map[string]any{ diff --git a/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_txn_test.go b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_txn_test.go index 166642ae13..6447551393 100644 --- 
a/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_txn_test.go +++ b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_txn_test.go @@ -25,7 +25,7 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideForwardDirection(t *testing.T) { testUtils.CreateDoc{ // books CollectionID: 0, - // "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + // "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", Doc: `{ "name": "Book By Website", "rating": 4.0, @@ -35,7 +35,7 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideForwardDirection(t *testing.T) { testUtils.CreateDoc{ // publishers CollectionID: 2, - // "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + // "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", Doc: `{ "name": "Website", "address": "Manning Publications" @@ -45,13 +45,13 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideForwardDirection(t *testing.T) { // Delete a linked book that exists. TransactionID: immutable.Some(0), Request: `mutation { - delete_Book(id: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { - _key + delete_Book(docID: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", }, }, }, @@ -62,17 +62,17 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideForwardDirection(t *testing.T) { // Assert after transaction(s) have been commited, to ensure the book was deleted. Request: `query { Publisher { - _key + _docID name published { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", "name": "Website", "published": nil, }, @@ -91,7 +91,7 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideBackwardDirection(t *testing.T) { testUtils.CreateDoc{ // books CollectionID: 0, - // "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + // "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", Doc: `{ "name": "Book By Website", "rating": 4.0, @@ -101,7 +101,7 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideBackwardDirection(t *testing.T) { testUtils.CreateDoc{ // publishers CollectionID: 2, - // "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + // "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", Doc: `{ "name": "Website", "address": "Manning Publications" @@ -111,13 +111,13 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideBackwardDirection(t *testing.T) { // Delete a linked book that exists. TransactionID: immutable.Some(0), Request: `mutation { - delete_Book(id: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { - _key + delete_Book(docID: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", }, }, }, @@ -128,10 +128,10 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideBackwardDirection(t *testing.T) { // Assert after transaction(s) have been commited, to ensure the book was deleted. 
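
Reviewer note: the delete hunks below all follow one renamed shape: `delete_X(docID: ...)` selecting `_docID`, with the mutation returning the ID of each deleted document. A minimal sketch under that assumption (placeholder docID, assumed entry point):

```
package example

import (
	"testing"

	testUtils "github.com/sourcenetwork/defradb/tests/integration"
)

func TestSketch_DeleteByDocID(t *testing.T) {
	test := testUtils.TestCase{
		Actions: []any{
			testUtils.Request{
				// The mutation returns the _docID of each deleted document.
				Request: `mutation {
					delete_Book(docID: "bae-0000-placeholder") {
						_docID
					}
				}`,
				Results: []map[string]any{
					{"_docID": "bae-0000-placeholder"},
				},
			},
		},
	}
	testUtils.ExecuteTestCase(t, test) // assumed harness entry point
}
```
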
Request: `query { Book { - _key + _docID name publisher { - _key + _docID name } } @@ -151,7 +151,7 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnForwardDirection(t *tes testUtils.CreateDoc{ // books CollectionID: 0, - // "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + // "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", Doc: `{ "name": "Book By Website", "rating": 4.0, @@ -161,7 +161,7 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnForwardDirection(t *tes testUtils.CreateDoc{ // publishers CollectionID: 2, - // "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + // "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", Doc: `{ "name": "Website", "address": "Manning Publications" @@ -171,13 +171,13 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnForwardDirection(t *tes // Delete a linked book that exists. TransactionID: immutable.Some(0), Request: `mutation { - delete_Book(id: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { - _key + delete_Book(docID: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", }, }, }, @@ -186,21 +186,21 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnForwardDirection(t *tes TransactionID: immutable.Some(1), Request: `query { Publisher { - _key + _docID name published { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", - "name": "Website", + "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + "name": "Website", "published": map[string]any{ - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", - "name": "Book By Website", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "name": "Book By Website", }, }, }, @@ -212,17 +212,17 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnForwardDirection(t *tes // Assert after transaction(s) have been commited, to ensure the book was deleted. Request: `query { Publisher { - _key + _docID name published { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", "name": "Website", "published": nil, }, @@ -241,7 +241,7 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnBackwardDirection(t *te testUtils.CreateDoc{ // books CollectionID: 0, - // "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + // "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", Doc: `{ "name": "Book By Website", "rating": 4.0, @@ -251,7 +251,7 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnBackwardDirection(t *te testUtils.CreateDoc{ // publishers CollectionID: 2, - // "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + // "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", Doc: `{ "name": "Website", "address": "Manning Publications" @@ -261,13 +261,13 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnBackwardDirection(t *te // Delete a linked book that exists in transaction 0. 
TransactionID: immutable.Some(0), Request: `mutation { - delete_Book(id: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { - _key + delete_Book(docID: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", }, }, }, @@ -276,21 +276,21 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnBackwardDirection(t *te TransactionID: immutable.Some(1), Request: `query { Book { - _key + _docID name publisher { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", - "name": "Book By Website", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "name": "Book By Website", "publisher": map[string]any{ - "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", - "name": "Website", + "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + "name": "Website", }, }, }, @@ -302,10 +302,10 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnBackwardDirection(t *te // Assert after transaction(s) have been commited, to ensure the book was deleted. Request: `query { Book { - _key + _docID name publisher { - _key + _docID name } } @@ -325,7 +325,7 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideForwardDirection(t *testing.T) testUtils.CreateDoc{ // books CollectionID: 0, - // "_key": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + // "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", Doc: `{ "name": "Book By Online", "rating": 4.0, @@ -335,7 +335,7 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideForwardDirection(t *testing.T) testUtils.CreateDoc{ // publishers CollectionID: 2, - // "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + // "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", Doc: `{ "name": "Online", "address": "Manning Early Access Program (MEAP)" @@ -346,13 +346,13 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideForwardDirection(t *testing.T) // book gets correctly unlinked too. TransactionID: immutable.Some(0), Request: `mutation { - delete_Publisher(id: "bae-8a381044-9206-51e7-8bc8-dc683d5f2523") { - _key + delete_Publisher(docID: "bae-8a381044-9206-51e7-8bc8-dc683d5f2523") { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", }, }, }, @@ -363,10 +363,10 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideForwardDirection(t *testing.T) // Assert after transaction(s) have been commited. Request: `query { Publisher { - _key + _docID name published { - _key + _docID name } } @@ -386,7 +386,7 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideBackwardDirection(t *testing.T testUtils.CreateDoc{ // books CollectionID: 0, - // "_key": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + // "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", Doc: `{ "name": "Book By Online", "rating": 4.0, @@ -396,7 +396,7 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideBackwardDirection(t *testing.T testUtils.CreateDoc{ // publishers CollectionID: 2, - // "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + // "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", Doc: `{ "name": "Online", "address": "Manning Early Access Program (MEAP)" @@ -407,13 +407,13 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideBackwardDirection(t *testing.T // book gets correctly unlinked too. 
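
Reviewer note: the non-primary-side deletion tests around here assert the companion behaviour: once the deleting transaction commits, the surviving document resolves the removed relation to `nil`. A hedged sketch of that final assertion (placeholder IDs, assumed entry point):

```
package example

import (
	"testing"

	testUtils "github.com/sourcenetwork/defradb/tests/integration"
)

func TestSketch_DeletedRelationIsUnlinked(t *testing.T) {
	test := testUtils.TestCase{
		Actions: []any{
			testUtils.Request{
				// After the publisher is deleted and the transaction commits,
				// the surviving book resolves its publisher to nil.
				Request: `query {
					Book {
						_docID
						publisher {
							_docID
						}
					}
				}`,
				Results: []map[string]any{
					{
						"_docID":    "bae-0000-placeholder-book",
						"publisher": nil,
					},
				},
			},
		},
	}
	testUtils.ExecuteTestCase(t, test) // assumed harness entry point
}
```
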
TransactionID: immutable.Some(0), Request: `mutation { - delete_Publisher(id: "bae-8a381044-9206-51e7-8bc8-dc683d5f2523") { - _key + delete_Publisher(docID: "bae-8a381044-9206-51e7-8bc8-dc683d5f2523") { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", }, }, }, @@ -424,17 +424,17 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideBackwardDirection(t *testing.T // Assert after transaction(s) have been commited. Request: `query { Book { - _key + _docID name publisher { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", "name": "Book By Online", "publisher": nil, }, diff --git a/tests/integration/mutation/delete/simple_test.go b/tests/integration/mutation/delete/simple_test.go index 5b28d100d3..63c5744c1b 100644 --- a/tests/integration/mutation/delete/simple_test.go +++ b/tests/integration/mutation/delete/simple_test.go @@ -18,7 +18,7 @@ import ( func TestMutationDeletion_WithoutSubSelection(t *testing.T) { test := testUtils.TestCase{ - Description: "Delete multiple documents that exist without sub selection, should give error.", + Description: "Delete without sub-selection, should give error.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -41,7 +41,7 @@ func TestMutationDeletion_WithoutSubSelection(t *testing.T) { func TestMutationDeletion_WithoutSubSelectionFields(t *testing.T) { test := testUtils.TestCase{ - Description: "Delete multiple documents that exist without _key sub-selection.", + Description: "Delete without sub-selection fields, should give error.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` diff --git a/tests/integration/mutation/delete/with_deleted_field_test.go b/tests/integration/mutation/delete/with_deleted_field_test.go index 25784b52d2..55e1a9f2dd 100644 --- a/tests/integration/mutation/delete/with_deleted_field_test.go +++ b/tests/integration/mutation/delete/with_deleted_field_test.go @@ -18,7 +18,7 @@ import ( // This test documents a bug, see: // https://github.com/sourcenetwork/defradb/issues/1846 -func TestMutationDeletion_WithoDeletedField(t *testing.T) { +func TestMutationDeletion_WithDeletedField(t *testing.T) { test := testUtils.TestCase{ Actions: []any{ testUtils.SchemaUpdate{ @@ -36,16 +36,16 @@ func TestMutationDeletion_WithoDeletedField(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(id: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad") { + delete_User(docID: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad") { _deleted - _key + _docID } }`, Results: []map[string]any{ { // This should be true, as it has been deleted. 
"_deleted": false, - "_key": "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad", + "_docID": "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad", }, }, }, diff --git a/tests/integration/mutation/delete/with_id_alias_test.go b/tests/integration/mutation/delete/with_id_alias_test.go index 1890092b00..5709b7cadc 100644 --- a/tests/integration/mutation/delete/with_id_alias_test.go +++ b/tests/integration/mutation/delete/with_id_alias_test.go @@ -18,7 +18,7 @@ import ( func TestMutationDeletion_WithIDAndAlias(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple delete mutation with an aliased _key name.", + Description: "Simple delete mutation with an alias field name.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -34,8 +34,8 @@ func TestMutationDeletion_WithIDAndAlias(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"]) { - fancyKey: _key + delete_User(docIDs: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"]) { + fancyKey: _docID } }`, Results: []map[string]any{ diff --git a/tests/integration/mutation/delete/with_id_test.go b/tests/integration/mutation/delete/with_id_test.go index 68adcc7e6f..78c923693e 100644 --- a/tests/integration/mutation/delete/with_id_test.go +++ b/tests/integration/mutation/delete/with_id_test.go @@ -18,7 +18,7 @@ import ( func TestMutationDeletion_WithIDUnknownValue(t *testing.T) { test := testUtils.TestCase{ - Description: "Deletion using id that doesn't exist, where the collection is empty.", + Description: "Deletion using document id that doesn't exist, where the collection is empty.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -29,8 +29,8 @@ func TestMutationDeletion_WithIDUnknownValue(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"]) { - _key + delete_User(docIDs: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"]) { + _docID } }`, Results: []map[string]any{}, @@ -43,7 +43,7 @@ func TestMutationDeletion_WithIDUnknownValue(t *testing.T) { func TestMutationDeletion_WithIDUnknownValueAndUnrelatedRecordInCollection(t *testing.T) { test := testUtils.TestCase{ - Description: "Deletion using id that doesn't exist, where the collection is non-empty.", + Description: "Deletion using document id that doesn't exist, where the collection is non-empty.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -59,8 +59,8 @@ func TestMutationDeletion_WithIDUnknownValueAndUnrelatedRecordInCollection(t *te }, testUtils.Request{ Request: `mutation { - delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"]) { - _key + delete_User(docIDs: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"]) { + _docID } }`, Results: []map[string]any{}, diff --git a/tests/integration/mutation/delete/with_id_txn_test.go b/tests/integration/mutation/delete/with_id_txn_test.go index c4f2ad6bdc..aeb6a4772b 100644 --- a/tests/integration/mutation/delete/with_id_txn_test.go +++ b/tests/integration/mutation/delete/with_id_txn_test.go @@ -37,13 +37,13 @@ func TestMutationDeletion_WithIDAndTxn(t *testing.T) { testUtils.Request{ TransactionID: immutable.Some(0), Request: `mutation { - delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"]) { - _key + delete_User(docIDs: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"]) { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", + "_docID": "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", }, }, }, @@ -51,7 +51,7 @@ func TestMutationDeletion_WithIDAndTxn(t *testing.T) { 
TransactionID: immutable.Some(0), Request: `query { User { - _key + _docID } }`, Results: []map[string]any{}, diff --git a/tests/integration/mutation/delete/with_ids_alias_test.go b/tests/integration/mutation/delete/with_ids_alias_test.go index 1c6be23278..e91432e787 100644 --- a/tests/integration/mutation/delete/with_ids_alias_test.go +++ b/tests/integration/mutation/delete/with_ids_alias_test.go @@ -18,7 +18,7 @@ import ( func TestMutationDeletion_WithIDsAndSelectAlias(t *testing.T) { test := testUtils.TestCase{ - Description: "Delete multiple documents that exist, when given multiple keys with alias.", + Description: "Delete multiple documents that exist, when given multiple IDs with alias.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -48,16 +48,16 @@ func TestMutationDeletion_WithIDsAndSelectAlias(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(ids: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d", "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e"]) { - AliasKey: _key + delete_User(docIDs: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d", "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e"]) { + AliasID: _docID } }`, Results: []map[string]any{ { - "AliasKey": "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e", + "AliasID": "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e", }, { - "AliasKey": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", + "AliasID": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", }, }, }, diff --git a/tests/integration/mutation/delete/with_ids_filter_test.go b/tests/integration/mutation/delete/with_ids_filter_test.go index 8d93bdf9cf..02ddb656f7 100644 --- a/tests/integration/mutation/delete/with_ids_filter_test.go +++ b/tests/integration/mutation/delete/with_ids_filter_test.go @@ -18,7 +18,7 @@ import ( func TestMutationDeletion_WithIDsAndEmptyFilter(t *testing.T) { test := testUtils.TestCase{ - Description: "Deletion of using ids and filter, known id and empty filter.", + Description: "Deletion of using document ids and filter, known id and empty filter.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -34,13 +34,13 @@ func TestMutationDeletion_WithIDsAndEmptyFilter(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"], filter: {}) { - _key + delete_User(docIDs: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"], filter: {}) { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", + "_docID": "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", }, }, }, diff --git a/tests/integration/mutation/delete/with_ids_test.go b/tests/integration/mutation/delete/with_ids_test.go index 48adcb2e48..18371c2d70 100644 --- a/tests/integration/mutation/delete/with_ids_test.go +++ b/tests/integration/mutation/delete/with_ids_test.go @@ -18,7 +18,7 @@ import ( func TestMutationDeletion_WithIDs(t *testing.T) { test := testUtils.TestCase{ - Description: "Delete multiple documents that exist, when given multiple keys.", + Description: "Delete multiple documents that exist, when given multiple IDs.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -39,16 +39,16 @@ func TestMutationDeletion_WithIDs(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad"]) { - _key + delete_User(docIDs: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad"]) { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", + "_docID": 
"bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", }, { - "_key": "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad", + "_docID": "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad", }, }, }, @@ -81,8 +81,8 @@ func TestMutationDeletion_WithEmptyIDs(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(ids: []) { - _key + delete_User(docIDs: []) { + _docID } }`, Results: []map[string]any{}, @@ -122,8 +122,8 @@ func TestMutationDeletion_WithIDsSingleUnknownID(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(ids: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507e"]) { - _key + delete_User(docIDs: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507e"]) { + _docID } }`, Results: []map[string]any{}, @@ -147,8 +147,8 @@ func TestMutationDeletion_WithIDsMultipleUnknownID(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(ids: ["bae-028383cc-d6ba-5df7-959f-2bdce3536a05", "bae-028383cc-d6ba-5df7-959f-2bdce3536a03"]) { - _key + delete_User(docIDs: ["bae-028383cc-d6ba-5df7-959f-2bdce3536a05", "bae-028383cc-d6ba-5df7-959f-2bdce3536a03"]) { + _docID } }`, Results: []map[string]any{}, @@ -177,13 +177,13 @@ func TestMutationDeletion_WithIDsKnownAndUnknown(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad"]) { - _key + delete_User(docIDs: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad"]) { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", + "_docID": "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", }, }, }, diff --git a/tests/integration/mutation/delete/with_ids_txn_test.go b/tests/integration/mutation/delete/with_ids_txn_test.go index ab3ed174f1..c59ec5c262 100644 --- a/tests/integration/mutation/delete/with_ids_txn_test.go +++ b/tests/integration/mutation/delete/with_ids_txn_test.go @@ -20,7 +20,7 @@ import ( func TestMutationDeletion_WithIDsAndTxn(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple multi-key delete mutation with one key that exists and txn.", + Description: "Simple multi-docIDs delete mutation with one ID that exists and txn.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -43,21 +43,21 @@ func TestMutationDeletion_WithIDsAndTxn(t *testing.T) { testUtils.Request{ TransactionID: immutable.Some(0), Request: `mutation { - delete_User(ids: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d"]) { - _key + delete_User(docIDs: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d"]) { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", + "_docID": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", }, }, }, testUtils.Request{ TransactionID: immutable.Some(0), Request: `query { - User(dockeys: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d"]) { - _key + User(docIDs: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d"]) { + _docID } }`, Results: []map[string]any{}, diff --git a/tests/integration/mutation/delete/with_ids_update_alias_test.go b/tests/integration/mutation/delete/with_ids_update_alias_test.go index a76dccffe2..076f4f9967 100644 --- a/tests/integration/mutation/delete/with_ids_update_alias_test.go +++ b/tests/integration/mutation/delete/with_ids_update_alias_test.go @@ -18,7 +18,7 @@ import ( func TestMutationDeletion_WithUpdateAndIDsAndSelectAlias(t *testing.T) { test := testUtils.TestCase{ - Description: "Delete multiple documents that exist, when given multiple keys with alias after update.", + Description: "Delete multiple documents 
that exist, when given multiple IDs with alias after update.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -56,16 +56,16 @@ func TestMutationDeletion_WithUpdateAndIDsAndSelectAlias(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(ids: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d", "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e"]) { - AliasKey: _key + delete_User(docIDs: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d", "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e"]) { + AliasID: _docID } }`, Results: []map[string]any{ { - "AliasKey": "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e", + "AliasID": "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e", }, { - "AliasKey": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", + "AliasID": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", }, }, }, diff --git a/tests/integration/mutation/mix/with_txn_test.go b/tests/integration/mutation/mix/with_txn_test.go index 8a88db606a..50cbee7809 100644 --- a/tests/integration/mutation/mix/with_txn_test.go +++ b/tests/integration/mutation/mix/with_txn_test.go @@ -34,25 +34,25 @@ func TestMutationWithTxnDeletesUserGivenSameTransaction(t *testing.T) { TransactionID: immutable.Some(0), Request: `mutation { create_User(data: "{\"name\": \"John\",\"age\": 27}") { - _key + _docID } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", }, }, }, testUtils.Request{ TransactionID: immutable.Some(0), Request: `mutation { - delete_User(id: "bae-88b63198-7d38-5714-a9ff-21ba46374fd1") { - _key + delete_User(docID: "bae-88b63198-7d38-5714-a9ff-21ba46374fd1") { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", }, }, }, @@ -78,20 +78,20 @@ func TestMutationWithTxnDoesNotDeletesUserGivenDifferentTransactions(t *testing. TransactionID: immutable.Some(0), Request: `mutation { create_User(data: "{\"name\": \"John\",\"age\": 27}") { - _key + _docID } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", }, }, }, testUtils.Request{ TransactionID: immutable.Some(1), Request: `mutation { - delete_User(id: "bae-88b63198-7d38-5714-a9ff-21ba46374fd1") { - _key + delete_User(docID: "bae-88b63198-7d38-5714-a9ff-21ba46374fd1") { + _docID } }`, Results: []map[string]any{}, @@ -100,16 +100,16 @@ func TestMutationWithTxnDoesNotDeletesUserGivenDifferentTransactions(t *testing. TransactionID: immutable.Some(0), Request: `query { User { - _key + _docID name age } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", - "name": "John", - "age": int64(27), + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "name": "John", + "age": int64(27), }, }, }, @@ -117,7 +117,7 @@ func TestMutationWithTxnDoesNotDeletesUserGivenDifferentTransactions(t *testing. 
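
Reviewer note: the mix tests below combine the renamed pieces end to end: `create_User` returns a `_docID`, which the same transaction can then hand to `delete_User(docID: ...)`. A hedged sketch of that round trip (placeholder docID, assumed entry point):

```
package example

import (
	"testing"

	"github.com/sourcenetwork/immutable"
	testUtils "github.com/sourcenetwork/defradb/tests/integration"
)

func TestSketch_CreateThenDeleteInOneTxn(t *testing.T) {
	test := testUtils.TestCase{
		Actions: []any{
			testUtils.Request{
				TransactionID: immutable.Some(0),
				Request: `mutation {
					create_User(data: "{\"name\": \"John\"}") {
						_docID
					}
				}`,
				Results: []map[string]any{
					{"_docID": "bae-0000-placeholder"},
				},
			},
			testUtils.Request{
				// The same transaction can delete what it just created.
				TransactionID: immutable.Some(0),
				Request: `mutation {
					delete_User(docID: "bae-0000-placeholder") {
						_docID
					}
				}`,
				Results: []map[string]any{
					{"_docID": "bae-0000-placeholder"},
				},
			},
		},
	}
	testUtils.ExecuteTestCase(t, test) // assumed harness entry point
}
```
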
TransactionID: immutable.Some(1), Request: `query { User { - _key + _docID name age } @@ -152,12 +152,12 @@ func TestMutationWithTxnDoesUpdateUserGivenSameTransactions(t *testing.T) { TransactionID: immutable.Some(0), Request: `mutation { update_User(data: "{\"age\": 28}") { - _key + _docID } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", }, }, }, @@ -165,16 +165,16 @@ func TestMutationWithTxnDoesUpdateUserGivenSameTransactions(t *testing.T) { TransactionID: immutable.Some(0), Request: `query { User { - _key + _docID name age } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", - "name": "John", - "age": int64(28), + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "name": "John", + "age": int64(28), }, }, }, @@ -206,16 +206,16 @@ func TestMutationWithTxnDoesNotUpdateUserGivenDifferentTransactions(t *testing.T TransactionID: immutable.Some(0), Request: `mutation { update_User(data: "{\"age\": 28}") { - _key + _docID name age } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", - "name": "John", - "age": int64(28), + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "name": "John", + "age": int64(28), }, }, }, @@ -223,16 +223,16 @@ func TestMutationWithTxnDoesNotUpdateUserGivenDifferentTransactions(t *testing.T TransactionID: immutable.Some(1), Request: `query { User { - _key + _docID name age } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", - "name": "John", - "age": int64(27), + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "name": "John", + "age": int64(27), }, }, }, @@ -265,16 +265,16 @@ func TestMutationWithTxnDoesNotAllowUpdateInSecondTransactionUser(t *testing.T) TransactionID: immutable.Some(0), Request: `mutation { update_User(data: "{\"age\": 28}") { - _key + _docID name age } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", - "name": "John", - "age": int64(28), + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "name": "John", + "age": int64(28), }, }, }, @@ -282,16 +282,16 @@ func TestMutationWithTxnDoesNotAllowUpdateInSecondTransactionUser(t *testing.T) TransactionID: immutable.Some(1), Request: `mutation { update_User(data: "{\"age\": 29}") { - _key + _docID name age } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", - "name": "John", - "age": int64(29), + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "name": "John", + "age": int64(29), }, }, }, @@ -306,16 +306,16 @@ func TestMutationWithTxnDoesNotAllowUpdateInSecondTransactionUser(t *testing.T) // Query after transactions have been commited: Request: `query { User { - _key + _docID name age } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", - "name": "John", - "age": int64(28), + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "name": "John", + "age": int64(28), }, }, }, diff --git a/tests/integration/mutation/special/invalid_operation_test.go b/tests/integration/mutation/special/invalid_operation_test.go index 4a8ae2f4c9..1694a37c67 100644 --- a/tests/integration/mutation/special/invalid_operation_test.go +++ b/tests/integration/mutation/special/invalid_operation_test.go @@ -30,7 +30,7 @@ func TestMutationInvalidMutation(t *testing.T) { testUtils.Request{ Request: `mutation { dostuff_User(data: "") { - _key + _docID } }`, ExpectedError: "Cannot query field 
\"dostuff_User\" on type \"Mutation\".", diff --git a/tests/integration/mutation/update/field_kinds/one_to_many/simple_test.go b/tests/integration/mutation/update/field_kinds/one_to_many/simple_test.go index cb87d336f0..882fddd891 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_many/simple_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_many/simple_test.go @@ -18,8 +18,8 @@ import ( ) func TestMutationUpdateOneToMany_RelationIDToLinkFromSingleSide_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + bookID := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" test := testUtils.TestCase{ Description: "One to many update mutation using relation id from single side (wrong)", @@ -43,7 +43,7 @@ func TestMutationUpdateOneToMany_RelationIDToLinkFromSingleSide_Error(t *testing "name": "Painted House", "author_id": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -54,7 +54,7 @@ func TestMutationUpdateOneToMany_RelationIDToLinkFromSingleSide_Error(t *testing `{ "published_id": "%s" }`, - bookKey, + bookID, ), ExpectedError: "The given field does not exist. Name: published_id", }, @@ -67,8 +67,8 @@ func TestMutationUpdateOneToMany_RelationIDToLinkFromSingleSide_Error(t *testing // Note: This test should probably not pass, as it contains a // reference to a document that doesnt exist. func TestMutationUpdateOneToMany_InvalidRelationIDToLinkFromManySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - invalidAuthorKey := "bae-35953ca-518d-9e6b-9ce6cd00eff5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + invalidAuthorID := "bae-35953ca-518d-9e6b-9ce6cd00eff5" test := testUtils.TestCase{ Description: "One to many update mutation using relation id from many side", @@ -86,7 +86,7 @@ func TestMutationUpdateOneToMany_InvalidRelationIDToLinkFromManySide(t *testing. "name": "Painted House", "author_id": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -96,7 +96,7 @@ func TestMutationUpdateOneToMany_InvalidRelationIDToLinkFromManySide(t *testing. `{ "author_id": "%s" }`, - invalidAuthorKey, + invalidAuthorID, ), }, testUtils.Request{ @@ -138,8 +138,8 @@ func TestMutationUpdateOneToMany_InvalidRelationIDToLinkFromManySide(t *testing. } func TestMutationUpdateOneToMany_RelationIDToLinkFromManySideWithWrongField_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2ID := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" test := testUtils.TestCase{ Description: "One to many update mutation using relation id from many side, with a wrong field.", @@ -163,7 +163,7 @@ func TestMutationUpdateOneToMany_RelationIDToLinkFromManySideWithWrongField_Erro "name": "Painted House", "author_id": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -174,7 +174,7 @@ func TestMutationUpdateOneToMany_RelationIDToLinkFromManySideWithWrongField_Erro "notName": "Unpainted Condo", "author_id": "%s" }`, - author2Key, + author2ID, ), ExpectedError: "The given field does not exist. 
Name: notName", }, @@ -185,8 +185,8 @@ func TestMutationUpdateOneToMany_RelationIDToLinkFromManySideWithWrongField_Erro } func TestMutationUpdateOneToMany_RelationIDToLinkFromManySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2ID := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" test := testUtils.TestCase{ Description: "One to many update mutation using relation id from many side", @@ -210,7 +210,7 @@ func TestMutationUpdateOneToMany_RelationIDToLinkFromManySide(t *testing.T) { "name": "Painted House", "author_id": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -220,7 +220,7 @@ func TestMutationUpdateOneToMany_RelationIDToLinkFromManySide(t *testing.T) { `{ "author_id": "%s" }`, - author2Key, + author2ID, ), }, testUtils.Request{ diff --git a/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go b/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go index 46d4eb6f32..576b089d1c 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go @@ -20,8 +20,8 @@ import ( ) func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_Collection(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + bookID := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" test := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from single side (wrong)", @@ -52,7 +52,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_Collectio "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -63,7 +63,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_Collectio `{ "published": "%s" }`, - bookKey, + bookID, ), ExpectedError: "The given field does not exist. Name: published", }, @@ -74,8 +74,8 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_Collectio } func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_GQL(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + bookID := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" test := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from single side (wrong)", @@ -105,7 +105,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_GQL(t *te "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -116,7 +116,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_GQL(t *te `{ "published": "%s" }`, - bookKey, + bookID, ), ExpectedError: "The given field or alias to field does not exist. Name: published", }, @@ -129,8 +129,8 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_GQL(t *te // Note: This test should probably not pass, as it contains a // reference to a document that doesnt exist. 
func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_GQL(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - invalidAuthorKey := "bae-35953ca-518d-9e6b-9ce6cd00eff5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + invalidAuthorID := "bae-35953ca-518d-9e6b-9ce6cd00eff5" test := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from many side", @@ -154,7 +154,7 @@ func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_GQL( "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -164,7 +164,7 @@ func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_GQL( `{ "author": "%s" }`, - invalidAuthorKey, + invalidAuthorID, ), }, testUtils.Request{ @@ -212,8 +212,8 @@ func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_GQL( // TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_GQL // and https://github.com/sourcenetwork/defradb/issues/1703 for more info. func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_Collection(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - invalidAuthorKey := "bae-35953ca-518d-9e6b-9ce6cd00eff5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + invalidAuthorID := "bae-35953ca-518d-9e6b-9ce6cd00eff5" test := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from many side", @@ -235,7 +235,7 @@ func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_Coll "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -245,7 +245,7 @@ func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_Coll `{ "author": "%s" }`, - invalidAuthorKey, + invalidAuthorID, ), ExpectedError: "The given field does not exist. Name: author", }, @@ -256,8 +256,8 @@ func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_Coll } func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySideWithWrongField_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2ID := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" test := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from many side, with a wrong field.", @@ -287,7 +287,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySideWithWrongFie "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -298,7 +298,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySideWithWrongFie "notName": "Unpainted Condo", "author": "%s" }`, - author2Key, + author2ID, ), ExpectedError: "The given field does not exist. 
Name: notName", }, @@ -309,8 +309,8 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySideWithWrongFie } func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2ID := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" test := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from many side", @@ -340,7 +340,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySide(t *testing. "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -350,7 +350,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySide(t *testing. `{ "author": "%s" }`, - author2Key, + author2ID, ), }, testUtils.Request{ diff --git a/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go b/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go index 57633fd126..c68dcce5a3 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go @@ -20,8 +20,8 @@ import ( ) func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromPrimarySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + bookID := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" test := testUtils.TestCase{ Description: "One to one update mutation using alias relation id from single side", @@ -50,7 +50,7 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromPrimarySide(t *testin "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -60,7 +60,7 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromPrimarySide(t *testin `{ "published": "%s" }`, - bookKey, + bookID, ), ExpectedError: "target document is already linked to another document.", }, @@ -71,8 +71,8 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromPrimarySide(t *testin } func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2ID := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" test := testUtils.TestCase{ Description: "One to one update mutation using alias relation id from secondary side", @@ -101,7 +101,7 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySide(t *test "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -111,7 +111,7 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySide(t *test `{ "author": "%s" }`, - author2Key, + author2ID, ), ExpectedError: "target document is already linked to another document.", }, @@ -122,9 +122,9 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySide(t *test } func TestMutationUpdateOneToOne_AliasWithInvalidLengthRelationIDToLink_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - invalidLenSubKey := "35953ca-518d-9e6b-9ce6cd00eff5" - invalidAuthorKey := "bae-" + invalidLenSubKey + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" 
+ invalidLenSubID := "35953ca-518d-9e6b-9ce6cd00eff5" + invalidAuthorID := "bae-" + invalidLenSubID test := testUtils.TestCase{ Description: "One to one update mutation using invalid alias relation id", @@ -147,7 +147,7 @@ func TestMutationUpdateOneToOne_AliasWithInvalidLengthRelationIDToLink_Error(t * "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -157,9 +157,9 @@ func TestMutationUpdateOneToOne_AliasWithInvalidLengthRelationIDToLink_Error(t * `{ "author": "%s" }`, - invalidAuthorKey, + invalidAuthorID, ), - ExpectedError: "uuid: incorrect UUID length 30 in string \"" + invalidLenSubKey + "\"", + ExpectedError: "uuid: incorrect UUID length 30 in string \"" + invalidLenSubID + "\"", }, }, } @@ -168,8 +168,8 @@ func TestMutationUpdateOneToOne_AliasWithInvalidLengthRelationIDToLink_Error(t * } func TestMutationUpdateOneToOne_InvalidAliasRelationNameToLinkFromSecondarySide_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - invalidAuthorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ee" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + invalidAuthorID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ee" test := testUtils.TestCase{ Description: "One to one update mutation using alias relation id from secondary side", @@ -192,7 +192,7 @@ func TestMutationUpdateOneToOne_InvalidAliasRelationNameToLinkFromSecondarySide_ "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -202,9 +202,9 @@ func TestMutationUpdateOneToOne_InvalidAliasRelationNameToLinkFromSecondarySide_ `{ "author": "%s" }`, - invalidAuthorKey, + invalidAuthorID, ), - ExpectedError: "no document for the given key exists", + ExpectedError: "no document for the given ID exists", }, }, } @@ -213,8 +213,8 @@ func TestMutationUpdateOneToOne_InvalidAliasRelationNameToLinkFromSecondarySide_ } func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySideWithWrongField_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2ID := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" test := testUtils.TestCase{ Description: "One to one update mutation using relation alias name from secondary side, with a wrong field.", @@ -243,7 +243,7 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySideWithWron "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -254,7 +254,7 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySideWithWron "notName": "Unpainted Condo", "author": "%s" }`, - author2Key, + author2ID, ), ExpectedError: "The given field does not exist. Name: notName", }, diff --git a/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go b/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go index 2a4c93644d..5b0980baab 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go @@ -22,7 +22,7 @@ import ( // Note: This test should probably not pass, as it contains a // reference to a document that doesnt exist. 
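
Reviewer note: a docID is the `bae-` prefix plus a content-derived UUID, so the invalid-length tests above build a 30-character UUID portion and assert the update is rejected before any document lookup. A sketch of that construction, using only the action fields visible in this patch (targeting fields elided; zero values are assumed to select the first collection and document):

```
package example

import (
	"fmt"
	"testing"

	testUtils "github.com/sourcenetwork/defradb/tests/integration"
)

func TestSketch_InvalidLengthDocIDIsRejected(t *testing.T) {
	// 30 characters where a full 36-character UUID is expected.
	invalidLenSubID := "35953ca-518d-9e6b-9ce6cd00eff5"
	invalidAuthorID := "bae-" + invalidLenSubID
	test := testUtils.TestCase{
		Actions: []any{
			testUtils.UpdateDoc{
				// Collection/document targeting fields elided; zero values
				// are assumed to address the first of each.
				Doc: fmt.Sprintf(`{
					"author_id": "%s"
				}`, invalidAuthorID),
				ExpectedError: "uuid: incorrect UUID length 30 in string \"" + invalidLenSubID + "\"",
			},
		},
	}
	testUtils.ExecuteTestCase(t, test) // assumed harness entry point
}
```
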
func TestMutationUpdateOneToOneNoChild(t *testing.T) { - unknownKey := "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" + unknownID := "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" test := testUtils.TestCase{ Description: "One to one create mutation, from the wrong side", @@ -40,7 +40,7 @@ func TestMutationUpdateOneToOneNoChild(t *testing.T) { `{ "published_id": "%s" }`, - unknownKey, + unknownID, ), }, testUtils.Request{ @@ -61,7 +61,7 @@ func TestMutationUpdateOneToOneNoChild(t *testing.T) { } func TestMutationUpdateOneToOne(t *testing.T) { - bookKey := "bae-3d236f89-6a31-5add-a36a-27971a2eac76" + bookID := "bae-3d236f89-6a31-5add-a36a-27971a2eac76" test := testUtils.TestCase{ Description: "One to one update mutation", @@ -85,7 +85,7 @@ func TestMutationUpdateOneToOne(t *testing.T) { `{ "published_id": "%s" }`, - bookKey, + bookID, ), }, testUtils.Request{ @@ -133,7 +133,7 @@ func TestMutationUpdateOneToOne(t *testing.T) { } func TestMutationUpdateOneToOneSecondarySide(t *testing.T) { - authorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + authorID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" test := testUtils.TestCase{ Description: "One to one create mutation, from the secondary side", @@ -157,7 +157,7 @@ func TestMutationUpdateOneToOneSecondarySide(t *testing.T) { `{ "author_id": "%s" }`, - authorKey, + authorID, ), }, testUtils.Request{ @@ -204,8 +204,8 @@ func TestMutationUpdateOneToOneSecondarySide(t *testing.T) { } func TestMutationUpdateOneToOne_RelationIDToLinkFromPrimarySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + bookID := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" test := testUtils.TestCase{ Description: "One to one update mutation using relation id from single side (wrong)", @@ -229,7 +229,7 @@ func TestMutationUpdateOneToOne_RelationIDToLinkFromPrimarySide(t *testing.T) { "name": "Painted House", "author_id": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -239,7 +239,7 @@ func TestMutationUpdateOneToOne_RelationIDToLinkFromPrimarySide(t *testing.T) { `{ "published_id": "%s" }`, - bookKey, + bookID, ), ExpectedError: "target document is already linked to another document.", }, @@ -250,8 +250,8 @@ func TestMutationUpdateOneToOne_RelationIDToLinkFromPrimarySide(t *testing.T) { } func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2ID := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" test := testUtils.TestCase{ Description: "One to one update mutation using relation id from secondary side", @@ -275,7 +275,7 @@ func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySide(t *testing.T) "name": "Painted House", "author_id": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -285,7 +285,7 @@ func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySide(t *testing.T) `{ "author_id": "%s" }`, - author2Key, + author2ID, ), ExpectedError: "target document is already linked to another document.", }, @@ -296,9 +296,9 @@ func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySide(t *testing.T) } func TestMutationUpdateOneToOne_InvalidLengthRelationIDToLink_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - invalidLenSubKey := "35953ca-518d-9e6b-9ce6cd00eff5" - 
invalidAuthorKey := "bae-" + invalidLenSubKey + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + invalidLenSubID := "35953ca-518d-9e6b-9ce6cd00eff5" + invalidAuthorID := "bae-" + invalidLenSubID test := testUtils.TestCase{ Description: "One to one update mutation using invalid relation id", @@ -316,7 +316,7 @@ func TestMutationUpdateOneToOne_InvalidLengthRelationIDToLink_Error(t *testing.T "name": "Painted House", "author_id": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -326,9 +326,9 @@ func TestMutationUpdateOneToOne_InvalidLengthRelationIDToLink_Error(t *testing.T `{ "author_id": "%s" }`, - invalidAuthorKey, + invalidAuthorID, ), - ExpectedError: "uuid: incorrect UUID length 30 in string \"" + invalidLenSubKey + "\"", + ExpectedError: "uuid: incorrect UUID length 30 in string \"" + invalidLenSubID + "\"", }, }, } @@ -337,8 +337,8 @@ func TestMutationUpdateOneToOne_InvalidLengthRelationIDToLink_Error(t *testing.T } func TestMutationUpdateOneToOne_InvalidRelationIDToLinkFromSecondarySide_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - invalidAuthorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ee" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + invalidAuthorID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ee" test := testUtils.TestCase{ Description: "One to one update mutation using relation id from secondary side", @@ -356,7 +356,7 @@ func TestMutationUpdateOneToOne_InvalidRelationIDToLinkFromSecondarySide_Error(t "name": "Painted House", "author_id": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -366,9 +366,9 @@ func TestMutationUpdateOneToOne_InvalidRelationIDToLinkFromSecondarySide_Error(t `{ "author_id": "%s" }`, - invalidAuthorKey, + invalidAuthorID, ), - ExpectedError: "no document for the given key exists", + ExpectedError: "no document for the given ID exists", }, }, } @@ -377,8 +377,8 @@ func TestMutationUpdateOneToOne_InvalidRelationIDToLinkFromSecondarySide_Error(t } func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySideWithWrongField_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2ID := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" test := testUtils.TestCase{ Description: "One to one update mutation using relation id from secondary side, with a wrong field.", @@ -407,7 +407,7 @@ func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySideWithWrongField_ "name": "Painted House", "author_id": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -418,7 +418,7 @@ func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySideWithWrongField_ "notName": "Unpainted Condo", "author_id": "%s" }`, - author2Key, + author2ID, ), ExpectedError: "The given field does not exist. 
Name: notName", }, diff --git a/tests/integration/mutation/update/underscored_schema_test.go b/tests/integration/mutation/update/underscored_schema_test.go index 7639458ae5..fa25f22d06 100644 --- a/tests/integration/mutation/update/underscored_schema_test.go +++ b/tests/integration/mutation/update/underscored_schema_test.go @@ -19,6 +19,7 @@ import ( func TestMutationUpdateUnderscoredSchema(t *testing.T) { test := testUtils.TestCase{ Description: "Simple update of schema with underscored name", + Actions: []any{ testUtils.SchemaUpdate{ Schema: ` diff --git a/tests/integration/mutation/update/with_delete_test.go b/tests/integration/mutation/update/with_delete_test.go index 444d16f87c..d949bbaa8c 100644 --- a/tests/integration/mutation/update/with_delete_test.go +++ b/tests/integration/mutation/update/with_delete_test.go @@ -46,7 +46,7 @@ func TestUpdateSave_DeletedDoc_DoesNothing(t *testing.T) { Doc: `{ "name": "Fred" }`, - ExpectedError: "a document with the given dockey has been deleted", + ExpectedError: "a document with the given ID has been deleted", }, }, } diff --git a/tests/integration/mutation/update/with_filter_test.go b/tests/integration/mutation/update/with_filter_test.go index 1b47ee6840..455ff99bbf 100644 --- a/tests/integration/mutation/update/with_filter_test.go +++ b/tests/integration/mutation/update/with_filter_test.go @@ -38,7 +38,7 @@ func TestMutationUpdate_WithBooleanFilter_ResultFilteredOut(t *testing.T) { // The update will result in a record that no longer matches the filter Request: `mutation { update_Users(filter: {verified: {_eq: true}}, data: "{\"verified\":false}") { - _key + _docID name verified } diff --git a/tests/integration/mutation/update/with_id_test.go b/tests/integration/mutation/update/with_id_test.go index 59b47bc234..ddc0fe7128 100644 --- a/tests/integration/mutation/update/with_id_test.go +++ b/tests/integration/mutation/update/with_id_test.go @@ -18,7 +18,7 @@ import ( func TestMutationUpdate_WithId(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple update mutation with id", + Description: "Simple update mutation with document id", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -43,7 +43,7 @@ func TestMutationUpdate_WithId(t *testing.T) { }, testUtils.Request{ Request: `mutation { - update_Users(id: "bae-cc36febf-4029-52b3-a876-c99c6293f588", data: "{\"points\": 59}") { + update_Users(docID: "bae-cc36febf-4029-52b3-a876-c99c6293f588", data: "{\"points\": 59}") { name points } @@ -63,7 +63,7 @@ func TestMutationUpdate_WithId(t *testing.T) { func TestMutationUpdate_WithNonExistantId(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple update mutation with non existant id", + Description: "Simple update mutation with non existant document id", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -82,8 +82,8 @@ func TestMutationUpdate_WithNonExistantId(t *testing.T) { }, testUtils.Request{ Request: `mutation { - update_Users(id: "bae-does-not-exist", data: "{\"points\": 59}") { - _key + update_Users(docID: "bae-does-not-exist", data: "{\"points\": 59}") { + _docID name points } diff --git a/tests/integration/mutation/update/with_ids_test.go b/tests/integration/mutation/update/with_ids_test.go index 6e8ff33dab..8d7a4aa6f0 100644 --- a/tests/integration/mutation/update/with_ids_test.go +++ b/tests/integration/mutation/update/with_ids_test.go @@ -51,7 +51,7 @@ func TestMutationUpdate_WithIds(t *testing.T) { testUtils.Request{ Request: `mutation { update_Users( - ids: ["bae-cc36febf-4029-52b3-a876-c99c6293f588", 
"bae-4a99afc4-a70b-5702-9642-fc1eb9ffe901"], + docIDs: ["bae-cc36febf-4029-52b3-a876-c99c6293f588", "bae-4a99afc4-a70b-5702-9642-fc1eb9ffe901"], data: "{\"points\": 59}" ) { name diff --git a/tests/integration/net/order/tcp_test.go b/tests/integration/net/order/tcp_test.go index a66856be3e..e33ca7c1e1 100644 --- a/tests/integration/net/order/tcp_test.go +++ b/tests/integration/net/order/tcp_test.go @@ -156,7 +156,7 @@ func TestP2FullPReplicator(t *testing.T) { }, ReplicatorResult: map[int]map[string]map[string]any{ 1: { - doc.Key().String(): { + doc.ID().String(): { "Age": int64(21), }, }, diff --git a/tests/integration/net/order/utils.go b/tests/integration/net/order/utils.go index 84f41f98d6..09aa44bb13 100644 --- a/tests/integration/net/order/utils.go +++ b/tests/integration/net/order/utils.go @@ -63,13 +63,13 @@ type P2PTestCase struct { SeedDocuments []string DocumentsToReplicate []*client.Document - // node/dockey/values + // node/docID/values Updates map[int]map[int][]string Results map[int]map[int]map[string]any ReplicatorResult map[int]map[string]map[string]any } -func setupDefraNode(t *testing.T, cfg *config.Config, seeds []string) (*net.Node, []client.DocKey, error) { +func setupDefraNode(t *testing.T, cfg *config.Config, seeds []string) (*net.Node, []client.DocID, error) { ctx := context.Background() log.Info(ctx, "Building new memory store") @@ -83,11 +83,11 @@ func setupDefraNode(t *testing.T, cfg *config.Config, seeds []string) (*net.Node } // seed the database with a set of documents - dockeys := []client.DocKey{} + docIDs := []client.DocID{} for _, document := range seeds { - dockey, err := seedDocument(ctx, db, document) + docID, err := seedDocument(ctx, db, document) require.NoError(t, err) - dockeys = append(dockeys, dockey) + docIDs = append(docIDs, docID) } // init the P2P node @@ -120,7 +120,7 @@ func setupDefraNode(t *testing.T, cfg *config.Config, seeds []string) (*net.Node cfg.Net.P2PAddress = n.ListenAddrs()[0].String() - return n, dockeys, nil + return n, docIDs, nil } func seedSchema(ctx context.Context, db client.DB) error { @@ -128,23 +128,23 @@ func seedSchema(ctx context.Context, db client.DB) error { return err } -func seedDocument(ctx context.Context, db client.DB, document string) (client.DocKey, error) { +func seedDocument(ctx context.Context, db client.DB, document string) (client.DocID, error) { col, err := db.GetCollectionByName(ctx, userCollection) if err != nil { - return client.DocKey{}, err + return client.DocID{}, err } doc, err := client.NewDocFromJSON([]byte(document)) if err != nil { - return client.DocKey{}, err + return client.DocID{}, err } err = col.Save(ctx, doc) if err != nil { - return client.DocKey{}, err + return client.DocID{}, err } - return doc.Key(), nil + return doc.ID(), nil } func saveDocument(ctx context.Context, db client.DB, document *client.Document) error { @@ -156,13 +156,13 @@ func saveDocument(ctx context.Context, db client.DB, document *client.Document) return col.Save(ctx, document) } -func updateDocument(ctx context.Context, db client.DB, dockey client.DocKey, update string) error { +func updateDocument(ctx context.Context, db client.DB, docID client.DocID, update string) error { col, err := db.GetCollectionByName(ctx, userCollection) if err != nil { return err } - doc, err := getDocument(ctx, db, dockey) + doc, err := getDocument(ctx, db, docID) if err != nil { return err } @@ -174,13 +174,13 @@ func updateDocument(ctx context.Context, db client.DB, dockey client.DocKey, upd return col.Save(ctx, doc) } -func 
getDocument(ctx context.Context, db client.DB, dockey client.DocKey) (*client.Document, error) { +func getDocument(ctx context.Context, db client.DB, docID client.DocID) (*client.Document, error) { col, err := db.GetCollectionByName(ctx, userCollection) if err != nil { return nil, err } - doc, err := col.Get(ctx, dockey, false) + doc, err := col.Get(ctx, docID, false) if err != nil { return nil, err } @@ -190,7 +190,7 @@ func getDocument(ctx context.Context, db client.DB, dockey client.DocKey) (*clie func executeTestCase(t *testing.T, test P2PTestCase) { ctx := context.Background() - dockeys := []client.DocKey{} + docIDs := []client.DocID{} nodes := []*net.Node{} for i, cfg := range test.NodeConfig { @@ -215,7 +215,7 @@ func executeTestCase(t *testing.T, test P2PTestCase) { require.NoError(t, err) if i == 0 { - dockeys = d + docIDs = d } nodes = append(nodes, n) } @@ -249,7 +249,7 @@ func executeTestCase(t *testing.T, test P2PTestCase) { for d, updates := range updateMap { for _, update := range updates { log.Info(ctx, fmt.Sprintf("Updating node %d with update %d", n, d)) - err := updateDocument(ctx, nodes[n].DB, dockeys[d], update) + err := updateDocument(ctx, nodes[n].DB, docIDs[d], update) require.NoError(t, err) // wait for peers to sync @@ -277,7 +277,7 @@ func executeTestCase(t *testing.T, test P2PTestCase) { for d, results := range resultsMap { for field, result := range results { - doc, err := getDocument(ctx, nodes[n2].DB, dockeys[d]) + doc, err := getDocument(ctx, nodes[n2].DB, docIDs[d]) require.NoError(t, err) val, err := doc.Get(field) @@ -318,9 +318,9 @@ func executeTestCase(t *testing.T, test P2PTestCase) { require.NoError(t, err) log.Info(ctx, fmt.Sprintf("Node %d synced", rep)) - for dockey, results := range test.ReplicatorResult[rep] { + for docID, results := range test.ReplicatorResult[rep] { for field, result := range results { - d, err := client.NewDocKeyFromString(dockey) + d, err := client.NewDocIDFromString(docID) require.NoError(t, err) doc, err := getDocument(ctx, nodes[rep].DB, d) diff --git a/tests/integration/net/state/simple/peer/subscribe/with_add_get_test.go b/tests/integration/net/state/simple/peer/subscribe/with_add_get_test.go index 450902074a..8fd73fe06a 100644 --- a/tests/integration/net/state/simple/peer/subscribe/with_add_get_test.go +++ b/tests/integration/net/state/simple/peer/subscribe/with_add_get_test.go @@ -52,6 +52,8 @@ func TestP2PSubscribeAddGetMultiple(t *testing.T) { testUtils.RandomNetworkingConfig(), testUtils.RandomNetworkingConfig(), testUtils.SchemaUpdate{ + // Note: If a test is failing here in the error trace, you likely need to change the + // order of these schema types declared below (some renaming can cause this). Schema: ` type Users { name: String diff --git a/tests/integration/net/state/simple/replicator/with_create_test.go b/tests/integration/net/state/simple/replicator/with_create_test.go index f877457c9c..08433629c4 100644 --- a/tests/integration/net/state/simple/replicator/with_create_test.go +++ b/tests/integration/net/state/simple/replicator/with_create_test.go @@ -474,10 +474,10 @@ func TestP2POneToOneReplicatorOrderIndependent(t *testing.T) { testUtils.WaitForSync{}, testUtils.Request{ // The document should have been synced, and should contain the same values - // including dockey and schema version id. + // including document id and schema version id. 
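(Reviewer aid, not part of the patch: a minimal sketch of the renamed client API used by the test helpers above, assuming only the signatures visible in these hunks; the `fetchByDocID` helper and the `"Users"` collection name are illustrative.)

```
// Sketch only: client.DocKey is now client.DocID, doc.Key() is now doc.ID(),
// and client.NewDocKeyFromString is now client.NewDocIDFromString.
package example

import (
	"context"

	"github.com/sourcenetwork/defradb/client"
)

// fetchByDocID parses a docID string and fetches the matching document,
// mirroring the getDocument helper in the hunk above.
func fetchByDocID(ctx context.Context, db client.DB, raw string) (*client.Document, error) {
	col, err := db.GetCollectionByName(ctx, "Users")
	if err != nil {
		return nil, err
	}
	docID, err := client.NewDocIDFromString(raw)
	if err != nil {
		return nil, err
	}
	// Get keeps its shape; only the identifier parameter type changed to client.DocID.
	return col.Get(ctx, docID, false)
}
```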
Request: `query { Users { - _key + _docID age name _version { @@ -487,12 +487,12 @@ func TestP2POneToOneReplicatorOrderIndependent(t *testing.T) { }`, Results: []map[string]any{ { - "_key": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", - "age": int64(21), - "name": "John", + "_docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", + "age": int64(21), + "name": "John", "_version": []map[string]any{ { - "schemaVersionId": "bafkreiggbvwwiqmzid4qnklwwdyu7mwhbbjy3ejss3x7uw7zxw6ivmmj6u", + "schemaVersionId": "bafkreicnoqat3exmvikr36xu3hhrkvay3d3cif24tezgsyvrydpobk2nqm", }, }, }, @@ -537,11 +537,11 @@ func TestP2POneToOneReplicatorOrderIndependentDirectCreate(t *testing.T) { }`, }, testUtils.Request{ - // Assert that the dockey and schema version id are the same across all nodes, + // Assert that the document id and schema version id are the same across all nodes, // even though the schema field order is different. Request: `query { Users { - _key + _docID _version { schemaVersionId } @@ -549,10 +549,10 @@ func TestP2POneToOneReplicatorOrderIndependentDirectCreate(t *testing.T) { }`, Results: []map[string]any{ { - "_key": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", + "_docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", "_version": []map[string]any{ { - "schemaVersionId": "bafkreiggbvwwiqmzid4qnklwwdyu7mwhbbjy3ejss3x7uw7zxw6ivmmj6u", + "schemaVersionId": "bafkreicnoqat3exmvikr36xu3hhrkvay3d3cif24tezgsyvrydpobk2nqm", }, }, }, diff --git a/tests/integration/query/commits/simple_test.go b/tests/integration/query/commits/simple_test.go index 1ee63bcedd..a0dd120ec4 100644 --- a/tests/integration/query/commits/simple_test.go +++ b/tests/integration/query/commits/simple_test.go @@ -36,13 +36,13 @@ func TestQueryCommits(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", }, }, }, @@ -79,22 +79,22 @@ func TestQueryCommitsMultipleDocs(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeiftg4c3aioppm2mn5f7wuqynbezricqdzpvspkd74jm7lq2jrst6m", + "cid": "bafybeifnoeodhrvpimwnuwcxmz2fxci6cwrw5ck5vo5n6rkkdt47hepyhm", }, { - "cid": "bafybeielma57bnbv5oizjsv7szhu6jq45rxfcdof62opaygyyqp2j7qd5e", + "cid": "bafybeihx6t43wc23xzak7raultfzpvnetrsi7vhzglray3r7k4gdksbuk4", }, { - "cid": "bafybeigvf4bcuc53dphwniloxt3kqqoersoghdprxsjkb6xqq7wup34usy", + "cid": "bafybeicvpe4oyfrgcuhf2eqqgp2iwuifgl73d6jo4pdlg3x3vqmnusgxv4", }, { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", }, }, }, @@ -125,16 +125,16 @@ func TestQueryCommitsWithSchemaVersionIdField(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", - "schemaVersionId": "bafkreictcre4pylafzzoh5lpgbetdodunz4r6pz3ormdzzpsz2lqtp4v34", + "cid": 
"bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "schemaVersionId": "bafkreiayhdsgzhmrz6t5d3x2cgqqbdjt7aqgldtlkmxn5eibg542j3n6ea", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", - "schemaVersionId": "bafkreictcre4pylafzzoh5lpgbetdodunz4r6pz3ormdzzpsz2lqtp4v34", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "schemaVersionId": "bafkreiayhdsgzhmrz6t5d3x2cgqqbdjt7aqgldtlkmxn5eibg542j3n6ea", }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", - "schemaVersionId": "bafkreictcre4pylafzzoh5lpgbetdodunz4r6pz3ormdzzpsz2lqtp4v34", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "schemaVersionId": "bafkreiayhdsgzhmrz6t5d3x2cgqqbdjt7aqgldtlkmxn5eibg542j3n6ea", }, }, }, @@ -285,6 +285,8 @@ func TestQueryCommitsWithFieldIDFieldWithUpdate(t *testing.T) { testUtils.Request{ Request: ` query { + + commits { fieldId } diff --git a/tests/integration/query/commits/with_cid_test.go b/tests/integration/query/commits/with_cid_test.go index 46d767620e..d34b5c7f0f 100644 --- a/tests/integration/query/commits/with_cid_test.go +++ b/tests/integration/query/commits/with_cid_test.go @@ -38,14 +38,14 @@ func TestQueryCommitsWithCid(t *testing.T) { testUtils.Request{ Request: `query { commits( - cid: "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq" + cid: "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi" ) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", }, }, }, @@ -56,7 +56,7 @@ func TestQueryCommitsWithCid(t *testing.T) { } func TestQueryCommitsWithCidForFieldCommit(t *testing.T) { - // cid is for a field commit, see TestQueryCommitsWithDockeyAndFieldId + // cid is for a field commit, see TestQueryCommitsWithDocIDAndFieldId test := testUtils.TestCase{ Description: "Simple all commits query with cid", Actions: []any{ @@ -71,14 +71,14 @@ func TestQueryCommitsWithCidForFieldCommit(t *testing.T) { testUtils.Request{ Request: `query { commits( - cid: "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq" + cid: "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi" ) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", }, }, }, diff --git a/tests/integration/query/commits/with_depth_test.go b/tests/integration/query/commits/with_depth_test.go index f3bc9bc35c..8977a84bbb 100644 --- a/tests/integration/query/commits/with_depth_test.go +++ b/tests/integration/query/commits/with_depth_test.go @@ -36,13 +36,13 @@ func TestQueryCommitsWithDepth1(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", }, }, }, @@ -81,16 +81,16 @@ func TestQueryCommitsWithDepth1WithUpdate(t *testing.T) { Results: []map[string]any{ { // "Age" field head - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", 
"height": int64(2), }, { // "Name" field head (unchanged from create) - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "height": int64(1), }, { - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", "height": int64(2), }, }, @@ -137,27 +137,27 @@ func TestQueryCommitsWithDepth2WithUpdate(t *testing.T) { Results: []map[string]any{ { // Composite head - "cid": "bafybeietneua73vfrkvfefw5rmju7yhee6rywychdbls5xqmqtqmzfckzq", + "cid": "bafybeifj3dw2wehaabwmrkcmebj3xyyujlp32sycydd3wfjszx3bfxglfu", "height": int64(3), }, { // Composite head -1 - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", "height": int64(2), }, { // "Name" field head (unchanged from create) - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "height": int64(1), }, { // "Age" field head - "cid": "bafybeidwgrk2xyu25pmwvpkfs4hnswtgej6gopkf26jrgm6lpbofa3rs3e", + "cid": "bafybeieirgdstog2griwuuxgb4c3frgka55yoodjwdznraoieqcxfdijw4", "height": int64(3), }, { // "Age" field head -1 - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", "height": int64(2), }, }, @@ -195,22 +195,22 @@ func TestQueryCommitsWithDepth1AndMultipleDocs(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeihayvvwwsjvd3yefenc4ubebriluyg4rdzxmizrhefk4agotcqlp4", + "cid": "bafybeiasu5mdp6652oux4avwugv6gbd6ciqqsuj2zjv4ypksmiwndgwkeq", }, { - "cid": "bafybeiezcqlaqvozdw3ogdf2dxukwrf5m3xydd7lyy6ylcqycx5uqqepfm", + "cid": "bafybeia7shc4tpafpzblxqjyxmb7fayegsvaol3p2ucujaawig3wtopibu", }, { - "cid": "bafybeicr2lalkqj6weqcafm32posw22hjmybwohau57eswg5a442qilc2q", + "cid": "bafybeifwn57hy5m5rddplfxdomes34ykck775yvinc522nowspkvawqr6q", }, { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", }, }, }, diff --git a/tests/integration/query/commits/with_dockey_cid_test.go b/tests/integration/query/commits/with_doc_id_cid_test.go similarity index 72% rename from tests/integration/query/commits/with_dockey_cid_test.go rename to tests/integration/query/commits/with_doc_id_cid_test.go index be1c3bf580..2003158565 100644 --- a/tests/integration/query/commits/with_dockey_cid_test.go +++ b/tests/integration/query/commits/with_doc_id_cid_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryCommitsWithDockeyAndCidForDifferentDoc(t *testing.T) { +func TestQueryCommitsWithDocIDAndCidForDifferentDoc(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and cid", + Description: "Simple all commits query with docID and cid, for different doc", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -31,7 +31,7 @@ func TestQueryCommitsWithDockeyAndCidForDifferentDoc(t *testing.T) { testUtils.Request{ Request: ` { commits( - 
dockey: "bae-not-this-doc", + docID: "bae-not-this-doc", cid: "bafybeica4js2abwqjjrz7dcialbortbz32uxp7ufxu7yljbwvmhjqqxzny" ) { cid @@ -45,9 +45,9 @@ func TestQueryCommitsWithDockeyAndCidForDifferentDoc(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithDockeyAndCidForDifferentDocWithUpdate(t *testing.T) { +func TestQueryCommitsWithDocIDAndCidForDifferentDocWithUpdate(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and cid", + Description: "Simple all commits query with docID and cid, for different doc with update", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -67,7 +67,7 @@ func TestQueryCommitsWithDockeyAndCidForDifferentDocWithUpdate(t *testing.T) { testUtils.Request{ Request: ` { commits( - dockey: "bae-not-this-doc", + docID: "bae-not-this-doc", cid: "bafybeica4js2abwqjjrz7dcialbortbz32uxp7ufxu7yljbwvmhjqqxzny" ) { cid @@ -81,9 +81,9 @@ func TestQueryCommitsWithDockeyAndCidForDifferentDocWithUpdate(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithDockeyAndCid(t *testing.T) { +func TestQueryCommitsWithDocIDAndCidWithUpdate(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and cid", + Description: "Simple all commits query with docID and cid, with update", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -103,15 +103,15 @@ func TestQueryCommitsWithDockeyAndCid(t *testing.T) { testUtils.Request{ Request: ` { commits( - dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", - cid: "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm" + docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", + cid: "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju" ) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", }, }, }, diff --git a/tests/integration/query/commits/with_dockey_count_test.go b/tests/integration/query/commits/with_doc_id_count_test.go similarity index 68% rename from tests/integration/query/commits/with_dockey_count_test.go rename to tests/integration/query/commits/with_doc_id_count_test.go index 7a61fa5fef..ba5b0eb589 100644 --- a/tests/integration/query/commits/with_dockey_count_test.go +++ b/tests/integration/query/commits/with_doc_id_count_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryCommitsWithDockeyAndLinkCount(t *testing.T) { +func TestQueryCommitsWithDocIDAndLinkCount(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple latest commits query with dockey and link count", + Description: "Simple latest commits query with docID and link count", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -30,22 +30,22 @@ func TestQueryCommitsWithDockeyAndLinkCount(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { cid _count(field: links) } }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "_count": 0, }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "_count": 0, }, { - "cid": 
"bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "_count": 2, }, }, diff --git a/tests/integration/query/commits/with_dockey_field_test.go b/tests/integration/query/commits/with_doc_id_field_test.go similarity index 66% rename from tests/integration/query/commits/with_dockey_field_test.go rename to tests/integration/query/commits/with_doc_id_field_test.go index b588300fb6..87b6edb06c 100644 --- a/tests/integration/query/commits/with_dockey_field_test.go +++ b/tests/integration/query/commits/with_doc_id_field_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryCommitsWithDockeyAndUnknownField(t *testing.T) { +func TestQueryCommitsWithDocIDAndUnknownField(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and unknown field", + Description: "Simple all commits query with docID and unknown field", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -30,7 +30,7 @@ func TestQueryCommitsWithDockeyAndUnknownField(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "not a field") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "not a field") { cid } }`, @@ -42,9 +42,9 @@ func TestQueryCommitsWithDockeyAndUnknownField(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithDockeyAndUnknownFieldId(t *testing.T) { +func TestQueryCommitsWithDocIDAndUnknownFieldId(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and unknown field id", + Description: "Simple all commits query with docID and unknown field id", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -56,7 +56,7 @@ func TestQueryCommitsWithDockeyAndUnknownFieldId(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "999999") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "999999") { cid } }`, @@ -69,10 +69,10 @@ func TestQueryCommitsWithDockeyAndUnknownFieldId(t *testing.T) { } // This test is for documentation reasons only. This is not -// desired behaviour (should return all commits for dockey-field). -func TestQueryCommitsWithDockeyAndField(t *testing.T) { +// desired behaviour (should return all commits for docID-field). +func TestQueryCommitsWithDocIDAndField(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and field", + Description: "Simple all commits query with docID and field", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -84,7 +84,7 @@ func TestQueryCommitsWithDockeyAndField(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "Age") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "Age") { cid } }`, @@ -98,9 +98,9 @@ func TestQueryCommitsWithDockeyAndField(t *testing.T) { // This test is for documentation reasons only. This is not // desired behaviour (Users should not be specifying field ids). 
-func TestQueryCommitsWithDockeyAndFieldId(t *testing.T) { +func TestQueryCommitsWithDocIDAndFieldId(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and field id", + Description: "Simple all commits query with docID and field id", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -112,13 +112,13 @@ func TestQueryCommitsWithDockeyAndFieldId(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "1") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "1") { cid } }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", }, }, }, @@ -130,9 +130,9 @@ func TestQueryCommitsWithDockeyAndFieldId(t *testing.T) { // This test is for documentation reasons only. This is not // desired behaviour (Users should not be specifying field ids). -func TestQueryCommitsWithDockeyAndCompositeFieldId(t *testing.T) { +func TestQueryCommitsWithDocIDAndCompositeFieldId(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and field id", + Description: "Simple all commits query with docID and field id", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -144,13 +144,13 @@ func TestQueryCommitsWithDockeyAndCompositeFieldId(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "C") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "C") { cid } }`, Results: []map[string]any{ { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", }, }, }, diff --git a/tests/integration/query/commits/with_dockey_group_order_test.go b/tests/integration/query/commits/with_doc_id_group_order_test.go similarity index 79% rename from tests/integration/query/commits/with_dockey_group_order_test.go rename to tests/integration/query/commits/with_doc_id_group_order_test.go index d29a3683ea..d858bcf819 100644 --- a/tests/integration/query/commits/with_dockey_group_order_test.go +++ b/tests/integration/query/commits/with_doc_id_group_order_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryCommitsOrderedAndGroupedByDocKey(t *testing.T) { +func TestQueryCommitsOrderedAndGroupedByDocID(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query, grouped and ordered by dockey", + Description: "Simple all commits query, grouped and ordered by docID", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -37,16 +37,16 @@ func TestQueryCommitsOrderedAndGroupedByDocKey(t *testing.T) { }, testUtils.Request{ Request: ` { - commits(groupBy: [dockey], order: {dockey: DESC}) { - dockey + commits(groupBy: [docID], order: {docID: DESC}) { + docID } }`, Results: []map[string]any{ { - "dockey": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", + "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", }, { - "dockey": "bae-72f3dc53-1846-55d5-915c-28c4e83cc891", + "docID": "bae-72f3dc53-1846-55d5-915c-28c4e83cc891", }, }, }, diff --git a/tests/integration/query/commits/with_dockey_limit_offset_test.go b/tests/integration/query/commits/with_doc_id_limit_offset_test.go similarity index 75% rename from 
tests/integration/query/commits/with_dockey_limit_offset_test.go rename to tests/integration/query/commits/with_doc_id_limit_offset_test.go index a8c6665bca..e6a622aa3c 100644 --- a/tests/integration/query/commits/with_dockey_limit_offset_test.go +++ b/tests/integration/query/commits/with_doc_id_limit_offset_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryCommitsWithDockeyAndLimitAndOffset(t *testing.T) { +func TestQueryCommitsWithDocIDAndLimitAndOffset(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey, limit and offset", + Description: "Simple all commits query with docID, limit and offset", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -51,16 +51,16 @@ func TestQueryCommitsWithDockeyAndLimitAndOffset(t *testing.T) { }, testUtils.Request{ Request: ` { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", limit: 2, offset: 1) { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", limit: 2, offset: 1) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeietneua73vfrkvfefw5rmju7yhee6rywychdbls5xqmqtqmzfckzq", + "cid": "bafybeifj3dw2wehaabwmrkcmebj3xyyujlp32sycydd3wfjszx3bfxglfu", }, { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", }, }, }, diff --git a/tests/integration/query/commits/with_dockey_limit_test.go b/tests/integration/query/commits/with_doc_id_limit_test.go similarity index 75% rename from tests/integration/query/commits/with_dockey_limit_test.go rename to tests/integration/query/commits/with_doc_id_limit_test.go index b9f8e51f8b..23b045b708 100644 --- a/tests/integration/query/commits/with_dockey_limit_test.go +++ b/tests/integration/query/commits/with_doc_id_limit_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryCommitsWithDockeyAndLimit(t *testing.T) { +func TestQueryCommitsWithDocIDAndLimit(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and limit", + Description: "Simple all commits query with docID and limit", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -44,16 +44,16 @@ func TestQueryCommitsWithDockeyAndLimit(t *testing.T) { }, testUtils.Request{ Request: ` { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", limit: 2) { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", limit: 2) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeietneua73vfrkvfefw5rmju7yhee6rywychdbls5xqmqtqmzfckzq", + "cid": "bafybeifj3dw2wehaabwmrkcmebj3xyyujlp32sycydd3wfjszx3bfxglfu", }, { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", }, }, }, diff --git a/tests/integration/query/commits/with_dockey_order_limit_offset_test.go b/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go similarity index 74% rename from tests/integration/query/commits/with_dockey_order_limit_offset_test.go rename to tests/integration/query/commits/with_doc_id_order_limit_offset_test.go index 195e2b3a8e..118262d5c9 100644 --- a/tests/integration/query/commits/with_dockey_order_limit_offset_test.go +++ b/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func 
TestQueryCommitsWithDockeyAndOrderAndLimitAndOffset(t *testing.T) { +func TestQueryCommitsWithDocIDAndOrderAndLimitAndOffset(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey, order, limit and offset", + Description: "Simple all commits query with docID, order, limit and offset", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -51,18 +51,18 @@ func TestQueryCommitsWithDockeyAndOrderAndLimitAndOffset(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {height: ASC}, limit: 2, offset: 4) { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {height: ASC}, limit: 2, offset: 4) { cid height } }`, Results: []map[string]any{ { - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", "height": int64(2), }, { - "cid": "bafybeietneua73vfrkvfefw5rmju7yhee6rywychdbls5xqmqtqmzfckzq", + "cid": "bafybeifj3dw2wehaabwmrkcmebj3xyyujlp32sycydd3wfjszx3bfxglfu", "height": int64(3), }, }, diff --git a/tests/integration/query/commits/with_dockey_order_test.go b/tests/integration/query/commits/with_doc_id_order_test.go similarity index 55% rename from tests/integration/query/commits/with_dockey_order_test.go rename to tests/integration/query/commits/with_doc_id_order_test.go index 2b4e8f6156..47f0ce3802 100644 --- a/tests/integration/query/commits/with_dockey_order_test.go +++ b/tests/integration/query/commits/with_doc_id_order_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryCommitsWithDockeyAndOrderHeightDesc(t *testing.T) { +func TestQueryCommitsWithDocIDAndOrderHeightDesc(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey, order height desc", + Description: "Simple all commits query with docID, order height desc", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -37,30 +37,30 @@ func TestQueryCommitsWithDockeyAndOrderHeightDesc(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {height: DESC}) { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {height: DESC}) { cid height } }`, Results: []map[string]any{ { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", "height": int64(2), }, { - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", "height": int64(2), }, { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "height": int64(1), }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "height": int64(1), }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "height": int64(1), }, }, @@ -71,9 +71,9 @@ func TestQueryCommitsWithDockeyAndOrderHeightDesc(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithDockeyAndOrderHeightAsc(t *testing.T) { +func TestQueryCommitsWithDocIDAndOrderHeightAsc(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits 
query with dockey, order height asc", + Description: "Simple all commits query with docID, order height asc", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -92,30 +92,30 @@ func TestQueryCommitsWithDockeyAndOrderHeightAsc(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {height: ASC}) { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {height: ASC}) { cid height } }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "height": int64(1), }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "height": int64(1), }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "height": int64(1), }, { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", "height": int64(2), }, { - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", "height": int64(2), }, }, @@ -126,9 +126,9 @@ func TestQueryCommitsWithDockeyAndOrderHeightAsc(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithDockeyAndOrderCidDesc(t *testing.T) { +func TestQueryCommitsWithDocIDAndOrderCidDesc(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey, order cid desc", + Description: "Simple all commits query with docID, order cid desc", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -147,30 +147,30 @@ func TestQueryCommitsWithDockeyAndOrderCidDesc(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {cid: DESC}) { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {cid: DESC}) { cid height } }`, Results: []map[string]any{ { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "height": int64(1), }, { - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", - "height": int64(2), + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "height": int64(1), }, { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", "height": int64(2), }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", - "height": int64(1), + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", + "height": int64(2), }, { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "height": int64(1), }, }, @@ -181,9 +181,9 @@ func TestQueryCommitsWithDockeyAndOrderCidDesc(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithDockeyAndOrderCidAsc(t *testing.T) { +func TestQueryCommitsWithDocIDAndOrderCidAsc(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey, order cid asc", + Description: "Simple all commits query with docID, order cid asc", Actions: []any{ updateUserCollectionSchema(), 
testUtils.CreateDoc{ @@ -202,30 +202,30 @@ func TestQueryCommitsWithDockeyAndOrderCidAsc(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {cid: ASC}) { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {cid: ASC}) { cid height } }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "height": int64(1), }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", - "height": int64(1), + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", + "height": int64(2), }, { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", "height": int64(2), }, { - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", - "height": int64(2), + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "height": int64(1), }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "height": int64(1), }, }, @@ -236,9 +236,9 @@ func TestQueryCommitsWithDockeyAndOrderCidAsc(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithDockeyAndOrderAndMultiUpdatesCidAsc(t *testing.T) { +func TestQueryCommitsWithDocIDAndOrderAndMultiUpdatesCidAsc(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey, multiple updates with order cid asc", + Description: "Simple all commits query with docID, multiple updates with order cid asc", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -271,46 +271,46 @@ func TestQueryCommitsWithDockeyAndOrderAndMultiUpdatesCidAsc(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {height: ASC}) { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {height: ASC}) { cid height } }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "height": int64(1), }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "height": int64(1), }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "height": int64(1), }, { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", "height": int64(2), }, { - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", "height": int64(2), }, { - "cid": "bafybeietneua73vfrkvfefw5rmju7yhee6rywychdbls5xqmqtqmzfckzq", + "cid": "bafybeifj3dw2wehaabwmrkcmebj3xyyujlp32sycydd3wfjszx3bfxglfu", "height": int64(3), }, { - "cid": "bafybeidwgrk2xyu25pmwvpkfs4hnswtgej6gopkf26jrgm6lpbofa3rs3e", + "cid": "bafybeieirgdstog2griwuuxgb4c3frgka55yoodjwdznraoieqcxfdijw4", "height": int64(3), }, { - "cid": "bafybeiahvakoy5joy563em7hlzvqcarxqdp2nin4gnxythoj4fqjh7umzu", + "cid": "bafybeidoph22zh2c4kh2tx5qbg62nbrulvald6w5hgvp5x5rjurdbz3ibi", "height": int64(4), }, { - "cid": 
"bafybeighft6vokgntjvpirwdt233xizmnhxtawiqeahwypxv7u26dwseoe", + "cid": "bafybeiacs2yvfbjgk3xfz5zgt43gswo4jhreieenwkb4whpstjas5cpbdy", "height": int64(4), }, }, diff --git a/tests/integration/query/commits/with_dockey_prop_test.go b/tests/integration/query/commits/with_doc_id_prop_test.go similarity index 74% rename from tests/integration/query/commits/with_dockey_prop_test.go rename to tests/integration/query/commits/with_doc_id_prop_test.go index daf21ba1c7..6404114217 100644 --- a/tests/integration/query/commits/with_dockey_prop_test.go +++ b/tests/integration/query/commits/with_doc_id_prop_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryCommitsWithDockeyProperty(t *testing.T) { +func TestQueryCommitsWithDocIDProperty(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple commits query with dockey property", + Description: "Simple commits query with docID property", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -31,18 +31,18 @@ func TestQueryCommitsWithDockeyProperty(t *testing.T) { testUtils.Request{ Request: `query { commits { - dockey + docID } }`, Results: []map[string]any{ { - "dockey": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", + "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", }, { - "dockey": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", + "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", }, { - "dockey": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", + "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", }, }, }, diff --git a/tests/integration/query/commits/with_dockey_test.go b/tests/integration/query/commits/with_doc_id_test.go similarity index 58% rename from tests/integration/query/commits/with_dockey_test.go rename to tests/integration/query/commits/with_doc_id_test.go index 9dde4bc41a..b69f278c10 100644 --- a/tests/integration/query/commits/with_dockey_test.go +++ b/tests/integration/query/commits/with_doc_id_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryCommitsWithUnknownDockey(t *testing.T) { +func TestQueryCommitsWithUnknownDocID(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with unknown dockey", + Description: "Simple all commits query with unknown document ID", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -30,7 +30,7 @@ func TestQueryCommitsWithUnknownDockey(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "unknown dockey") { + commits(docID: "unknown document ID") { cid } }`, @@ -42,9 +42,9 @@ func TestQueryCommitsWithUnknownDockey(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithDockey(t *testing.T) { +func TestQueryCommitsWithDocID(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey", + Description: "Simple all commits query with docID", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -56,19 +56,19 @@ func TestQueryCommitsWithDockey(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { cid } }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": 
"bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", }, }, }, @@ -78,9 +78,9 @@ func TestQueryCommitsWithDockey(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithDockeyAndLinks(t *testing.T) { +func TestQueryCommitsWithDocIDAndLinks(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey, with links", + Description: "Simple all commits query with docID, with links", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -92,7 +92,7 @@ func TestQueryCommitsWithDockeyAndLinks(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { cid links { cid @@ -102,22 +102,22 @@ func TestQueryCommitsWithDockeyAndLinks(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "links": []map[string]any{}, }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "links": []map[string]any{}, }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "links": []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "name": "age", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "name": "name", }, }, @@ -130,9 +130,9 @@ func TestQueryCommitsWithDockeyAndLinks(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithDockeyAndUpdate(t *testing.T) { +func TestQueryCommitsWithDocIDAndUpdate(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey, multiple results", + Description: "Simple all commits query with docID, multiple results", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -151,30 +151,30 @@ func TestQueryCommitsWithDockeyAndUpdate(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { cid height } }`, Results: []map[string]any{ { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", "height": int64(2), }, { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "height": int64(1), }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "height": int64(1), }, { - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", "height": int64(2), }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "height": int64(1), }, }, @@ -188,9 +188,9 @@ func 
TestQueryCommitsWithDockeyAndUpdate(t *testing.T) { // This test is for documentation reasons only. This is not // desired behaviour (first results includes link._head, second // includes link._Name). -func TestQueryCommitsWithDockeyAndUpdateAndLinks(t *testing.T) { +func TestQueryCommitsWithDocIDAndUpdateAndLinks(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey, multiple results and links", + Description: "Simple all commits query with docID, multiple results and links", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -209,7 +209,7 @@ func TestQueryCommitsWithDockeyAndUpdateAndLinks(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { cid links { cid @@ -219,44 +219,44 @@ func TestQueryCommitsWithDockeyAndUpdateAndLinks(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", "links": []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "name": "_head", }, }, }, { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "links": []map[string]any{}, }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "links": []map[string]any{}, }, { - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", "links": []map[string]any{ { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "name": "_head", }, { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", "name": "age", }, }, }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "links": []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "name": "age", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "name": "name", }, }, diff --git a/tests/integration/query/commits/with_dockey_typename_test.go b/tests/integration/query/commits/with_doc_id_typename_test.go similarity index 69% rename from tests/integration/query/commits/with_dockey_typename_test.go rename to tests/integration/query/commits/with_doc_id_typename_test.go index f8573785f4..4c360c297e 100644 --- a/tests/integration/query/commits/with_dockey_typename_test.go +++ b/tests/integration/query/commits/with_doc_id_typename_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryCommitsWithDockeyWithTypeName(t *testing.T) { +func TestQueryCommitsWithDocIDWithTypeName(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and typename", + Description: "Simple all commits query with docID and typename", Actions: 
[]any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -30,22 +30,22 @@ func TestQueryCommitsWithDockeyWithTypeName(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { cid __typename } }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "__typename": "Commit", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "__typename": "Commit", }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "__typename": "Commit", }, }, diff --git a/tests/integration/query/commits/with_field_test.go b/tests/integration/query/commits/with_field_test.go index 008dc871d4..f8cd4e961f 100644 --- a/tests/integration/query/commits/with_field_test.go +++ b/tests/integration/query/commits/with_field_test.go @@ -17,7 +17,7 @@ import ( ) // This test is for documentation reasons only. This is not -// desired behaviour (should return all commits for dockey-field). +// desired behaviour (should return all commits for docID-field). func TestQueryCommitsWithField(t *testing.T) { test := testUtils.TestCase{ Description: "Simple all commits query with field", @@ -66,7 +66,7 @@ func TestQueryCommitsWithFieldId(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", }, }, }, @@ -80,7 +80,7 @@ func TestQueryCommitsWithFieldId(t *testing.T) { // desired behaviour (Users should not be specifying field ids). func TestQueryCommitsWithCompositeFieldId(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and field id", + Description: "Simple all commits query with docID and field id", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -98,7 +98,7 @@ func TestQueryCommitsWithCompositeFieldId(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", }, }, }, @@ -112,7 +112,7 @@ func TestQueryCommitsWithCompositeFieldId(t *testing.T) { // desired behaviour (Users should not be specifying field ids). func TestQueryCommitsWithCompositeFieldIdWithReturnedSchemaVersionId(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and field id", + Description: "Simple all commits query with docID and field id", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -131,8 +131,8 @@ func TestQueryCommitsWithCompositeFieldIdWithReturnedSchemaVersionId(t *testing. 
}`, Results: []map[string]any{ { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", - "schemaVersionId": "bafkreictcre4pylafzzoh5lpgbetdodunz4r6pz3ormdzzpsz2lqtp4v34", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "schemaVersionId": "bafkreiayhdsgzhmrz6t5d3x2cgqqbdjt7aqgldtlkmxn5eibg542j3n6ea", }, }, }, diff --git a/tests/integration/query/commits/with_group_test.go b/tests/integration/query/commits/with_group_test.go index d031d70540..64439c97e1 100644 --- a/tests/integration/query/commits/with_group_test.go +++ b/tests/integration/query/commits/with_group_test.go @@ -89,10 +89,10 @@ func TestQueryCommitsWithGroupByHeightWithChild(t *testing.T) { "height": int64(2), "_group": []map[string]any{ { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", }, { - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", }, }, }, @@ -100,13 +100,13 @@ func TestQueryCommitsWithGroupByHeightWithChild(t *testing.T) { "height": int64(1), "_group": []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", }, }, }, @@ -142,7 +142,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "_group": []map[string]any{ { "height": int64(1), @@ -150,7 +150,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { }, }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "_group": []map[string]any{ { "height": int64(1), @@ -158,7 +158,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { }, }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "_group": []map[string]any{ { "height": int64(1), @@ -173,9 +173,9 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithGroupByDocKey(t *testing.T) { +func TestQueryCommitsWithGroupByDocID(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query, group by dockey", + Description: "Simple all commits query, group by document ID", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -208,16 +208,16 @@ func TestQueryCommitsWithGroupByDocKey(t *testing.T) { }, testUtils.Request{ Request: ` { - commits(groupBy: [dockey]) { - dockey + commits(groupBy: [docID]) { + docID } }`, Results: []map[string]any{ { - "dockey": "bae-72f3dc53-1846-55d5-915c-28c4e83cc891", + "docID": "bae-72f3dc53-1846-55d5-915c-28c4e83cc891", }, { - "dockey": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", + "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", }, }, }, diff --git a/tests/integration/query/latest_commits/simple_test.go b/tests/integration/query/latest_commits/simple_test.go 
index 7a6e28f6d8..e31ee22da8 100644 --- a/tests/integration/query/latest_commits/simple_test.go +++ b/tests/integration/query/latest_commits/simple_test.go @@ -38,7 +38,7 @@ func TestQueryLatestCommits(t *testing.T) { }`, }, }, - ExpectedError: "Field \"latestCommits\" argument \"dockey\" of type \"ID!\" is required but not provided.", + ExpectedError: "Field \"latestCommits\" argument \"docID\" of type \"ID!\" is required but not provided.", } executeTestCase(t, test) diff --git a/tests/integration/query/latest_commits/with_collectionid_prop_test.go b/tests/integration/query/latest_commits/with_collectionid_prop_test.go index afdd6ae7ee..78ffab9b3c 100644 --- a/tests/integration/query/latest_commits/with_collectionid_prop_test.go +++ b/tests/integration/query/latest_commits/with_collectionid_prop_test.go @@ -37,7 +37,7 @@ func TestQueryLastCommitsWithCollectionIdProperty(t *testing.T) { }, testUtils.Request{ Request: `query { - latestCommits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { + latestCommits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { collectionID } }`, @@ -49,7 +49,7 @@ func TestQueryLastCommitsWithCollectionIdProperty(t *testing.T) { }, testUtils.Request{ Request: `query { - latestCommits(dockey: "bae-de8c99bf-ee0e-5655-8a72-919c2d459a30") { + latestCommits(docID: "bae-de8c99bf-ee0e-5655-8a72-919c2d459a30") { collectionID } }`, diff --git a/tests/integration/query/latest_commits/with_dockey_field_test.go b/tests/integration/query/latest_commits/with_doc_id_field_test.go similarity index 65% rename from tests/integration/query/latest_commits/with_dockey_field_test.go rename to tests/integration/query/latest_commits/with_doc_id_field_test.go index dce5da651f..d320aff1a8 100644 --- a/tests/integration/query/latest_commits/with_dockey_field_test.go +++ b/tests/integration/query/latest_commits/with_doc_id_field_test.go @@ -18,11 +18,11 @@ import ( // This test is for documentation reasons only. This is not // desired behaviour (it looks totally broken to me). -func TestQueryLatestCommitsWithDocKeyAndFieldName(t *testing.T) { +func TestQueryLatestCommitsWithDocIDAndFieldName(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple latest commits query with dockey and field name", + Description: "Simple latest commits query with docID and field name", Request: `query { - latestCommits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "age") { + latestCommits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "age") { cid links { cid @@ -46,11 +46,11 @@ func TestQueryLatestCommitsWithDocKeyAndFieldName(t *testing.T) { // This test is for documentation reasons only. This is not // desired behaviour (Users should not be specifying field ids). 
-func TestQueryLatestCommitsWithDocKeyAndFieldId(t *testing.T) { +func TestQueryLatestCommitsWithDocIDAndFieldId(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple latest commits query with dockey and field id", + Description: "Simple latest commits query with docID and field id", Request: `query { - latestCommits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "1") { + latestCommits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "1") { cid links { cid @@ -68,7 +68,7 @@ func TestQueryLatestCommitsWithDocKeyAndFieldId(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "links": []map[string]any{}, }, }, @@ -79,11 +79,11 @@ func TestQueryLatestCommitsWithDocKeyAndFieldId(t *testing.T) { // This test is for documentation reasons only. This is not // desired behaviour (Users should not be specifying field ids). -func TestQueryLatestCommitsWithDocKeyAndCompositeFieldId(t *testing.T) { +func TestQueryLatestCommitsWithDocIDAndCompositeFieldId(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple latest commits query with dockey and composite field id", + Description: "Simple latest commits query with docID and composite field id", Request: `query { - latestCommits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "C") { + latestCommits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "C") { cid links { cid @@ -101,14 +101,14 @@ func TestQueryLatestCommitsWithDocKeyAndCompositeFieldId(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "links": []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "name": "age", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "name": "name", }, }, diff --git a/tests/integration/query/latest_commits/with_dockey_prop_test.go b/tests/integration/query/latest_commits/with_doc_id_prop_test.go similarity index 75% rename from tests/integration/query/latest_commits/with_dockey_prop_test.go rename to tests/integration/query/latest_commits/with_doc_id_prop_test.go index b7ffd80d65..247d536532 100644 --- a/tests/integration/query/latest_commits/with_dockey_prop_test.go +++ b/tests/integration/query/latest_commits/with_doc_id_prop_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryLastCommitsWithDockeyProperty(t *testing.T) { +func TestQueryLastCommitsWithDocIDProperty(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple latest commits query with dockey property", + Description: "Simple latest commits query with docID property", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -30,13 +30,13 @@ func TestQueryLastCommitsWithDockeyProperty(t *testing.T) { }, testUtils.Request{ Request: `query { - latestCommits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { - dockey + latestCommits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { + docID } }`, Results: []map[string]any{ { - "dockey": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", + "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", }, }, }, diff --git 
a/tests/integration/query/latest_commits/with_dockey_test.go b/tests/integration/query/latest_commits/with_doc_id_test.go
similarity index 58%
rename from tests/integration/query/latest_commits/with_dockey_test.go
rename to tests/integration/query/latest_commits/with_doc_id_test.go
index e07d34836f..55e0546cdf 100644
--- a/tests/integration/query/latest_commits/with_dockey_test.go
+++ b/tests/integration/query/latest_commits/with_doc_id_test.go
@@ -16,11 +16,11 @@ import (
 	testUtils "github.com/sourcenetwork/defradb/tests/integration"
 )
 
-func TestQueryLatestCommitsWithDocKey(t *testing.T) {
+func TestQueryLatestCommitsWithDocID(t *testing.T) {
 	test := testUtils.RequestTestCase{
-		Description: "Simple latest commits query with dockey",
+		Description: "Simple latest commits query with docID",
 		Request: `query {
-					latestCommits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") {
+					latestCommits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") {
 						cid
 						links {
 							cid
@@ -38,14 +38,14 @@ func TestQueryLatestCommitsWithDocKey(t *testing.T) {
 		},
 		Results: []map[string]any{
 			{
-				"cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq",
+				"cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa",
 				"links": []map[string]any{
 					{
-						"cid":  "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm",
+						"cid":  "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4",
 						"name": "age",
 					},
 					{
-						"cid":  "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy",
+						"cid":  "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi",
 						"name": "name",
 					},
 				},
@@ -56,11 +56,11 @@ func TestQueryLatestCommitsWithDocKey(t *testing.T) {
 	executeTestCase(t, test)
 }
 
-func TestQueryLatestCommitsWithDocKeyWithSchemaVersionIdField(t *testing.T) {
+func TestQueryLatestCommitsWithDocIDWithSchemaVersionIdField(t *testing.T) {
 	test := testUtils.RequestTestCase{
-		Description: "Simple latest commits query with dockey and schema versiion id field",
+		Description: "Simple latest commits query with docID and schema version id field",
 		Request: `query {
-					latestCommits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") {
+					latestCommits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") {
 						cid
 						schemaVersionId
 					}
@@ -75,8 +75,8 @@ func TestQueryLatestCommitsWithDocKeyWithSchemaVersionIdField(t *testing.T) {
 		},
 		Results: []map[string]any{
 			{
-				"cid":             "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq",
-				"schemaVersionId": "bafkreictcre4pylafzzoh5lpgbetdodunz4r6pz3ormdzzpsz2lqtp4v34",
+				"cid":             "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa",
+				"schemaVersionId": "bafkreiayhdsgzhmrz6t5d3x2cgqqbdjt7aqgldtlkmxn5eibg542j3n6ea",
 			},
 		},
 	}
diff --git a/tests/integration/query/latest_commits/with_field_test.go b/tests/integration/query/latest_commits/with_field_test.go
index 70c4635cd4..67ae607c47 100644
--- a/tests/integration/query/latest_commits/with_field_test.go
+++ b/tests/integration/query/latest_commits/with_field_test.go
@@ -39,7 +39,7 @@ func TestQueryLatestCommitsWithField(t *testing.T) {
 				}`,
 			},
 		},
-		ExpectedError: "Field \"latestCommits\" argument \"dockey\" of type \"ID!\" is required but not provided.",
+		ExpectedError: "Field \"latestCommits\" argument \"docID\" of type \"ID!\" is required but not provided.",
 	}
 
 	executeTestCase(t, test)
@@ -68,7 +68,7 @@ func TestQueryLatestCommitsWithFieldId(t *testing.T) {
 				}`,
 			},
 		},
-		ExpectedError: "Field \"latestCommits\" argument \"dockey\" of type \"ID!\" is required but not provided.",
+		ExpectedError: "Field \"latestCommits\" argument \"docID\"
of type \"ID!\" is required but not provided.", } executeTestCase(t, test) diff --git a/tests/integration/query/one_to_many/with_cid_dockey_test.go b/tests/integration/query/one_to_many/with_cid_doc_id_test.go similarity index 81% rename from tests/integration/query/one_to_many/with_cid_dockey_test.go rename to tests/integration/query/one_to_many/with_cid_doc_id_test.go index be7589c707..56c324802f 100644 --- a/tests/integration/query/one_to_many/with_cid_dockey_test.go +++ b/tests/integration/query/one_to_many/with_cid_doc_id_test.go @@ -18,13 +18,13 @@ import ( // This test is for documentation reasons only. This is not // desired behaviour (should just return empty). -// func TestQueryOneToManyWithUnknownCidAndDocKey(t *testing.T) { +// func TestQueryOneToManyWithUnknownCidAndDocID(t *testing.T) { // test := testUtils.RequestTestCase{ -// Description: "One-to-many relation query from one side with unknown cid and dockey", +// Description: "One-to-many relation query from one side with unknown cid and docID", // Request: `query { // Book ( // cid: "bafybeicgwjdyqyuntdop5ytpsfrqg5a4t2r25pfv6prfppl5ta5k5altca", -// dockey: "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" +// docID: "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" // ) { // name // author { @@ -63,13 +63,13 @@ import ( // testUtils.AssertPanic(t, func() { executeTestCase(t, test) }) // } -func TestQueryOneToManyWithCidAndDocKey(t *testing.T) { +func TestQueryOneToManyWithCidAndDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "One-to-many relation query from one side with cid and dockey", + Description: "One-to-many relation query from one side with cid and docID", Request: `query { Book ( - cid: "bafybeigq7vjp6btvgms2k6ajgtcvygv4bvejk5pin44jbib43rwqa2j64q" - dockey: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" + cid: "bafybeieugqrhaeyhlxo2l2b4jxcqq2ut4m3xtrm3qejz4zc4sxx4stoc5q", + docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name author { @@ -112,13 +112,13 @@ func TestQueryOneToManyWithCidAndDocKey(t *testing.T) { // desired behaviour (no way to get state of child a time of // parent creation without explicit child cid, which is also not tied // to parent state). 
-func TestQueryOneToManyWithChildUpdateAndFirstCidAndDocKey(t *testing.T) { +func TestQueryOneToManyWithChildUpdateAndFirstCidAndDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "One-to-many relation query from one side with child update and parent cid and dockey", + Description: "One-to-many relation query from one side with child update and parent cid and docID", Request: `query { Book ( - cid: "bafybeigq7vjp6btvgms2k6ajgtcvygv4bvejk5pin44jbib43rwqa2j64q", - dockey: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" + cid: "bafybeieugqrhaeyhlxo2l2b4jxcqq2ut4m3xtrm3qejz4zc4sxx4stoc5q", + docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name author { @@ -168,21 +168,22 @@ func TestQueryOneToManyWithChildUpdateAndFirstCidAndDocKey(t *testing.T) { executeTestCase(t, test) } -func TestQueryOneToManyWithParentUpdateAndFirstCidAndDocKey(t *testing.T) { +func TestQueryOneToManyWithParentUpdateAndFirstCidAndDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "One-to-many relation query from one side with parent update and parent cid and dockey", + Description: "One-to-many relation query from one side with parent update and parent cid and docID", Request: `query { - Book ( - cid: "bafybeigq7vjp6btvgms2k6ajgtcvygv4bvejk5pin44jbib43rwqa2j64q", - dockey: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" - ) { + Book ( + cid: "bafybeieugqrhaeyhlxo2l2b4jxcqq2ut4m3xtrm3qejz4zc4sxx4stoc5q", + docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" + ) { + name + rating + author { name - rating - author { - name - } } - }`, + } + }`, + Docs: map[int][]string{ //books 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d @@ -224,13 +225,13 @@ func TestQueryOneToManyWithParentUpdateAndFirstCidAndDocKey(t *testing.T) { executeTestCase(t, test) } -func TestQueryOneToManyWithParentUpdateAndLastCidAndDocKey(t *testing.T) { +func TestQueryOneToManyWithParentUpdateAndLastCidAndDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "One-to-many relation query from one side with parent update and parent cid and dockey", + Description: "One-to-many relation query from one side with parent update and parent cid and docID", Request: `query { Book ( - cid: "bafybeigukwqfzjxvuaok53gradxpvz7ag6l73b77lpjdcfglizmnv6zurq", - dockey: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" + cid: "bafybeifnz3yz3rkd2bc2uv6i7ucfdlqji5wevs5anziwpr76ia45ygtbk4", + docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name rating diff --git a/tests/integration/query/one_to_many/with_dockey_test.go b/tests/integration/query/one_to_many/with_doc_id_test.go similarity index 91% rename from tests/integration/query/one_to_many/with_dockey_test.go rename to tests/integration/query/one_to_many/with_doc_id_test.go index fd75677c1c..34021f53ad 100644 --- a/tests/integration/query/one_to_many/with_dockey_test.go +++ b/tests/integration/query/one_to_many/with_doc_id_test.go @@ -16,14 +16,14 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryOneToManyWithChildDocKey(t *testing.T) { +func TestQueryOneToManyWithChildDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "One-to-many relation query from one side with child dockey", + Description: "One-to-many relation query from one side with child docID", Request: `query { Author { name published ( - dockey: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" + docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name } diff --git a/tests/integration/query/one_to_many/with_dockeys_test.go 
b/tests/integration/query/one_to_many/with_doc_ids_test.go similarity index 90% rename from tests/integration/query/one_to_many/with_dockeys_test.go rename to tests/integration/query/one_to_many/with_doc_ids_test.go index 1c58e5947f..821a24c334 100644 --- a/tests/integration/query/one_to_many/with_dockeys_test.go +++ b/tests/integration/query/one_to_many/with_doc_ids_test.go @@ -16,14 +16,14 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryOneToManyWithChildDocKeys(t *testing.T) { +func TestQueryOneToManyWithChildDocIDs(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "One-to-many relation query from one side with child dockeys", + Description: "One-to-many relation query from one side with child docIDs", Request: `query { Author { name published ( - dockeys: ["bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d", "bae-4fb9e3e9-d1d3-5404-bf15-10e4c995d9ca"] + docIDs: ["bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d", "bae-4fb9e3e9-d1d3-5404-bf15-10e4c995d9ca"] ) { name } diff --git a/tests/integration/query/one_to_many/with_filter_related_id_test.go b/tests/integration/query/one_to_many/with_filter_related_id_test.go index 87c895e5c1..98c3af4b59 100644 --- a/tests/integration/query/one_to_many/with_filter_related_id_test.go +++ b/tests/integration/query/one_to_many/with_filter_related_id_test.go @@ -22,7 +22,7 @@ func TestQueryFromManySideWithEqFilterOnRelatedType(t *testing.T) { Description: "One-to-many query from many side with _eq filter on related field type.", Request: `query { - Book(filter: {author: {_key: {_eq: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3"}}}) { + Book(filter: {author: {_docID: {_eq: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3"}}}) { name } }`, @@ -178,7 +178,7 @@ func TestQueryFromManySideWithSameFiltersInDifferentWayOnRelatedType(t *testing. 
Request: `query { Book( filter: { - author: {_key: {_eq: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3"}}, + author: {_docID: {_eq: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3"}}, author_id: {_eq: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3"} } ) { @@ -258,7 +258,7 @@ func TestQueryFromSingleSideWithEqFilterOnRelatedType(t *testing.T) { Description: "One-to-many query from single side with _eq filter on related field type.", Request: `query { - Author(filter: {published: {_key: {_eq: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d"}}}) { + Author(filter: {published: {_docID: {_eq: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d"}}}) { name } }`, diff --git a/tests/integration/query/one_to_many/with_group_related_id_alias_test.go b/tests/integration/query/one_to_many/with_group_related_id_alias_test.go index 7c813d9359..9f17d2ffe7 100644 --- a/tests/integration/query/one_to_many/with_group_related_id_alias_test.go +++ b/tests/integration/query/one_to_many/with_group_related_id_alias_test.go @@ -170,7 +170,7 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeFromManySideUsingAliasAndRe Request: `query { Book(groupBy: [author]) { author { - _key + _docID name } _group { @@ -243,8 +243,8 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeFromManySideUsingAliasAndRe Results: []map[string]any{ { "author": map[string]any{ - "name": "Voltaire", - "_key": "bae-7accaba8-ea9d-54b1-92f4-4a7ac5de88b3", + "name": "Voltaire", + "_docID": "bae-7accaba8-ea9d-54b1-92f4-4a7ac5de88b3", }, "_group": []map[string]any{ { @@ -267,8 +267,8 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeFromManySideUsingAliasAndRe }, { "author": map[string]any{ - "name": "John Grisham", - "_key": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "name": "John Grisham", + "_docID": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", }, "_group": []map[string]any{ { @@ -299,8 +299,8 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeFromManySideUsingAliasAndRe }, { "author": map[string]any{ - "name": "Simon Pelloutier", - "_key": "bae-09d33399-197a-5b98-b135-4398f2b6de4c", + "name": "Simon Pelloutier", + "_docID": "bae-09d33399-197a-5b98-b135-4398f2b6de4c", }, "_group": []map[string]any{ { @@ -473,7 +473,7 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeWithIDSelectionFromManySide Book(groupBy: [author]) { author_id author { - _key + _docID name } _group { @@ -547,8 +547,8 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeWithIDSelectionFromManySide { "author_id": "bae-7accaba8-ea9d-54b1-92f4-4a7ac5de88b3", "author": map[string]any{ - "name": "Voltaire", - "_key": "bae-7accaba8-ea9d-54b1-92f4-4a7ac5de88b3", + "name": "Voltaire", + "_docID": "bae-7accaba8-ea9d-54b1-92f4-4a7ac5de88b3", }, "_group": []map[string]any{ { @@ -572,8 +572,8 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeWithIDSelectionFromManySide { "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", "author": map[string]any{ - "name": "John Grisham", - "_key": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "name": "John Grisham", + "_docID": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", }, "_group": []map[string]any{ { @@ -605,8 +605,8 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeWithIDSelectionFromManySide { "author_id": "bae-09d33399-197a-5b98-b135-4398f2b6de4c", "author": map[string]any{ - "name": "Simon Pelloutier", - "_key": "bae-09d33399-197a-5b98-b135-4398f2b6de4c", + "name": "Simon Pelloutier", + "_docID": "bae-09d33399-197a-5b98-b135-4398f2b6de4c", }, "_group": []map[string]any{ { diff --git 
a/tests/integration/query/one_to_many_to_many/joins_test.go b/tests/integration/query/one_to_many_to_many/joins_test.go index 492bb10731..f883f9ae9f 100644 --- a/tests/integration/query/one_to_many_to_many/joins_test.go +++ b/tests/integration/query/one_to_many_to_many/joins_test.go @@ -21,13 +21,13 @@ func TestOneToManyToManyJoinsAreLinkedProperly(t *testing.T) { Description: "1-N-M Query to ensure joins are linked properly.", Request: `query { Author { - _key + _docID name book { - _key + _docID name publisher { - _key + _docID name } } @@ -140,59 +140,59 @@ func TestOneToManyToManyJoinsAreLinkedProperly(t *testing.T) { Results: []map[string]any{ { - "name": "John Grisham", - "_key": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "name": "John Grisham", + "_docID": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", "book": []map[string]any{ { - "_key": "bae-4fb9e3e9-d1d3-5404-bf15-10e4c995d9ca", + "_docID": "bae-4fb9e3e9-d1d3-5404-bf15-10e4c995d9ca", "name": "The Associate", "publisher": []map[string]any{}, }, { - "_key": "bae-7ba73251-c935-5f44-ac04-d2061149cc14", - "name": "Sooley", + "_docID": "bae-7ba73251-c935-5f44-ac04-d2061149cc14", + "name": "Sooley", "publisher": []map[string]any{ { - "_key": "bae-cecb7841-fb4c-5403-a6d7-3654694dd073", - "name": "First of Two Publishers of Sooley", + "_docID": "bae-cecb7841-fb4c-5403-a6d7-3654694dd073", + "name": "First of Two Publishers of Sooley", }, { - "_key": "bae-d7e35ac3-dcf3-5537-91dd-3d27e378ba5d", - "name": "Second of Two Publishers of Sooley", + "_docID": "bae-d7e35ac3-dcf3-5537-91dd-3d27e378ba5d", + "name": "Second of Two Publishers of Sooley", }, }, }, { - "_key": "bae-b8091c4f-7594-5d7a-98e8-272aadcedfdf", - "name": "Theif Lord", + "_docID": "bae-b8091c4f-7594-5d7a-98e8-272aadcedfdf", + "name": "Theif Lord", "publisher": []map[string]any{ { - "_key": "bae-1a3ca715-3f3c-5934-9133-d7b489d57f88", - "name": "Only Publisher of Theif Lord", + "_docID": "bae-1a3ca715-3f3c-5934-9133-d7b489d57f88", + "name": "Only Publisher of Theif Lord", }, }, }, { - "_key": "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d", - "name": "Painted House", + "_docID": "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d", + "name": "Painted House", "publisher": []map[string]any{ { - "_key": "bae-6412f5ff-a69a-5472-8647-18bf2b247697", - "name": "Only Publisher of Painted House", + "_docID": "bae-6412f5ff-a69a-5472-8647-18bf2b247697", + "name": "Only Publisher of Painted House", }, }, }, { - "_key": "bae-c674e3b0-ebb6-5b89-bfa3-d1128288d21a", - "name": "A Time for Mercy", + "_docID": "bae-c674e3b0-ebb6-5b89-bfa3-d1128288d21a", + "name": "A Time for Mercy", "publisher": []map[string]any{ { - "_key": "bae-2f83fa75-241f-517d-9b47-3715feee43c1", - "name": "Only Publisher of A Time for Mercy", + "_docID": "bae-2f83fa75-241f-517d-9b47-3715feee43c1", + "name": "Only Publisher of A Time for Mercy", }, }, }, @@ -200,22 +200,22 @@ func TestOneToManyToManyJoinsAreLinkedProperly(t *testing.T) { }, { - "_key": "bae-7ba214a4-5ac8-5878-b221-dae6c285ef41", - "book": []map[string]any{}, - "name": "Not a Writer", + "_docID": "bae-7ba214a4-5ac8-5878-b221-dae6c285ef41", + "book": []map[string]any{}, + "name": "Not a Writer", }, { - "name": "Cornelia Funke", - "_key": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "name": "Cornelia Funke", + "_docID": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", "book": []map[string]any{ { - "_key": "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935", - "name": "The Rooster Bar", + "_docID": "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935", + "name": "The Rooster Bar", "publisher": 
[]map[string]any{ { - "_key": "bae-3f0f19eb-b292-5e0b-b885-67e7796375f9", - "name": "Only Publisher of The Rooster Bar", + "_docID": "bae-3f0f19eb-b292-5e0b-b885-67e7796375f9", + "name": "Only Publisher of The Rooster Bar", }, }, }, diff --git a/tests/integration/query/one_to_many_to_one/joins_test.go b/tests/integration/query/one_to_many_to_one/joins_test.go index e30b1b699a..57b76a15b9 100644 --- a/tests/integration/query/one_to_many_to_one/joins_test.go +++ b/tests/integration/query/one_to_many_to_one/joins_test.go @@ -153,13 +153,13 @@ func TestOneToManyToOneJoinsAreLinkedProperly(t *testing.T) { testUtils.Request{ Request: `query { Author { - _key + _docID name book { - _key + _docID name publisher { - _key + _docID name } } @@ -167,63 +167,63 @@ func TestOneToManyToOneJoinsAreLinkedProperly(t *testing.T) { }`, Results: []map[string]any{ { - "name": "John Grisham", - "_key": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "name": "John Grisham", + "_docID": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", "book": []map[string]any{ { - "_key": "bae-4fb9e3e9-d1d3-5404-bf15-10e4c995d9ca", + "_docID": "bae-4fb9e3e9-d1d3-5404-bf15-10e4c995d9ca", "name": "The Associate", "publisher": nil, }, { - "_key": "bae-7ba73251-c935-5f44-ac04-d2061149cc14", - "name": "Sooley", + "_docID": "bae-7ba73251-c935-5f44-ac04-d2061149cc14", + "name": "Sooley", "publisher": map[string]any{ - "_key": "bae-cd2a319a-e013-559e-aad9-282b48fd3f72", - "name": "Only Publisher of Sooley", + "_docID": "bae-cd2a319a-e013-559e-aad9-282b48fd3f72", + "name": "Only Publisher of Sooley", }, }, { - "_key": "bae-b8091c4f-7594-5d7a-98e8-272aadcedfdf", - "name": "Theif Lord", + "_docID": "bae-b8091c4f-7594-5d7a-98e8-272aadcedfdf", + "name": "Theif Lord", "publisher": map[string]any{ - "_key": "bae-1a3ca715-3f3c-5934-9133-d7b489d57f88", - "name": "Only Publisher of Theif Lord", + "_docID": "bae-1a3ca715-3f3c-5934-9133-d7b489d57f88", + "name": "Only Publisher of Theif Lord", }, }, { - "_key": "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d", - "name": "Painted House", + "_docID": "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d", + "name": "Painted House", "publisher": map[string]any{ - "_key": "bae-6412f5ff-a69a-5472-8647-18bf2b247697", - "name": "Only Publisher of Painted House", + "_docID": "bae-6412f5ff-a69a-5472-8647-18bf2b247697", + "name": "Only Publisher of Painted House", }, }, { - "_key": "bae-c674e3b0-ebb6-5b89-bfa3-d1128288d21a", - "name": "A Time for Mercy", + "_docID": "bae-c674e3b0-ebb6-5b89-bfa3-d1128288d21a", + "name": "A Time for Mercy", "publisher": map[string]any{ - "_key": "bae-2f83fa75-241f-517d-9b47-3715feee43c1", - "name": "Only Publisher of A Time for Mercy", + "_docID": "bae-2f83fa75-241f-517d-9b47-3715feee43c1", + "name": "Only Publisher of A Time for Mercy", }, }, }, }, { - "_key": "bae-7ba214a4-5ac8-5878-b221-dae6c285ef41", - "book": []map[string]any{}, - "name": "Not a Writer", + "_docID": "bae-7ba214a4-5ac8-5878-b221-dae6c285ef41", + "book": []map[string]any{}, + "name": "Not a Writer", }, { - "name": "Cornelia Funke", - "_key": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "name": "Cornelia Funke", + "_docID": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", "book": []map[string]any{ { - "_key": "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935", - "name": "The Rooster Bar", + "_docID": "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935", + "name": "The Rooster Bar", "publisher": map[string]any{ - "_key": "bae-3f0f19eb-b292-5e0b-b885-67e7796375f9", - "name": "Only Publisher of The Rooster Bar", + "_docID": 
"bae-3f0f19eb-b292-5e0b-b885-67e7796375f9", + "name": "Only Publisher of The Rooster Bar", }, }, }, diff --git a/tests/integration/query/simple/simple_test.go b/tests/integration/query/simple/simple_test.go index 6911b08ea8..abdc0cd1f3 100644 --- a/tests/integration/query/simple/simple_test.go +++ b/tests/integration/query/simple/simple_test.go @@ -21,7 +21,7 @@ func TestQuerySimple(t *testing.T) { Description: "Simple query with no filter", Request: `query { Users { - _key + _docID Name Age } @@ -36,9 +36,9 @@ func TestQuerySimple(t *testing.T) { }, Results: []map[string]any{ { - "_key": "bae-52b9170d-b77a-5887-b877-cbdbb99b009f", - "Name": "John", - "Age": int64(21), + "_docID": "bae-52b9170d-b77a-5887-b877-cbdbb99b009f", + "Name": "John", + "Age": int64(21), }, }, } diff --git a/tests/integration/query/simple/with_cid_dockey_test.go b/tests/integration/query/simple/with_cid_doc_id_test.go similarity index 67% rename from tests/integration/query/simple/with_cid_dockey_test.go rename to tests/integration/query/simple/with_cid_doc_id_test.go index 7bd1eb4971..1fa00d05d1 100644 --- a/tests/integration/query/simple/with_cid_dockey_test.go +++ b/tests/integration/query/simple/with_cid_doc_id_test.go @@ -16,13 +16,13 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQuerySimpleWithInvalidCidAndInvalidDocKey(t *testing.T) { +func TestQuerySimpleWithInvalidCidAndInvalidDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with invalid cid and invalid dockey", + Description: "Simple query with invalid cid and invalid docID", Request: `query { Users ( cid: "any non-nil string value - this will be ignored", - dockey: "invalid docKey" + docID: "invalid docID" ) { Name } @@ -43,13 +43,13 @@ func TestQuerySimpleWithInvalidCidAndInvalidDocKey(t *testing.T) { // This test is for documentation reasons only. This is not // desired behaviour (should just return empty). 
-func TestQuerySimpleWithUnknownCidAndInvalidDocKey(t *testing.T) { +func TestQuerySimpleWithUnknownCidAndInvalidDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with unknown cid and invalid dockey", + Description: "Simple query with unknown cid and invalid docID", Request: `query { Users ( cid: "bafybeid57gpbwi4i6bg7g357vwwyzsmr4bjo22rmhoxrwqvdxlqxcgaqvu", - dockey: "invalid docKey" + docID: "invalid docID" ) { Name } @@ -68,13 +68,13 @@ func TestQuerySimpleWithUnknownCidAndInvalidDocKey(t *testing.T) { executeTestCase(t, test) } -func TestQuerySimpleWithCidAndDocKey(t *testing.T) { +func TestQuerySimpleWithCidAndDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with cid and dockey", + Description: "Simple query with cid and docID", Request: `query { Users ( - cid: "bafybeieybepwqpy5h2d4sywksgvdqpjd44ciu223vrm7knumychpmucawy", - dockey: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" + cid: "bafybeiealfslrqsbiwotlducidmesjaemiq2hb7y2bxkcwc7bppuceujui", + docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" ) { Name } @@ -97,13 +97,13 @@ func TestQuerySimpleWithCidAndDocKey(t *testing.T) { executeTestCase(t, test) } -func TestQuerySimpleWithUpdateAndFirstCidAndDocKey(t *testing.T) { +func TestQuerySimpleWithUpdateAndFirstCidAndDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with (first) cid and dockey", + Description: "Simple query with (first) cid and docID", Request: `query { Users ( - cid: "bafybeieybepwqpy5h2d4sywksgvdqpjd44ciu223vrm7knumychpmucawy", - dockey: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" + cid: "bafybeiealfslrqsbiwotlducidmesjaemiq2hb7y2bxkcwc7bppuceujui", + docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" ) { Name Age @@ -138,13 +138,13 @@ func TestQuerySimpleWithUpdateAndFirstCidAndDocKey(t *testing.T) { executeTestCase(t, test) } -func TestQuerySimpleWithUpdateAndLastCidAndDocKey(t *testing.T) { +func TestQuerySimpleWithUpdateAndLastCidAndDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with (last) cid and dockey", + Description: "Simple query with (last) cid and docID", Request: `query { Users ( - cid: "bafybeiav54zfepx5n2zcm2g34q5ur5w2dosb2ssxjckq3esy5dg6nftxse" - dockey: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" + cid: "bafybeibnj6yitgmynodaxnvtl22rhzclhsrc5asmocwyccsbsamobibpsy", + docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" ) { Name Age @@ -179,13 +179,13 @@ func TestQuerySimpleWithUpdateAndLastCidAndDocKey(t *testing.T) { executeTestCase(t, test) } -func TestQuerySimpleWithUpdateAndMiddleCidAndDocKey(t *testing.T) { +func TestQuerySimpleWithUpdateAndMiddleCidAndDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with (middle) cid and dockey", + Description: "Simple query with (middle) cid and docID", Request: `query { Users ( - cid: "bafybeicrati3sbl3esju7eus3dwi53aggd6thhtporh7vj5mv77vvs3mdy", - dockey: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" + cid: "bafybeify36bauenmsov4rijdmency367boy234mjezpvg4dj6r47ay3jwq", + docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" ) { Name Age @@ -220,21 +220,22 @@ func TestQuerySimpleWithUpdateAndMiddleCidAndDocKey(t *testing.T) { executeTestCase(t, test) } -func TestQuerySimpleWithUpdateAndFirstCidAndDocKeyAndSchemaVersion(t *testing.T) { +func TestQuerySimpleWithUpdateAndFirstCidAndDocIDAndSchemaVersion(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with (first) cid and dockey and yielded schema version", + 
Description: "Simple query with (first) cid and docID and yielded schema version", Request: `query { - Users ( - cid: "bafybeieybepwqpy5h2d4sywksgvdqpjd44ciu223vrm7knumychpmucawy", - dockey: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" - ) { - Name - Age - _version { - schemaVersionId - } - } - }`, + Users ( + cid: "bafybeiealfslrqsbiwotlducidmesjaemiq2hb7y2bxkcwc7bppuceujui", + docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" + ) { + Name + Age + _version { + schemaVersionId + } + } + }`, + Docs: map[int][]string{ 0: { `{ @@ -259,7 +260,7 @@ func TestQuerySimpleWithUpdateAndFirstCidAndDocKeyAndSchemaVersion(t *testing.T) "Age": int64(21), "_version": []map[string]any{ { - "schemaVersionId": "bafkreicqyapc7zxw5tt2ymybau5m54lhmm5ahrl22oaktnhidul757a4ba", + "schemaVersionId": "bafkreidvd63bawkelxe3wtf7a65klkq4x3dvenqafyasndyal6fvffkeam", }, }, }, diff --git a/tests/integration/query/simple/with_key_test.go b/tests/integration/query/simple/with_doc_id_filter_test.go similarity index 79% rename from tests/integration/query/simple/with_key_test.go rename to tests/integration/query/simple/with_doc_id_filter_test.go index f6854da643..5477665e1c 100644 --- a/tests/integration/query/simple/with_key_test.go +++ b/tests/integration/query/simple/with_doc_id_filter_test.go @@ -16,11 +16,11 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQuerySimpleWithKeyFilterBlock(t *testing.T) { +func TestQuerySimpleWithDocIDFilterBlock(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with basic filter (key by filter block)", + Description: "Simple query with basic filter (docID by filter block)", Request: `query { - Users(filter: {_key: {_eq: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f"}}) { + Users(filter: {_docID: {_eq: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f"}}) { Name Age } diff --git a/tests/integration/query/simple/with_dockey_test.go b/tests/integration/query/simple/with_doc_id_test.go similarity index 73% rename from tests/integration/query/simple/with_dockey_test.go rename to tests/integration/query/simple/with_doc_id_test.go index 5af4dac7ab..6067baea38 100644 --- a/tests/integration/query/simple/with_dockey_test.go +++ b/tests/integration/query/simple/with_doc_id_test.go @@ -16,12 +16,12 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQuerySimpleWithDocKeyFilter(t *testing.T) { +func TestQuerySimpleWithDocIDFilter(t *testing.T) { tests := []testUtils.RequestTestCase{ { - Description: "Simple query with basic filter (key by DocKey arg)", + Description: "Simple query with basic filter (by docID arg)", Request: `query { - Users(dockey: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f") { + Users(docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f") { Name Age } @@ -42,9 +42,9 @@ func TestQuerySimpleWithDocKeyFilter(t *testing.T) { }, }, { - Description: "Simple query with basic filter (key by DocKey arg), no results", + Description: "Simple query with basic filter (by docID arg), no results", Request: `query { - Users(dockey: "bae-52b9170d-b77a-5887-b877-cbdbb99b009g") { + Users(docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009g") { Name Age } @@ -60,9 +60,9 @@ func TestQuerySimpleWithDocKeyFilter(t *testing.T) { Results: []map[string]any{}, }, { - Description: "Simple query with basic filter (key by DocKey arg), partial results", + Description: "Simple query with basic filter (by docID arg), partial results", Request: `query { - Users(dockey: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f") { + Users(docID: 
"bae-52b9170d-b77a-5887-b877-cbdbb99b009f") { Name Age } diff --git a/tests/integration/query/simple/with_dockeys_test.go b/tests/integration/query/simple/with_doc_ids_test.go similarity index 68% rename from tests/integration/query/simple/with_dockeys_test.go rename to tests/integration/query/simple/with_doc_ids_test.go index 8bbd0067da..c28fb5d075 100644 --- a/tests/integration/query/simple/with_dockeys_test.go +++ b/tests/integration/query/simple/with_doc_ids_test.go @@ -16,12 +16,12 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQuerySimpleWithDocKeysFilter(t *testing.T) { +func TestQuerySimpleWithDocIDsFilter(t *testing.T) { tests := []testUtils.RequestTestCase{ { - Description: "Simple query with basic filter (single key by DocKeys arg)", + Description: "Simple query with basic filter (single ID by docIDs arg)", Request: `query { - Users(dockeys: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009f"]) { + Users(docIDs: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009f"]) { Name Age } @@ -42,9 +42,9 @@ func TestQuerySimpleWithDocKeysFilter(t *testing.T) { }, }, { - Description: "Simple query with basic filter (single key by DocKeys arg), no results", + Description: "Simple query with basic filter (single ID by docIDs arg), no results", Request: `query { - Users(dockeys: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009g"]) { + Users(docIDs: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009g"]) { Name Age } @@ -60,9 +60,9 @@ func TestQuerySimpleWithDocKeysFilter(t *testing.T) { Results: []map[string]any{}, }, { - Description: "Simple query with basic filter (duplicate key by DocKeys arg), partial results", + Description: "Simple query with basic filter (duplicate ID by docIDs arg), partial results", Request: `query { - Users(dockeys: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009f", "bae-52b9170d-b77a-5887-b877-cbdbb99b009f"]) { + Users(docIDs: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009f", "bae-52b9170d-b77a-5887-b877-cbdbb99b009f"]) { Name Age } @@ -87,9 +87,9 @@ func TestQuerySimpleWithDocKeysFilter(t *testing.T) { }, }, { - Description: "Simple query with basic filter (multiple key by DocKeys arg), partial results", + Description: "Simple query with basic filter (multiple ID by docIDs arg), partial results", Request: `query { - Users(dockeys: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009f", "bae-1378ab62-e064-5af4-9ea6-49941c8d8f94"]) { + Users(docIDs: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009f", "bae-1378ab62-e064-5af4-9ea6-49941c8d8f94"]) { Name Age } @@ -128,11 +128,11 @@ func TestQuerySimpleWithDocKeysFilter(t *testing.T) { } } -func TestQuerySimpleReturnsNothinGivenEmptyDocKeysFilter(t *testing.T) { +func TestQuerySimpleReturnsNothinGivenEmptyDocIDsFilter(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with empty DocKeys arg", + Description: "Simple query with empty docIDs arg", Request: `query { - Users(dockeys: []) { + Users(docIDs: []) { Name Age } diff --git a/tests/integration/query/simple/with_group_dockey_test.go b/tests/integration/query/simple/with_group_doc_id_test.go similarity index 85% rename from tests/integration/query/simple/with_group_dockey_test.go rename to tests/integration/query/simple/with_group_doc_id_test.go index c40a27efc2..177934ebdc 100644 --- a/tests/integration/query/simple/with_group_dockey_test.go +++ b/tests/integration/query/simple/with_group_doc_id_test.go @@ -16,13 +16,13 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQuerySimpleWithGroupByWithGroupWithDocKey(t *testing.T) { 
+func TestQuerySimpleWithGroupByWithGroupWithDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with DocKey filter on _group", + Description: "Simple query with docID filter on _group", Request: `query { Users(groupBy: [Age]) { Age - _group(dockey: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f") { + _group(docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f") { Name } } diff --git a/tests/integration/query/simple/with_group_dockeys_test.go b/tests/integration/query/simple/with_group_doc_ids_test.go similarity index 83% rename from tests/integration/query/simple/with_group_dockeys_test.go rename to tests/integration/query/simple/with_group_doc_ids_test.go index 8d11607819..9db3bae934 100644 --- a/tests/integration/query/simple/with_group_dockeys_test.go +++ b/tests/integration/query/simple/with_group_doc_ids_test.go @@ -16,13 +16,13 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQuerySimpleWithGroupByWithGroupWithDocKeys(t *testing.T) { +func TestQuerySimpleWithGroupByWithGroupWithDocIDs(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with DocKeys filter on _group", + Description: "Simple query with docIDs filter on _group", Request: `query { Users(groupBy: [Age]) { Age - _group(dockeys: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009f", "bae-9b2e1434-9d61-5eb1-b3b9-82e8e40729a7"]) { + _group(docIDs: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009f", "bae-9b2e1434-9d61-5eb1-b3b9-82e8e40729a7"]) { Name } } diff --git a/tests/integration/query/simple/with_version_test.go b/tests/integration/query/simple/with_version_test.go index 868d3b54af..2aa571eff7 100644 --- a/tests/integration/query/simple/with_version_test.go +++ b/tests/integration/query/simple/with_version_test.go @@ -46,14 +46,14 @@ func TestQuerySimpleWithEmbeddedLatestCommit(t *testing.T) { "Age": int64(21), "_version": []map[string]any{ { - "cid": "bafybeieybepwqpy5h2d4sywksgvdqpjd44ciu223vrm7knumychpmucawy", + "cid": "bafybeiealfslrqsbiwotlducidmesjaemiq2hb7y2bxkcwc7bppuceujui", "links": []map[string]any{ { - "cid": "bafybeibphw52n3t5nn2xn32sfdsf4hbll3iddsc6or2ebnnrmpz2cbovyy", + "cid": "bafybeigpazmunkmlf5p5jw6fl4supfslupgp2kksvqr7quvhfhsddfa44e", "name": "Age", }, { - "cid": "bafybeifgqmrklbyw3x35zzzao3d7baownrv3z4v7vzfbmk2r5omv5icgu4", + "cid": "bafybeibxsjz4krbv3jcbobpdm2igdcvunitu332o6ebsxup53wglkyn6ee", "name": "Name", }, }, @@ -90,7 +90,7 @@ func TestQuerySimpleWithEmbeddedLatestCommitWithSchemaVersionId(t *testing.T) { "Name": "John", "_version": []map[string]any{ { - "schemaVersionId": "bafkreicqyapc7zxw5tt2ymybau5m54lhmm5ahrl22oaktnhidul757a4ba", + "schemaVersionId": "bafkreidvd63bawkelxe3wtf7a65klkq4x3dvenqafyasndyal6fvffkeam", }, }, }, @@ -100,17 +100,17 @@ func TestQuerySimpleWithEmbeddedLatestCommitWithSchemaVersionId(t *testing.T) { executeTestCase(t, test) } -func TestQuerySimpleWithEmbeddedLatestCommitWithDockey(t *testing.T) { - const dockey = "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" +func TestQuerySimpleWithEmbeddedLatestCommitWithDocID(t *testing.T) { + const docID = "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" test := testUtils.RequestTestCase{ - Description: "Embedded commits query within object query with dockey", + Description: "Embedded commits query within object query with document ID", Request: `query { Users { Name - _key + _docID _version { - dockey + docID } } }`, @@ -124,11 +124,11 @@ func TestQuerySimpleWithEmbeddedLatestCommitWithDockey(t *testing.T) { }, Results: []map[string]any{ { - "Name": "John", - 
"_key": dockey, + "Name": "John", + "_docID": docID, "_version": []map[string]any{ { - "dockey": dockey, + "docID": docID, }, }, }, @@ -171,14 +171,14 @@ func TestQuerySimpleWithMultipleAliasedEmbeddedLatestCommit(t *testing.T) { "Age": int64(21), "_version": []map[string]any{ { - "cid": "bafybeieybepwqpy5h2d4sywksgvdqpjd44ciu223vrm7knumychpmucawy", + "cid": "bafybeiealfslrqsbiwotlducidmesjaemiq2hb7y2bxkcwc7bppuceujui", "L1": []map[string]any{ { - "cid": "bafybeibphw52n3t5nn2xn32sfdsf4hbll3iddsc6or2ebnnrmpz2cbovyy", + "cid": "bafybeigpazmunkmlf5p5jw6fl4supfslupgp2kksvqr7quvhfhsddfa44e", "name": "Age", }, { - "cid": "bafybeifgqmrklbyw3x35zzzao3d7baownrv3z4v7vzfbmk2r5omv5icgu4", + "cid": "bafybeibxsjz4krbv3jcbobpdm2igdcvunitu332o6ebsxup53wglkyn6ee", "name": "Name", }, }, diff --git a/tests/integration/schema/aggregates/inline_array_test.go b/tests/integration/schema/aggregates/inline_array_test.go index f5c6199e39..75c9d76414 100644 --- a/tests/integration/schema/aggregates/inline_array_test.go +++ b/tests/integration/schema/aggregates/inline_array_test.go @@ -405,7 +405,7 @@ var aggregateGroupArg = map[string]any{ }, }, map[string]any{ - "name": "_key", + "name": "_docID", "type": map[string]any{ "name": "IDOperatorBlock", }, diff --git a/tests/integration/schema/default_fields.go b/tests/integration/schema/default_fields.go index 96a3b98a56..97671738fb 100644 --- a/tests/integration/schema/default_fields.go +++ b/tests/integration/schema/default_fields.go @@ -10,7 +10,11 @@ package schema -import "sort" +import ( + "sort" + + "github.com/sourcenetwork/defradb/client/request" +) type Field = map[string]any type fields []Field @@ -76,7 +80,7 @@ var DefaultEmbeddedObjFields = concat( ) var keyField = Field{ - "name": "_key", + "name": "_docID", "type": map[string]any{ "kind": "SCALAR", "name": "ID", @@ -138,15 +142,15 @@ var cidArg = Field{ "inputFields": nil, }, } -var dockeyArg = Field{ - "name": "dockey", +var docIDArg = Field{ + "name": request.DocIDArgName, "type": map[string]any{ "name": "String", "inputFields": nil, }, } -var dockeysArg = Field{ - "name": "dockeys", +var docIDsArg = Field{ + "name": request.DocIDsArgName, "type": map[string]any{ "name": nil, "inputFields": nil, @@ -201,7 +205,7 @@ type argDef struct { func buildOrderArg(objectName string, fields []argDef) Field { inputFields := []any{ - makeInputObject("_key", "Ordering", nil), + makeInputObject("_docID", "Ordering", nil), } for _, field := range fields { @@ -226,7 +230,7 @@ func buildFilterArg(objectName string, fields []argDef) Field { "kind": "INPUT_OBJECT", "name": filterArgName, }), - makeInputObject("_key", "IDOperatorBlock", nil), + makeInputObject("_docID", "IDOperatorBlock", nil), makeInputObject("_not", filterArgName, nil), makeInputObject("_or", nil, map[string]any{ "kind": "INPUT_OBJECT", diff --git a/tests/integration/schema/filter_test.go b/tests/integration/schema/filter_test.go index 17f38408bf..e3780a3653 100644 --- a/tests/integration/schema/filter_test.go +++ b/tests/integration/schema/filter_test.go @@ -76,7 +76,7 @@ func TestFilterForSimpleSchema(t *testing.T) { }, }, map[string]any{ - "name": "_key", + "name": "_docID", "type": map[string]any{ "name": "IDOperatorBlock", "ofType": nil, @@ -132,8 +132,8 @@ var testFilterForSimpleSchemaArgProps = map[string]any{ var defaultUserArgsWithoutFilter = trimFields( fields{ cidArg, - dockeyArg, - dockeysArg, + docIDArg, + docIDsArg, showDeletedArg, groupByArg, limitArg, @@ -214,7 +214,7 @@ func TestFilterForOneToOneSchema(t *testing.T) { }, }, 
map[string]any{ - "name": "_key", + "name": "_docID", "type": map[string]any{ "name": "IDOperatorBlock", "ofType": nil, @@ -284,8 +284,8 @@ var testFilterForOneToOneSchemaArgProps = map[string]any{ var defaultBookArgsWithoutFilter = trimFields( fields{ cidArg, - dockeyArg, - dockeysArg, + docIDArg, + docIDsArg, showDeletedArg, groupByArg, limitArg, diff --git a/tests/integration/schema/get_schema_test.go b/tests/integration/schema/get_schema_test.go index e6d5f166ac..ae63d49812 100644 --- a/tests/integration/schema/get_schema_test.go +++ b/tests/integration/schema/get_schema_test.go @@ -71,9 +71,9 @@ func TestGetSchema_GivenNoSchemaGivenUnknownName(t *testing.T) { } func TestGetSchema_ReturnsAllSchema(t *testing.T) { - usersSchemaVersion1ID := "bafkreickgf3nbjaairxkkqawmrv7fafaafyccl4qygqeveagisdn42eohu" - usersSchemaVersion2ID := "bafkreicseqwxooxo2wf2bgzdalwtm2rtsj7x4mgsir4rp4htmpnwnffwre" - booksSchemaVersion1ID := "bafkreigbfibfn7g6neen2gghc54dzocexefi7vshc3opgvy6j7jflar2nm" + usersSchemaVersion1ID := "bafkreicavrlknsnfqey6nfwthyiguvv4dqcwhvywl5j6socx3vvjt4zqte" + usersSchemaVersion2ID := "bafkreiabmj6ypcc6alqswrscgpj6rqbhogsojgv7fopr5rgrluvxtwente" + booksSchemaVersion1ID := "bafkreiaiku34mjr2za5yo6yc4pzoupenwzjq7d5pclgfdiihdnjq33fn5y" test := testUtils.TestCase{ Actions: []any{ @@ -97,25 +97,14 @@ func TestGetSchema_ReturnsAllSchema(t *testing.T) { }, testUtils.GetSchema{ ExpectedResults: []client.SchemaDescription{ - { - Name: "Users", - Root: usersSchemaVersion1ID, - VersionID: usersSchemaVersion1ID, - Fields: []client.FieldDescription{ - { - Name: "_key", - Kind: client.FieldKind_DocKey, - }, - }, - }, { Name: "Users", Root: usersSchemaVersion1ID, VersionID: usersSchemaVersion2ID, Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, }, { @@ -132,8 +121,19 @@ func TestGetSchema_ReturnsAllSchema(t *testing.T) { VersionID: booksSchemaVersion1ID, Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, + }, + }, + }, + { + Name: "Users", + Root: usersSchemaVersion1ID, + VersionID: usersSchemaVersion1ID, + Fields: []client.FieldDescription{ + { + Name: "_docID", + Kind: client.FieldKind_DocID, }, }, }, @@ -146,8 +146,8 @@ func TestGetSchema_ReturnsAllSchema(t *testing.T) { } func TestGetSchema_ReturnsSchemaForGivenRoot(t *testing.T) { - usersSchemaVersion1ID := "bafkreickgf3nbjaairxkkqawmrv7fafaafyccl4qygqeveagisdn42eohu" - usersSchemaVersion2ID := "bafkreicseqwxooxo2wf2bgzdalwtm2rtsj7x4mgsir4rp4htmpnwnffwre" + usersSchemaVersion1ID := "bafkreicavrlknsnfqey6nfwthyiguvv4dqcwhvywl5j6socx3vvjt4zqte" + usersSchemaVersion2ID := "bafkreiabmj6ypcc6alqswrscgpj6rqbhogsojgv7fopr5rgrluvxtwente" test := testUtils.TestCase{ Actions: []any{ @@ -172,25 +172,14 @@ func TestGetSchema_ReturnsSchemaForGivenRoot(t *testing.T) { testUtils.GetSchema{ Root: immutable.Some(usersSchemaVersion1ID), ExpectedResults: []client.SchemaDescription{ - { - Name: "Users", - Root: usersSchemaVersion1ID, - VersionID: usersSchemaVersion1ID, - Fields: []client.FieldDescription{ - { - Name: "_key", - Kind: client.FieldKind_DocKey, - }, - }, - }, { Name: "Users", Root: usersSchemaVersion1ID, VersionID: usersSchemaVersion2ID, Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, }, { @@ -201,6 +190,17 @@ func 
TestGetSchema_ReturnsSchemaForGivenRoot(t *testing.T) { }, }, }, + { + Name: "Users", + Root: usersSchemaVersion1ID, + VersionID: usersSchemaVersion1ID, + Fields: []client.FieldDescription{ + { + Name: "_docID", + Kind: client.FieldKind_DocID, + }, + }, + }, }, }, }, @@ -210,8 +210,8 @@ func TestGetSchema_ReturnsSchemaForGivenRoot(t *testing.T) { } func TestGetSchema_ReturnsSchemaForGivenName(t *testing.T) { - usersSchemaVersion1ID := "bafkreickgf3nbjaairxkkqawmrv7fafaafyccl4qygqeveagisdn42eohu" - usersSchemaVersion2ID := "bafkreicseqwxooxo2wf2bgzdalwtm2rtsj7x4mgsir4rp4htmpnwnffwre" + usersSchemaVersion1ID := "bafkreicavrlknsnfqey6nfwthyiguvv4dqcwhvywl5j6socx3vvjt4zqte" + usersSchemaVersion2ID := "bafkreiabmj6ypcc6alqswrscgpj6rqbhogsojgv7fopr5rgrluvxtwente" test := testUtils.TestCase{ Actions: []any{ @@ -236,25 +236,14 @@ func TestGetSchema_ReturnsSchemaForGivenName(t *testing.T) { testUtils.GetSchema{ Name: immutable.Some("Users"), ExpectedResults: []client.SchemaDescription{ - { - Name: "Users", - Root: usersSchemaVersion1ID, - VersionID: usersSchemaVersion1ID, - Fields: []client.FieldDescription{ - { - Name: "_key", - Kind: client.FieldKind_DocKey, - }, - }, - }, { Name: "Users", Root: usersSchemaVersion1ID, VersionID: usersSchemaVersion2ID, Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, }, { @@ -265,6 +254,17 @@ func TestGetSchema_ReturnsSchemaForGivenName(t *testing.T) { }, }, }, + { + Name: "Users", + Root: usersSchemaVersion1ID, + VersionID: usersSchemaVersion1ID, + Fields: []client.FieldDescription{ + { + Name: "_docID", + Kind: client.FieldKind_DocID, + }, + }, + }, }, }, }, diff --git a/tests/integration/schema/group_test.go b/tests/integration/schema/group_test.go index 35a5171c73..5ac89b95ec 100644 --- a/tests/integration/schema/group_test.go +++ b/tests/integration/schema/group_test.go @@ -62,7 +62,7 @@ func TestGroupByFieldForTheManySideInSchema(t *testing.T) { // Internal fields. map[string]any{"name": "_deleted"}, map[string]any{"name": "_group"}, - map[string]any{"name": "_key"}, + map[string]any{"name": "_docID"}, map[string]any{"name": "_version"}, // User defined schema fields> @@ -124,7 +124,7 @@ func TestGroupByFieldForTheSingleSideInSchema(t *testing.T) { // Internal fields. 
map[string]any{"name": "_deleted"}, map[string]any{"name": "_group"}, - map[string]any{"name": "_key"}, + map[string]any{"name": "_docID"}, map[string]any{"name": "_version"}, // User defined schema fields> diff --git a/tests/integration/schema/input_type_test.go b/tests/integration/schema/input_type_test.go index e50920dc3b..c9798ffc9f 100644 --- a/tests/integration/schema/input_type_test.go +++ b/tests/integration/schema/input_type_test.go @@ -77,8 +77,8 @@ func TestInputTypeOfOrderFieldWhereSchemaHasManyRelationType(t *testing.T) { "args": append( trimFields( fields{ - dockeyArg, - dockeysArg, + docIDArg, + docIDsArg, buildFilterArg("group", []argDef{ { fieldName: "members", @@ -98,7 +98,7 @@ func TestInputTypeOfOrderFieldWhereSchemaHasManyRelationType(t *testing.T) { "ofType": nil, "inputFields": []any{ map[string]any{ - "name": "_key", + "name": "_docID", "type": map[string]any{ "name": "Ordering", "ofType": nil, @@ -187,7 +187,7 @@ func TestInputTypeOfOrderFieldWhereSchemaHasRelationType(t *testing.T) { "ofType": nil, "inputFields": []any{ map[string]any{ - "name": "_key", + "name": "_docID", "type": map[string]any{ "name": "Ordering", "ofType": nil, @@ -258,8 +258,8 @@ var testInputTypeOfOrderFieldWhereSchemaHasRelationTypeArgProps = map[string]any var defaultGroupArgsWithoutOrder = trimFields( fields{ - dockeyArg, - dockeysArg, + docIDArg, + docIDsArg, buildFilterArg("author", []argDef{ { fieldName: "age", diff --git a/tests/integration/schema/migrations/query/simple_test.go b/tests/integration/schema/migrations/query/simple_test.go index a13fd32be9..b758356cac 100644 --- a/tests/integration/schema/migrations/query/simple_test.go +++ b/tests/integration/schema/migrations/query/simple_test.go @@ -45,8 +45,8 @@ func TestSchemaMigrationQuery(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -115,8 +115,8 @@ func TestSchemaMigrationQueryMultipleDocs(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -178,8 +178,8 @@ func TestSchemaMigrationQueryWithMigrationRegisteredBeforeSchemaPatch(t *testing }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -254,8 +254,8 @@ func TestSchemaMigrationQueryMigratesToIntermediaryVersion(t *testing.T) { // Register a migration from schema version 1 to schema version 2 
**only** - // there should be no migration from version 2 to version 3. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -325,7 +325,7 @@ func TestSchemaMigrationQueryMigratesFromIntermediaryVersion(t *testing.T) { // Register a migration from schema version 2 to schema version 3 **only** - // there should be no migration from version 1 to version 2. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", DestinationSchemaVersionID: "bafkreigrpkox3omi3c3sp5zoupcjg2b32mysztjozaqsceafsdtkadzufe", Lens: model.Lens{ Lenses: []model.LensModule{ @@ -394,8 +394,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersions(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -411,7 +411,7 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersions(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", DestinationSchemaVersionID: "bafkreigrpkox3omi3c3sp5zoupcjg2b32mysztjozaqsceafsdtkadzufe", Lens: model.Lens{ Lenses: []model.LensModule{ @@ -539,8 +539,8 @@ func TestSchemaMigrationQueryMigrationMutatesExistingScalarField(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -600,8 +600,8 @@ func TestSchemaMigrationQueryMigrationMutatesExistingInlineArrayField(t *testing }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreidvca2kcxlxab2wi25xhiyxmug66css4cqzqqxu4rdyuanl7u5rde", - DestinationSchemaVersionID: "bafkreiedmg3qox3a24rkhkx3wahahpyixlxkicetsk3ctkh3f7xcbdrrli", + SourceSchemaVersionID: "bafkreigjtl5r3lq6dkbod766let7ewqirc2ai6l2c5j5fxxc43zmvqqs24", + DestinationSchemaVersionID: "bafkreicwipnhoplttqy7spj2ksgk7vwmxmdtwt6g23os2kmqgvb22wfg3m", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -663,8 +663,8 @@ func TestSchemaMigrationQueryMigrationRemovesExistingField(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiggbvwwiqmzid4qnklwwdyu7mwhbbjy3ejss3x7uw7zxw6ivmmj6u", - 
DestinationSchemaVersionID: "bafkreiat3mfdsoknsavvw3wbir4atbaswqbnnitn3ysswqih2g4zwbn62a", + SourceSchemaVersionID: "bafkreicnoqat3exmvikr36xu3hhrkvay3d3cif24tezgsyvrydpobk2nqm", + DestinationSchemaVersionID: "bafkreigamaevrkcknutb275x3uxpgc2sn73qsfvkjqli7fiqaxfnniunjy", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -724,8 +724,8 @@ func TestSchemaMigrationQueryMigrationPreservesExistingFieldWhenFieldNotRequeste }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiggbvwwiqmzid4qnklwwdyu7mwhbbjy3ejss3x7uw7zxw6ivmmj6u", - DestinationSchemaVersionID: "bafkreiat3mfdsoknsavvw3wbir4atbaswqbnnitn3ysswqih2g4zwbn62a", + SourceSchemaVersionID: "bafkreicnoqat3exmvikr36xu3hhrkvay3d3cif24tezgsyvrydpobk2nqm", + DestinationSchemaVersionID: "bafkreigamaevrkcknutb275x3uxpgc2sn73qsfvkjqli7fiqaxfnniunjy", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -798,8 +798,8 @@ func TestSchemaMigrationQueryMigrationCopiesExistingFieldWhenSrcFieldNotRequeste }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiggbvwwiqmzid4qnklwwdyu7mwhbbjy3ejss3x7uw7zxw6ivmmj6u", - DestinationSchemaVersionID: "bafkreiat3mfdsoknsavvw3wbir4atbaswqbnnitn3ysswqih2g4zwbn62a", + SourceSchemaVersionID: "bafkreicnoqat3exmvikr36xu3hhrkvay3d3cif24tezgsyvrydpobk2nqm", + DestinationSchemaVersionID: "bafkreigamaevrkcknutb275x3uxpgc2sn73qsfvkjqli7fiqaxfnniunjy", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -860,8 +860,8 @@ func TestSchemaMigrationQueryMigrationCopiesExistingFieldWhenSrcAndDstFieldNotRe }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiggbvwwiqmzid4qnklwwdyu7mwhbbjy3ejss3x7uw7zxw6ivmmj6u", - DestinationSchemaVersionID: "bafkreiat3mfdsoknsavvw3wbir4atbaswqbnnitn3ysswqih2g4zwbn62a", + SourceSchemaVersionID: "bafkreicnoqat3exmvikr36xu3hhrkvay3d3cif24tezgsyvrydpobk2nqm", + DestinationSchemaVersionID: "bafkreigamaevrkcknutb275x3uxpgc2sn73qsfvkjqli7fiqaxfnniunjy", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_dockey_test.go b/tests/integration/schema/migrations/query/with_doc_id_test.go similarity index 81% rename from tests/integration/schema/migrations/query/with_dockey_test.go rename to tests/integration/schema/migrations/query/with_doc_id_test.go index a3a983d8bb..2ce1fd8ac3 100644 --- a/tests/integration/schema/migrations/query/with_dockey_test.go +++ b/tests/integration/schema/migrations/query/with_doc_id_test.go @@ -21,9 +21,9 @@ import ( ) // This test asserts that spans are being passed correctly through the new Lens fetcher. 
-func TestSchemaMigrationQueryByDocKey(t *testing.T) { +func TestSchemaMigrationQueryByDocID(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema migration, query by key", + Description: "Test schema migration, query by docID", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -52,8 +52,8 @@ func TestSchemaMigrationQueryByDocKey(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -69,7 +69,7 @@ func TestSchemaMigrationQueryByDocKey(t *testing.T) { }, testUtils.Request{ Request: `query { - Users (dockey: "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5") { + Users (docID: "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5") { name verified } @@ -88,7 +88,7 @@ func TestSchemaMigrationQueryByDocKey(t *testing.T) { } // This test asserts that lenses are being correctly returned to the pool for reuse after -// fetch completion. Querying by dockey should mean that the fetcher only scans the dockey +// fetch completion. Querying by docID should mean that the fetcher only scans the docID // prefix, and thus will only migrate a single document per query (unlike filters etc which // will migrate all documents at the time of writing). If the return mechanic was very faulty // then this test *should* deadlock. @@ -99,9 +99,9 @@ func TestSchemaMigrationQueryByDocKey(t *testing.T) { // // At the time of writing, the lens pool size is hardcoded to 5, so we should test with 6 // documents/queries, if the size changes so should this test. 
-func TestSchemaMigrationQueryMultipleQueriesByDocKey(t *testing.T) { +func TestSchemaMigrationQueryMultipleQueriesByDocID(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema migration, multiple queries by key", + Description: "Test schema migration, multiple queries by docID", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -158,8 +158,8 @@ func TestSchemaMigrationQueryMultipleQueriesByDocKey(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -175,7 +175,7 @@ func TestSchemaMigrationQueryMultipleQueriesByDocKey(t *testing.T) { }, testUtils.Request{ Request: `query { - Users (dockey: "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5") { + Users (docID: "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5") { name verified } @@ -189,7 +189,7 @@ func TestSchemaMigrationQueryMultipleQueriesByDocKey(t *testing.T) { }, testUtils.Request{ Request: `query { - Users (dockey: "bae-92393ad0-07b6-5753-8dbb-19c9c41374ed") { + Users (docID: "bae-92393ad0-07b6-5753-8dbb-19c9c41374ed") { name verified } @@ -203,7 +203,7 @@ func TestSchemaMigrationQueryMultipleQueriesByDocKey(t *testing.T) { }, testUtils.Request{ Request: `query { - Users (dockey: "bae-403d7337-f73e-5c81-8719-e853938c8985") { + Users (docID: "bae-403d7337-f73e-5c81-8719-e853938c8985") { name verified } @@ -217,7 +217,7 @@ func TestSchemaMigrationQueryMultipleQueriesByDocKey(t *testing.T) { }, testUtils.Request{ Request: `query { - Users (dockey: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad") { + Users (docID: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad") { name verified } @@ -231,7 +231,7 @@ func TestSchemaMigrationQueryMultipleQueriesByDocKey(t *testing.T) { }, testUtils.Request{ Request: `query { - Users (dockey: "bae-3f1174ba-d9bc-5a6a-b0bc-8f19581f199d") { + Users (docID: "bae-3f1174ba-d9bc-5a6a-b0bc-8f19581f199d") { name verified } @@ -245,7 +245,7 @@ func TestSchemaMigrationQueryMultipleQueriesByDocKey(t *testing.T) { }, testUtils.Request{ Request: `query { - Users (dockey: "bae-0698bda7-2c69-5028-a26a-0a1c491b793b") { + Users (docID: "bae-0698bda7-2c69-5028-a26a-0a1c491b793b") { name verified } diff --git a/tests/integration/schema/migrations/query/with_p2p_test.go b/tests/integration/schema/migrations/query/with_p2p_test.go index 0fc5d2da79..4b06bf6586 100644 --- a/tests/integration/schema/migrations/query/with_p2p_test.go +++ b/tests/integration/schema/migrations/query/with_p2p_test.go @@ -46,8 +46,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtOlderSchemaVersion(t *testing testUtils.ConfigureMigration{ // Register the migration on both nodes. 
LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiguj3z5egyieou3li6aeyhabgtpk4mtl6nr7jjmyoihc6dmdg6vbu", - DestinationSchemaVersionID: "bafkreidj4ipbeqhqn7at7du4vhzk3aw4xswbwccwqhbcab6avlgdeu6w2a", + SourceSchemaVersionID: "bafkreibgg4ex7aya4w4x3dnrlyov4juyuffjjokzkjrpoupncfuvsyi6du", + DestinationSchemaVersionID: "bafkreidvp3xozpau2zanh7s5or4fhr7kchm6klznsyzd7fpcm3sh2xlgfm", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -136,8 +136,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtNewerSchemaVersion(t *testing testUtils.ConfigureMigration{ // Register the migration on both nodes. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiguj3z5egyieou3li6aeyhabgtpk4mtl6nr7jjmyoihc6dmdg6vbu", - DestinationSchemaVersionID: "bafkreidj4ipbeqhqn7at7du4vhzk3aw4xswbwccwqhbcab6avlgdeu6w2a", + SourceSchemaVersionID: "bafkreibgg4ex7aya4w4x3dnrlyov4juyuffjjokzkjrpoupncfuvsyi6du", + DestinationSchemaVersionID: "bafkreidvp3xozpau2zanh7s5or4fhr7kchm6klznsyzd7fpcm3sh2xlgfm", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -238,7 +238,7 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtMuchNewerSchemaVersionWithSch // Register a migration from version 2 to version 3 on both nodes. // There is no migration from version 1 to 2, thus node 1 has no knowledge of schema version 2. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", DestinationSchemaVersionID: "bafkreigrpkox3omi3c3sp5zoupcjg2b32mysztjozaqsceafsdtkadzufe", Lens: model.Lens{ Lenses: []model.LensModule{ diff --git a/tests/integration/schema/migrations/query/with_restart_test.go b/tests/integration/schema/migrations/query/with_restart_test.go index deac59c725..3b51c92ada 100644 --- a/tests/integration/schema/migrations/query/with_restart_test.go +++ b/tests/integration/schema/migrations/query/with_restart_test.go @@ -45,8 +45,8 @@ func TestSchemaMigrationQueryWithRestart(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_set_default_test.go b/tests/integration/schema/migrations/query/with_set_default_test.go index 9d61d609e7..55a3fc9968 100644 --- a/tests/integration/schema/migrations/query/with_set_default_test.go +++ b/tests/integration/schema/migrations/query/with_set_default_test.go @@ -22,7 +22,7 @@ import ( ) func TestSchemaMigrationQuery_WithSetDefaultToLatest_AppliesForwardMigration(t *testing.T) { - schemaVersionID2 := "bafkreidj4ipbeqhqn7at7du4vhzk3aw4xswbwccwqhbcab6avlgdeu6w2a" + schemaVersionID2 := "bafkreidvp3xozpau2zanh7s5or4fhr7kchm6klznsyzd7fpcm3sh2xlgfm" test := testUtils.TestCase{ Description: "Test schema migration", @@ -50,7 +50,7 @@ func TestSchemaMigrationQuery_WithSetDefaultToLatest_AppliesForwardMigration(t * }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiguj3z5egyieou3li6aeyhabgtpk4mtl6nr7jjmyoihc6dmdg6vbu", + SourceSchemaVersionID: "bafkreibgg4ex7aya4w4x3dnrlyov4juyuffjjokzkjrpoupncfuvsyi6du", 
DestinationSchemaVersionID: schemaVersionID2, Lens: model.Lens{ Lenses: []model.LensModule{ @@ -89,8 +89,8 @@ func TestSchemaMigrationQuery_WithSetDefaultToLatest_AppliesForwardMigration(t * } func TestSchemaMigrationQuery_WithSetDefaultToOriginal_AppliesInverseMigration(t *testing.T) { - schemaVersionID1 := "bafkreiguj3z5egyieou3li6aeyhabgtpk4mtl6nr7jjmyoihc6dmdg6vbu" - schemaVersionID2 := "bafkreidj4ipbeqhqn7at7du4vhzk3aw4xswbwccwqhbcab6avlgdeu6w2a" + schemaVersionID1 := "bafkreibgg4ex7aya4w4x3dnrlyov4juyuffjjokzkjrpoupncfuvsyi6du" + schemaVersionID2 := "bafkreidvp3xozpau2zanh7s5or4fhr7kchm6klznsyzd7fpcm3sh2xlgfm" test := testUtils.TestCase{ Description: "Test schema migration", @@ -164,8 +164,8 @@ func TestSchemaMigrationQuery_WithSetDefaultToOriginal_AppliesInverseMigration(t } func TestSchemaMigrationQuery_WithSetDefaultToOriginalVersionThatDocWasCreatedAt_ClearsMigrations(t *testing.T) { - schemaVersionID1 := "bafkreiguj3z5egyieou3li6aeyhabgtpk4mtl6nr7jjmyoihc6dmdg6vbu" - schemaVersionID2 := "bafkreidj4ipbeqhqn7at7du4vhzk3aw4xswbwccwqhbcab6avlgdeu6w2a" + schemaVersionID1 := "bafkreibgg4ex7aya4w4x3dnrlyov4juyuffjjokzkjrpoupncfuvsyi6du" + schemaVersionID2 := "bafkreidvp3xozpau2zanh7s5or4fhr7kchm6klznsyzd7fpcm3sh2xlgfm" test := testUtils.TestCase{ Description: "Test schema migration", diff --git a/tests/integration/schema/migrations/query/with_txn_test.go b/tests/integration/schema/migrations/query/with_txn_test.go index fcd01d6748..4bb0395365 100644 --- a/tests/integration/schema/migrations/query/with_txn_test.go +++ b/tests/integration/schema/migrations/query/with_txn_test.go @@ -47,8 +47,8 @@ func TestSchemaMigrationQueryWithTxn(t *testing.T) { testUtils.ConfigureMigration{ TransactionID: immutable.Some(0), LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -109,8 +109,8 @@ func TestSchemaMigrationQueryWithTxnAndCommit(t *testing.T) { testUtils.ConfigureMigration{ TransactionID: immutable.Some(0), LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_update_test.go b/tests/integration/schema/migrations/query/with_update_test.go index 478ffd8e24..9fbf2b914a 100644 --- a/tests/integration/schema/migrations/query/with_update_test.go +++ b/tests/integration/schema/migrations/query/with_update_test.go @@ -45,8 +45,8 @@ func TestSchemaMigrationQueryWithUpdateRequest(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: 
"bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -129,8 +129,8 @@ func TestSchemaMigrationQueryWithMigrationRegisteredAfterUpdate(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/simple_test.go b/tests/integration/schema/migrations/simple_test.go index c1982f5325..29769f1bac 100644 --- a/tests/integration/schema/migrations/simple_test.go +++ b/tests/integration/schema/migrations/simple_test.go @@ -91,8 +91,8 @@ func TestSchemaMigrationGetMigrationsReturnsMultiple(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -124,8 +124,8 @@ func TestSchemaMigrationGetMigrationsReturnsMultiple(t *testing.T) { }, }, { - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/simple_test.go b/tests/integration/schema/simple_test.go index 9fa0eb021a..ed8e05abf7 100644 --- a/tests/integration/schema/simple_test.go +++ b/tests/integration/schema/simple_test.go @@ -20,7 +20,7 @@ import ( ) func TestSchemaSimpleCreatesSchemaGivenEmptyType(t *testing.T) { - schemaVersionID := "bafkreickgf3nbjaairxkkqawmrv7fafaafyccl4qygqeveagisdn42eohu" + schemaVersionID := "bafkreicavrlknsnfqey6nfwthyiguvv4dqcwhvywl5j6socx3vvjt4zqte" test := testUtils.TestCase{ Actions: []any{ @@ -52,8 +52,8 @@ func TestSchemaSimpleCreatesSchemaGivenEmptyType(t *testing.T) { Root: schemaVersionID, Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, }, }, }, diff --git a/tests/integration/schema/updates/add/field/create_test.go b/tests/integration/schema/updates/add/field/create_test.go index d59df6c294..af771d025c 100644 --- a/tests/integration/schema/updates/add/field/create_test.go +++ b/tests/integration/schema/updates/add/field/create_test.go @@ -43,16 +43,16 @@ func TestSchemaUpdatesAddFieldWithCreate(t *testing.T) { testUtils.Request{ Request: `query { Users { - _key + _docID name email } }`, Results: []map[string]any{ { - "_key": "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad", - "name": "John", - "email": nil, + "_docID": "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad", + "name": "John", + "email": nil, }, }, }, @@ -98,21 +98,21 @@ func TestSchemaUpdatesAddFieldWithCreateAfterSchemaUpdate(t 
*testing.T) { testUtils.Request{ Request: `query { Users { - _key + _docID name email } }`, Results: []map[string]any{ { - "_key": "bae-1ff978e7-b6ab-5ca7-8344-7fdcff65f94e", - "name": "Shahzad", - "email": "sqlizded@yahoo.ca", + "_docID": "bae-1ff978e7-b6ab-5ca7-8344-7fdcff65f94e", + "name": "Shahzad", + "email": "sqlizded@yahoo.ca", }, { - "_key": "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad", - "name": "John", - "email": nil, + "_docID": "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad", + "name": "John", + "email": nil, }, }, }, diff --git a/tests/integration/schema/updates/add/field/create_update_test.go b/tests/integration/schema/updates/add/field/create_update_test.go index 7cf8af8480..12cf973d59 100644 --- a/tests/integration/schema/updates/add/field/create_update_test.go +++ b/tests/integration/schema/updates/add/field/create_update_test.go @@ -17,8 +17,8 @@ import ( ) func TestSchemaUpdatesAddFieldWithCreateWithUpdateAfterSchemaUpdateAndVersionJoin(t *testing.T) { - initialSchemaVersionId := "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq" - updatedSchemaVersionId := "bafkreid5bpw7sipm63l5gxxjrs34yrq2ur5xrzyseez5rnj3pvnvkaya6m" + initialSchemaVersionId := "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a" + updatedSchemaVersionId := "bafkreiclwd4nrvczrzy7aj52olojyzvgm4ht6jpktwpxuqej5wk3ocxpqi" test := testUtils.TestCase{ Description: "Test schema update, add field with update after schema update, version join", @@ -105,8 +105,8 @@ func TestSchemaUpdatesAddFieldWithCreateWithUpdateAfterSchemaUpdateAndVersionJoi } func TestSchemaUpdatesAddFieldWithCreateWithUpdateAfterSchemaUpdateAndCommitQuery(t *testing.T) { - initialSchemaVersionId := "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq" - updatedSchemaVersionId := "bafkreid5bpw7sipm63l5gxxjrs34yrq2ur5xrzyseez5rnj3pvnvkaya6m" + initialSchemaVersionId := "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a" + updatedSchemaVersionId := "bafkreiclwd4nrvczrzy7aj52olojyzvgm4ht6jpktwpxuqej5wk3ocxpqi" test := testUtils.TestCase{ Description: "Test schema update, add field with update after schema update, commits query", diff --git a/tests/integration/schema/updates/add/field/kind/dockey_test.go b/tests/integration/schema/updates/add/field/kind/doc_id_test.go similarity index 83% rename from tests/integration/schema/updates/add/field/kind/dockey_test.go rename to tests/integration/schema/updates/add/field/kind/doc_id_test.go index 6d8aca4736..edac43150f 100644 --- a/tests/integration/schema/updates/add/field/kind/dockey_test.go +++ b/tests/integration/schema/updates/add/field/kind/doc_id_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestSchemaUpdatesAddFieldKindDocKey(t *testing.T) { +func TestSchemaUpdatesAddFieldKindDocID(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind DocKey (1)", + Description: "Test schema update, add field with kind DocID (1)", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -48,9 +48,9 @@ func TestSchemaUpdatesAddFieldKindDocKey(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestSchemaUpdatesAddFieldKindDocKeyWithCreate(t *testing.T) { +func TestSchemaUpdatesAddFieldKindDocIDWithCreate(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind DocKey (1) and create", + Description: "Test schema update, add field with kind DocID (1) and create", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -92,9 +92,9 @@ func 
TestSchemaUpdatesAddFieldKindDocKeyWithCreate(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestSchemaUpdatesAddFieldKindDocKeySubstitutionWithCreate(t *testing.T) { +func TestSchemaUpdatesAddFieldKindDocIDSubstitutionWithCreate(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind DocKey substitution and create", + Description: "Test schema update, add field with kind DocID substitution and create", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` diff --git a/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go b/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go index a3dc12fb13..fb14d6ef30 100644 --- a/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go +++ b/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go @@ -470,12 +470,12 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_Succeeds(t *testing.T) { testUtils.Request{ Request: `mutation { create_Users(data: "{\"name\": \"John\"}") { - _key + _docID } }`, Results: []map[string]any{ { - "_key": key1, + "_docID": key1, }, }, }, diff --git a/tests/integration/schema/updates/add/field/kind/foreign_object_test.go b/tests/integration/schema/updates/add/field/kind/foreign_object_test.go index 21afdec279..abaa1d4564 100644 --- a/tests/integration/schema/updates/add/field/kind/foreign_object_test.go +++ b/tests/integration/schema/updates/add/field/kind/foreign_object_test.go @@ -470,12 +470,12 @@ func TestSchemaUpdatesAddFieldKindForeignObject_Succeeds(t *testing.T) { testUtils.Request{ Request: `mutation { create_Users(data: "{\"name\": \"John\"}") { - _key + _docID } }`, Results: []map[string]any{ { - "_key": key1, + "_docID": key1, }, }, }, diff --git a/tests/integration/schema/updates/add/field/simple_test.go b/tests/integration/schema/updates/add/field/simple_test.go index 69ddfd1734..04bafb2694 100644 --- a/tests/integration/schema/updates/add/field/simple_test.go +++ b/tests/integration/schema/updates/add/field/simple_test.go @@ -20,8 +20,8 @@ import ( ) func TestSchemaUpdatesAddFieldSimple(t *testing.T) { - schemaVersion1ID := "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq" - schemaVersion2ID := "bafkreid5bpw7sipm63l5gxxjrs34yrq2ur5xrzyseez5rnj3pvnvkaya6m" + schemaVersion1ID := "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a" + schemaVersion2ID := "bafkreiclwd4nrvczrzy7aj52olojyzvgm4ht6jpktwpxuqej5wk3ocxpqi" test := testUtils.TestCase{ Description: "Test schema update, add field", @@ -58,8 +58,8 @@ func TestSchemaUpdatesAddFieldSimple(t *testing.T) { Root: schemaVersion1ID, Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, }, { @@ -117,8 +117,8 @@ func TestSchemaUpdates_AddFieldSimpleDoNotSetDefault_Errors(t *testing.T) { } func TestSchemaUpdates_AddFieldSimpleDoNotSetDefault_VersionIsQueryable(t *testing.T) { - schemaVersion1ID := "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq" - schemaVersion2ID := "bafkreid5bpw7sipm63l5gxxjrs34yrq2ur5xrzyseez5rnj3pvnvkaya6m" + schemaVersion1ID := "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a" + schemaVersion2ID := "bafkreiclwd4nrvczrzy7aj52olojyzvgm4ht6jpktwpxuqej5wk3ocxpqi" test := testUtils.TestCase{ Description: "Test schema update, add field", @@ -149,8 +149,8 @@ func TestSchemaUpdates_AddFieldSimpleDoNotSetDefault_VersionIsQueryable(t *testi Root: schemaVersion1ID, 
Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, }, { diff --git a/tests/integration/schema/updates/move/simple_test.go b/tests/integration/schema/updates/move/simple_test.go index e6d8bb1556..e16226c1cf 100644 --- a/tests/integration/schema/updates/move/simple_test.go +++ b/tests/integration/schema/updates/move/simple_test.go @@ -17,7 +17,7 @@ import ( ) func TestSchemaUpdatesMoveCollectionDoesNothing(t *testing.T) { - schemaVersionID := "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq" + schemaVersionID := "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a" test := testUtils.TestCase{ Description: "Test schema update, move collection", diff --git a/tests/integration/schema/with_update_set_default_test.go b/tests/integration/schema/with_update_set_default_test.go index 55242d7a2a..602e6d48d6 100644 --- a/tests/integration/schema/with_update_set_default_test.go +++ b/tests/integration/schema/with_update_set_default_test.go @@ -92,7 +92,7 @@ func TestSchema_WithUpdateAndSetDefaultVersionToOriginal_NewFieldIsNotQueriable( SetAsDefaultVersion: immutable.Some(false), }, testUtils.SetDefaultSchemaVersion{ - SchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", + SchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", }, testUtils.Request{ Request: `query { @@ -129,7 +129,7 @@ func TestSchema_WithUpdateAndSetDefaultVersionToNew_AllowsQueryingOfNewField(t * SetAsDefaultVersion: immutable.Some(false), }, testUtils.SetDefaultSchemaVersion{ - SchemaVersionID: "bafkreid5bpw7sipm63l5gxxjrs34yrq2ur5xrzyseez5rnj3pvnvkaya6m", + SchemaVersionID: "bafkreiclwd4nrvczrzy7aj52olojyzvgm4ht6jpktwpxuqej5wk3ocxpqi", }, testUtils.Request{ Request: `query { diff --git a/tests/integration/subscription/subscription_test.go b/tests/integration/subscription/subscription_test.go index 7d51a240ad..49f8bf1f55 100644 --- a/tests/integration/subscription/subscription_test.go +++ b/tests/integration/subscription/subscription_test.go @@ -23,21 +23,21 @@ func TestSubscriptionWithCreateMutations(t *testing.T) { testUtils.SubscriptionRequest{ Request: `subscription { User { - _key + _docID name age } }`, Results: []map[string]any{ { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", - "age": int64(27), - "name": "John", + "_docID": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", + "age": int64(27), + "name": "John", }, { - "_key": "bae-18def051-7f0f-5dc9-8a69-2a5e423f6b55", - "age": int64(31), - "name": "Addo", + "_docID": "bae-18def051-7f0f-5dc9-8a69-2a5e423f6b55", + "age": int64(31), + "name": "Addo", }, }, }, @@ -78,16 +78,16 @@ func TestSubscriptionWithFilterAndOneCreateMutation(t *testing.T) { testUtils.SubscriptionRequest{ Request: `subscription { User(filter: {age: {_lt: 30}}) { - _key + _docID name age } }`, Results: []map[string]any{ { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", - "age": int64(27), - "name": "John", + "_docID": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", + "age": int64(27), + "name": "John", }, }, }, @@ -116,7 +116,7 @@ func TestSubscriptionWithFilterAndOneCreateMutationOutsideFilter(t *testing.T) { testUtils.SubscriptionRequest{ Request: `subscription { User(filter: {age: {_gt: 30}}) { - _key + _docID name age } @@ -148,16 +148,16 @@ func TestSubscriptionWithFilterAndCreateMutations(t *testing.T) { testUtils.SubscriptionRequest{ Request: `subscription { User(filter: {age: {_lt: 30}}) { - _key + _docID name age } 
}`, Results: []map[string]any{ { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", - "age": int64(27), - "name": "John", + "_docID": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", + "age": int64(27), + "name": "John", }, }, }, @@ -216,7 +216,7 @@ func TestSubscriptionWithUpdateMutations(t *testing.T) { testUtils.SubscriptionRequest{ Request: `subscription { User { - _key + _docID name age points @@ -224,7 +224,7 @@ func TestSubscriptionWithUpdateMutations(t *testing.T) { }`, Results: []map[string]any{ { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", + "_docID": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", "age": int64(27), "name": "John", "points": float64(45), @@ -274,7 +274,7 @@ func TestSubscriptionWithUpdateAllMutations(t *testing.T) { testUtils.SubscriptionRequest{ Request: `subscription { User { - _key + _docID name age points @@ -282,13 +282,13 @@ func TestSubscriptionWithUpdateAllMutations(t *testing.T) { }`, Results: []map[string]any{ { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", + "_docID": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", "age": int64(27), "name": "John", "points": float64(55), }, { - "_key": "bae-cf723876-5c6a-5dcf-a877-ab288eb30d57", + "_docID": "bae-cf723876-5c6a-5dcf-a877-ab288eb30d57", "age": int64(31), "name": "Addo", "points": float64(55), diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 2ae73ddeca..ccfeba4d7a 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -829,7 +829,7 @@ func refreshDocuments( // Just use the collection from the first relevant node, as all will be the same for this // purpose. collection := getNodeCollections(action.NodeID, s.collections)[0][action.CollectionID] - if err := doc.RemapAliasFieldsAndDockey(collection.Schema().Fields); err != nil { + if err := doc.RemapAliasFieldsAndDocID(collection.Schema().Fields); err != nil { // If an err has been returned, ignore it - it may be expected and if not // the test will fail later anyway continue @@ -837,7 +837,7 @@ func refreshDocuments( // The document may have been mutated by other actions, so to be sure we have the latest // version without having to worry about the individual update mechanics we fetch it. 
- doc, err = collection.Get(s.ctx, doc.Key(), false) + doc, err = collection.Get(s.ctx, doc.ID(), false) if err != nil { // If an err has been returned, ignore it - it may be expected and if not // the test will fail later anyway @@ -1155,7 +1155,7 @@ func createDocViaGQL( request := fmt.Sprintf( `mutation { create_%s(data: %s) { - _key + _docID } }`, collection.Name(), @@ -1174,11 +1174,11 @@ func createDocViaGQL( return nil, nil } - docKeyString := resultantDocs[0]["_key"].(string) - docKey, err := client.NewDocKeyFromString(docKeyString) + docIDString := resultantDocs[0]["_docID"].(string) + docID, err := client.NewDocIDFromString(docIDString) require.NoError(s.t, err) - doc, err := collection.Get(s.ctx, docKey, false) + doc, err := collection.Get(s.ctx, docID, false) require.NoError(s.t, err) return doc, nil @@ -1199,7 +1199,7 @@ func deleteDoc( actionNodes, nodeID, func() error { - _, err := collections[action.CollectionID].DeleteWithKey(s.ctx, doc.Key()) + _, err := collections[action.CollectionID].DeleteWithDocID(s.ctx, doc.ID()) return err }, ) @@ -1287,12 +1287,12 @@ func updateDocViaGQL( request := fmt.Sprintf( `mutation { - update_%s(id: "%s", data: %s) { - _key + update_%s(docID: "%s", data: %s) { + _docID } }`, collection.Name(), - doc.Key().String(), + doc.ID().String(), escapedJson, ) diff --git a/tests/predefined/gen_predefined.go b/tests/predefined/gen_predefined.go index 4adce90805..9dc6da0dd6 100644 --- a/tests/predefined/gen_predefined.go +++ b/tests/predefined/gen_predefined.go @@ -110,7 +110,7 @@ type docGenerator struct { func toRequestedDoc(doc map[string]any, typeDef *client.CollectionDefinition) map[string]any { result := make(map[string]any) for _, field := range typeDef.Schema.Fields { - if field.IsRelation() || field.Name == request.KeyFieldName { + if field.IsRelation() || field.Name == request.DocIDFieldName { continue } result[field.Name] = doc[field.Name] @@ -123,7 +123,7 @@ func toRequestedDoc(doc map[string]any, typeDef *client.CollectionDefinition) ma return result } -// generatePrimary generates primary docs for the given secondary doc and adds foreign keys +// generatePrimary generates primary docs for the given secondary doc and adds foreign docIDs // to the secondary doc to reference the primary docs. func (this *docGenerator) generatePrimary( secDocMap map[string]any, @@ -145,13 +145,13 @@ func (this *docGenerator) generatePrimary( if err != nil { return nil, nil, NewErrFailedToGenerateDoc(err) } - docKey := primDoc.Key().String() - requestedSecondary[secDocField.Name+request.RelatedObjectID] = docKey + docID := primDoc.ID().String() + requestedSecondary[secDocField.Name+request.RelatedObjectID] = docID subResult = append(subResult, gen.GeneratedDoc{Col: &primType, Doc: primDoc}) result = append(result, subResult...) secondaryDocs, err := this.generateSecondaryDocs( - secDocMapField.(map[string]any), docKey, &primType, secType.Description.Name) + secDocMapField.(map[string]any), docID, &primType, secType.Description.Name) if err != nil { return nil, nil, err } @@ -164,12 +164,12 @@ func (this *docGenerator) generatePrimary( } // generateRelatedDocs generates related docs (primary and secondary) for the given doc and -// adds foreign keys to the given doc to reference the primary docs. +// adds foreign docIDs to the given doc to reference the primary docs.
func (this *docGenerator) generateRelatedDocs(docMap map[string]any, typeName string) ([]gen.GeneratedDoc, error) { typeDef := this.types[typeName] // create first primary docs and link them to the given doc so that we can define - // dockey for the complete document. + // docID for the complete document. requested, result, err := this.generatePrimary(docMap, &typeDef) if err != nil { return nil, err @@ -181,7 +181,7 @@ func (this *docGenerator) generateRelatedDocs(docMap map[string]any, typeName st result = append(result, gen.GeneratedDoc{Col: &typeDef, Doc: doc}) - secondaryDocs, err := this.generateSecondaryDocs(docMap, doc.Key().String(), &typeDef, "") + secondaryDocs, err := this.generateSecondaryDocs(docMap, doc.ID().String(), &typeDef, "") if err != nil { return nil, err } @@ -191,7 +191,7 @@ func (this *docGenerator) generateRelatedDocs(docMap map[string]any, typeName st func (this *docGenerator) generateSecondaryDocs( primaryDocMap map[string]any, - docKey string, + docID string, primaryType *client.CollectionDefinition, parentTypeName string, ) ([]gen.GeneratedDoc, error) { @@ -202,7 +202,7 @@ func (this *docGenerator) generateSecondaryDocs( if !field.IsPrimaryRelation() && (parentTypeName == "" || parentTypeName != field.Schema) { docs, err := this.generateSecondaryDocsForField( - primaryDocMap, primaryType.Description.Name, &field, docKey) + primaryDocMap, primaryType.Description.Name, &field, docID) if err != nil { return nil, err } @@ -219,7 +219,7 @@ func (this *docGenerator) generateSecondaryDocsForField( primaryDoc map[string]any, primaryTypeName string, relField *client.FieldDescription, - primaryDocKey string, + primaryDocID string, ) ([]gen.GeneratedDoc, error) { result := []gen.GeneratedDoc{} relTypeDef := this.types[relField.Schema] @@ -230,7 +230,7 @@ func (this *docGenerator) generateSecondaryDocsForField( switch relVal := primaryDoc[relField.Name].(type) { case []map[string]any: for _, relDoc := range relVal { - relDoc[primaryPropName] = primaryDocKey + relDoc[primaryPropName] = primaryDocID actions, err := this.generateRelatedDocs(relDoc, relTypeDef.Description.Name) if err != nil { return nil, err @@ -238,7 +238,7 @@ func (this *docGenerator) generateSecondaryDocsForField( result = append(result, actions...) 
} case map[string]any: - relVal[primaryPropName] = primaryDocKey + relVal[primaryPropName] = primaryDocID actions, err := this.generateRelatedDocs(relVal, relTypeDef.Description.Name) if err != nil { return nil, err diff --git a/tests/predefined/gen_predefined_test.go b/tests/predefined/gen_predefined_test.go index 1092280015..b63617690d 100644 --- a/tests/predefined/gen_predefined_test.go +++ b/tests/predefined/gen_predefined_test.go @@ -36,7 +36,7 @@ func TestGeneratePredefinedFromSchema_Simple(t *testing.T) { docs, err := CreateFromSDL(schema, docsList) assert.NoError(t, err) - errorMsg := assertDocs(mustAddKeysToDocs(docsList.Docs), docs) + errorMsg := assertDocs(mustAddDocIDsToDocs(docsList.Docs), docs) if errorMsg != "" { t.Error(errorMsg) } @@ -57,7 +57,7 @@ func TestGeneratePredefinedFromSchema_StripExcessiveFields(t *testing.T) { }) assert.NoError(t, err) - errorMsg := assertDocs(mustAddKeysToDocs([]map[string]any{ + errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ {"name": "John"}, {"name": "Fred"}, }), docs) @@ -96,11 +96,11 @@ func TestGeneratePredefinedFromSchema_OneToOne(t *testing.T) { }) assert.NoError(t, err) - errorMsg := assertDocs(mustAddKeysToDocs([]map[string]any{ + errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ {"name": "John"}, {"name": "Fred"}, - {"model": "iPhone", "owner_id": mustGetDocKeyFromDocMap(map[string]any{"name": "John"})}, - {"model": "MacBook", "owner_id": mustGetDocKeyFromDocMap(map[string]any{"name": "Fred"})}, + {"model": "iPhone", "owner_id": mustGetDocIDFromDocMap(map[string]any{"name": "John"})}, + {"model": "MacBook", "owner_id": mustGetDocIDFromDocMap(map[string]any{"name": "Fred"})}, }), docs) if errorMsg != "" { t.Error(errorMsg) @@ -137,9 +137,9 @@ func TestGeneratePredefinedFromSchema_OneToOnePrimary(t *testing.T) { }) assert.NoError(t, err) - errorMsg := assertDocs(mustAddKeysToDocs([]map[string]any{ - {"name": "John", "device_id": mustGetDocKeyFromDocMap(map[string]any{"model": "iPhone"})}, - {"name": "Fred", "device_id": mustGetDocKeyFromDocMap(map[string]any{"model": "MacBook"})}, + errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ + {"name": "John", "device_id": mustGetDocIDFromDocMap(map[string]any{"model": "iPhone"})}, + {"name": "Fred", "device_id": mustGetDocIDFromDocMap(map[string]any{"model": "MacBook"})}, {"model": "iPhone"}, {"model": "MacBook"}, }), docs) @@ -180,9 +180,9 @@ func TestGeneratePredefinedFromSchema_OneToOneToOnePrimary(t *testing.T) { }) assert.NoError(t, err) - specsDoc := mustAddKeyToDoc(map[string]any{"OS": "iOS"}) - deviceDoc := mustAddKeyToDoc(map[string]any{"model": "iPhone", "specs_id": specsDoc[request.KeyFieldName]}) - userDoc := mustAddKeyToDoc(map[string]any{"name": "John", "device_id": deviceDoc[request.KeyFieldName]}) + specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS"}) + deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone", "specs_id": specsDoc[request.DocIDFieldName]}) + userDoc := mustAddDocIDToDoc(map[string]any{"name": "John", "device_id": deviceDoc[request.DocIDFieldName]}) errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, specsDoc}, docs) if errorMsg != "" { @@ -222,12 +222,12 @@ func TestGeneratePredefinedFromSchema_TwoPrimaryToOneMiddle(t *testing.T) { }) assert.NoError(t, err) - specsDoc := mustAddKeyToDoc(map[string]any{"OS": "iOS"}) - userDoc := mustAddKeyToDoc(map[string]any{"name": "John"}) - deviceDoc := mustAddKeyToDoc(map[string]any{ + specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS"}) + userDoc := 
mustAddDocIDToDoc(map[string]any{"name": "John"}) + deviceDoc := mustAddDocIDToDoc(map[string]any{ "model": "iPhone", - "specs_id": specsDoc[request.KeyFieldName], - "owner_id": userDoc[request.KeyFieldName], + "specs_id": specsDoc[request.DocIDFieldName], + "owner_id": userDoc[request.DocIDFieldName], }) errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, specsDoc}, docs) @@ -268,9 +268,9 @@ func TestGeneratePredefinedFromSchema_OneToTwoPrimary(t *testing.T) { }) assert.NoError(t, err) - deviceDoc := mustAddKeyToDoc(map[string]any{"model": "iPhone"}) - specsDoc := mustAddKeyToDoc(map[string]any{"OS": "iOS", "device_id": deviceDoc[request.KeyFieldName]}) - userDoc := mustAddKeyToDoc(map[string]any{"name": "John", "device_id": deviceDoc[request.KeyFieldName]}) + deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone"}) + specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS", "device_id": deviceDoc[request.DocIDFieldName]}) + userDoc := mustAddDocIDToDoc(map[string]any{"name": "John", "device_id": deviceDoc[request.DocIDFieldName]}) errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, specsDoc}, docs) if errorMsg != "" { @@ -310,12 +310,12 @@ func TestGeneratePredefinedFromSchema_TwoPrimaryToOneRoot(t *testing.T) { }) assert.NoError(t, err) - deviceDoc := mustAddKeyToDoc(map[string]any{"model": "iPhone"}) - addressDoc := mustAddKeyToDoc(map[string]any{"street": "Backer"}) - userDoc := mustAddKeyToDoc(map[string]any{ + deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone"}) + addressDoc := mustAddDocIDToDoc(map[string]any{"street": "Backer"}) + userDoc := mustAddDocIDToDoc(map[string]any{ "name": "John", - "device_id": deviceDoc[request.KeyFieldName], - "address_id": addressDoc[request.KeyFieldName], + "device_id": deviceDoc[request.DocIDFieldName], + "address_id": addressDoc[request.DocIDFieldName], }) errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, addressDoc}, docs) @@ -356,15 +356,15 @@ func TestGeneratePredefinedFromSchema_OneToMany(t *testing.T) { }) assert.NoError(t, err) - johnDocKey := mustGetDocKeyFromDocMap(map[string]any{"name": "John"}) - fredDocKey := mustGetDocKeyFromDocMap(map[string]any{"name": "Fred"}) - errorMsg := assertDocs(mustAddKeysToDocs([]map[string]any{ + johnDocID := mustGetDocIDFromDocMap(map[string]any{"name": "John"}) + fredDocID := mustGetDocIDFromDocMap(map[string]any{"name": "Fred"}) + errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ {"name": "John"}, {"name": "Fred"}, - {"model": "iPhone", "owner_id": johnDocKey}, - {"model": "PlayStation", "owner_id": johnDocKey}, - {"model": "Surface", "owner_id": fredDocKey}, - {"model": "Pixel", "owner_id": fredDocKey}, + {"model": "iPhone", "owner_id": johnDocID}, + {"model": "PlayStation", "owner_id": johnDocID}, + {"model": "Surface", "owner_id": fredDocID}, + {"model": "Pixel", "owner_id": fredDocID}, }), docs) if errorMsg != "" { t.Error(errorMsg) @@ -411,13 +411,13 @@ func TestGeneratePredefinedFromSchema_OneToManyToOne(t *testing.T) { }) assert.NoError(t, err) - johnDocKey := mustGetDocKeyFromDocMap(map[string]any{"name": "John"}) - errorMsg := assertDocs(mustAddKeysToDocs([]map[string]any{ + johnDocID := mustGetDocIDFromDocMap(map[string]any{"name": "John"}) + errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ {"name": "John"}, - {"model": "iPhone", "owner_id": johnDocKey}, - {"model": "MacBook", "owner_id": johnDocKey}, - {"CPU": "A13", "device_id": mustGetDocKeyFromDocMap(map[string]any{"model": "iPhone", "owner_id": johnDocKey})}, - {"CPU": 
"M2", "device_id": mustGetDocKeyFromDocMap(map[string]any{"model": "MacBook", "owner_id": johnDocKey})}, + {"model": "iPhone", "owner_id": johnDocID}, + {"model": "MacBook", "owner_id": johnDocID}, + {"CPU": "A13", "device_id": mustGetDocIDFromDocMap(map[string]any{"model": "iPhone", "owner_id": johnDocID})}, + {"CPU": "M2", "device_id": mustGetDocIDFromDocMap(map[string]any{"model": "MacBook", "owner_id": johnDocID})}, }), docs) if errorMsg != "" { t.Error(errorMsg) @@ -492,15 +492,15 @@ func TestGeneratePredefined_OneToMany(t *testing.T) { }) assert.NoError(t, err) - johnDocKey := mustGetDocKeyFromDocMap(map[string]any{"name": "John"}) - fredDocKey := mustGetDocKeyFromDocMap(map[string]any{"name": "Fred"}) - errorMsg := assertDocs(mustAddKeysToDocs([]map[string]any{ + johnDocID := mustGetDocIDFromDocMap(map[string]any{"name": "John"}) + fredDocID := mustGetDocIDFromDocMap(map[string]any{"name": "Fred"}) + errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ {"name": "John"}, {"name": "Fred"}, - {"model": "iPhone", "owner_id": johnDocKey}, - {"model": "PlayStation", "owner_id": johnDocKey}, - {"model": "Surface", "owner_id": fredDocKey}, - {"model": "Pixel", "owner_id": fredDocKey}, + {"model": "iPhone", "owner_id": johnDocID}, + {"model": "PlayStation", "owner_id": johnDocID}, + {"model": "Surface", "owner_id": fredDocID}, + {"model": "Pixel", "owner_id": fredDocID}, }), docs) if errorMsg != "" { t.Error(errorMsg) diff --git a/tests/predefined/util_test.go b/tests/predefined/util_test.go index da5d880ba8..c06e6c0fdc 100644 --- a/tests/predefined/util_test.go +++ b/tests/predefined/util_test.go @@ -68,22 +68,22 @@ outer: return "" } -func mustGetDocKeyFromDocMap(docMap map[string]any) string { +func mustGetDocIDFromDocMap(docMap map[string]any) string { doc, err := client.NewDocFromMap(docMap) if err != nil { panic("can not get doc from map" + err.Error()) } - return doc.Key().String() + return doc.ID().String() } -func mustAddKeyToDoc(doc map[string]any) map[string]any { - doc[request.KeyFieldName] = mustGetDocKeyFromDocMap(doc) +func mustAddDocIDToDoc(doc map[string]any) map[string]any { + doc[request.DocIDFieldName] = mustGetDocIDFromDocMap(doc) return doc } -func mustAddKeysToDocs(docs []map[string]any) []map[string]any { +func mustAddDocIDsToDocs(docs []map[string]any) []map[string]any { for i := range docs { - mustAddKeyToDoc(docs[i]) + mustAddDocIDToDoc(docs[i]) } return docs } diff --git a/version/version.go b/version/version.go index 67538d302b..32de6f67c8 100644 --- a/version/version.go +++ b/version/version.go @@ -37,12 +37,12 @@ var ( type defraVersion struct { Release string `json:"release"` Commit string `json:"commit"` - CommitDate string `json:"commitdate"` + CommitDate string `json:"commitDate"` GoInfo string `json:"go"` - VersionHTTPAPI string `json:"httpapi"` - DocKeyVersions string `json:"dockeyversions"` - NetProtocol string `json:"netprotocol"` + VersionHTTPAPI string `json:"httpAPI"` + DocIDVersions string `json:"docIDVersions"` + NetProtocol string `json:"netProtocol"` } // NewDefraVersion returns a defraVersion with normalized values. 
@@ -55,13 +55,13 @@ func NewDefraVersion() (defraVersion, error) { VersionHTTPAPI: http.Version, NetProtocol: string(net.Protocol), } - var docKeyVersions []string - for k, v := range client.ValidDocKeyVersions { + var docIDVersions []string + for k, v := range client.ValidDocIDVersions { if v { - docKeyVersions = append(docKeyVersions, fmt.Sprintf("%x", k)) + docIDVersions = append(docIDVersions, fmt.Sprintf("%x", k)) } } - dv.DocKeyVersions = strings.Join(docKeyVersions, ",") + dv.DocIDVersions = strings.Join(docIDVersions, ",") return dv, nil } @@ -88,14 +88,14 @@ func (dv *defraVersion) StringFull() string { `defradb %s (%s %s) * HTTP API: %s * P2P multicodec: %s -* DocKey versions: %s +* DocID versions: %s * Go: %s`, dv.Release, commitHash, dv.CommitDate, dv.VersionHTTPAPI, dv.NetProtocol, - dv.DocKeyVersions, + dv.DocIDVersions, dv.GoInfo, ) } diff --git a/version/version_test.go b/version/version_test.go index f69c9959e4..1f46fc208d 100644 --- a/version/version_test.go +++ b/version/version_test.go @@ -23,7 +23,7 @@ func TestNewDefraVersion(t *testing.T) { assert.NotEmpty(t, dv.VersionHTTPAPI) assert.NotEmpty(t, dv.NetProtocol) - assert.NotEmpty(t, dv.DocKeyVersions) + assert.NotEmpty(t, dv.DocIDVersions) // These variables are set in the Makefile via BUILD_FLAGS when building defradb. // This test assumes the test suite is not using these BUILD_FLAGS. @@ -51,14 +51,14 @@ func TestDefraVersionStringFull(t *testing.T) { CommitDate: "2022-01-01T12:00:00Z", GoInfo: "1.17.5", VersionHTTPAPI: "v0", - DocKeyVersions: "1", + DocIDVersions: "1", NetProtocol: "/defra/0.0.1", } expected := `defradb test-release (abc123de 2022-01-01T12:00:00Z) * HTTP API: v0 * P2P multicodec: /defra/0.0.1 -* DocKey versions: 1 +* DocID versions: 1 * Go: 1.17.5` assert.Equal(t, expected, dv.StringFull()) @@ -71,7 +71,7 @@ func TestDefraVersion_JSON(t *testing.T) { CommitDate: "2022-01-01T12:00:00Z", GoInfo: "go1.17.5", VersionHTTPAPI: "1.2.3", - DocKeyVersions: "0123456789abcdef", + DocIDVersions: "0123456789abcdef", NetProtocol: "test-protocol", }