From 27146cbcf391fd90f1f8b9786f5b1903791c99d1 Mon Sep 17 00:00:00 2001
From: Shahzad Lone
Date: Mon, 21 Oct 2024 11:37:04 -0400
Subject: [PATCH 01/47] test(i): Fix the nodes ID/index bug in tests (#3162)
## Relevant issue(s)
Resolves #3076
## Description
- Fix the `getNodes` bug
- Fix the temporary duplications in some actions
- Remove the `getNodesCollection`
---
tests/integration/acp.go | 156 ++--------------
tests/integration/explain.go | 3 +-
tests/integration/lens.go | 3 +-
tests/integration/utils.go | 346 +++++++++++------------------------
4 files changed, 127 insertions(+), 381 deletions(-)
diff --git a/tests/integration/acp.go b/tests/integration/acp.go
index d98fe08a3f..f58f1963d8 100644
--- a/tests/integration/acp.go
+++ b/tests/integration/acp.go
@@ -112,8 +112,10 @@ func addPolicyACP(
require.Fail(s.t, "Expected error should not have an expected policyID with it.", s.testCase.Description)
}
- for i, node := range getNodes(action.NodeID, s.nodes) {
- identity := getIdentity(s, i, action.Identity)
+ nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for index, node := range nodes {
+ nodeID := nodeIDs[index]
+ identity := getIdentity(s, nodeID, action.Identity)
ctx := db.SetContextIdentity(s.ctx, identity)
policyResult, err := node.AddPolicy(ctx, action.Policy)
@@ -183,16 +185,15 @@ func addDocActorRelationshipACP(
s *state,
action AddDocActorRelationship,
) {
- if action.NodeID.HasValue() {
- nodeID := action.NodeID.Value()
- collections := s.collections[nodeID]
- node := s.nodes[nodeID]
+ nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for index, node := range nodes {
+ nodeID := nodeIDs[index]
var collectionName string
if action.CollectionID == -1 {
collectionName = ""
} else {
- collection := collections[action.CollectionID]
+ collection := s.collections[nodeID][action.CollectionID]
if !collection.Description().Name.HasValue() {
require.Fail(s.t, "Expected non-empty collection name, but it was empty.", s.testCase.Description)
}
@@ -243,69 +244,11 @@ func addDocActorRelationshipACP(
require.Equal(s.t, action.ExpectedError, "")
require.Equal(s.t, action.ExpectedExistence, exists.ExistedAlready)
}
- } else {
- for i, node := range getNodes(action.NodeID, s.nodes) {
- var collectionName string
- if action.CollectionID == -1 {
- collectionName = ""
- } else {
- collection := s.collections[i][action.CollectionID]
- if !collection.Description().Name.HasValue() {
- require.Fail(s.t, "Expected non-empty collection name, but it was empty.", s.testCase.Description)
- }
- collectionName = collection.Description().Name.Value()
- }
-
- var docID string
- if action.DocID == -1 || action.CollectionID == -1 {
- docID = ""
- } else {
- docID = s.docIDs[action.CollectionID][action.DocID].String()
- }
- var targetIdentity string
- if action.TargetIdentity == -1 {
- targetIdentity = ""
- } else {
- optionalTargetIdentity := getIdentity(s, i, immutable.Some(action.TargetIdentity))
- if !optionalTargetIdentity.HasValue() {
- require.Fail(s.t, "Expected non-empty target identity, but it was empty.", s.testCase.Description)
- }
- targetIdentity = optionalTargetIdentity.Value().DID
- }
-
- var requestorIdentity immutable.Option[acpIdentity.Identity]
- if action.RequestorIdentity == -1 {
- requestorIdentity = acpIdentity.None
- } else {
- requestorIdentity = getIdentity(s, i, immutable.Some(action.RequestorIdentity))
- if !requestorIdentity.HasValue() {
- require.Fail(s.t, "Expected non-empty requestor identity, but it was empty.", s.testCase.Description)
- }
- }
- ctx := db.SetContextIdentity(s.ctx, requestorIdentity)
-
- exists, err := node.AddDocActorRelationship(
- ctx,
- collectionName,
- docID,
- action.Relation,
- targetIdentity,
- )
-
- expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
- assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
-
- if !expectedErrorRaised {
- require.Equal(s.t, action.ExpectedError, "")
- require.Equal(s.t, action.ExpectedExistence, exists.ExistedAlready)
- }
-
- // The relationship should only be added to a SourceHub chain once - there is no need to loop through
- // the nodes.
- if acpType == SourceHubACPType {
- break
- }
+ // The relationship should only be added to a SourceHub chain once - there is no need to loop through
+ // the nodes.
+ if acpType == SourceHubACPType {
+ break
}
}
}
@@ -361,16 +304,15 @@ func deleteDocActorRelationshipACP(
s *state,
action DeleteDocActorRelationship,
) {
- if action.NodeID.HasValue() {
- nodeID := action.NodeID.Value()
- collections := s.collections[nodeID]
- node := s.nodes[nodeID]
+ nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for index, node := range nodes {
+ nodeID := nodeIDs[index]
var collectionName string
if action.CollectionID == -1 {
collectionName = ""
} else {
- collection := collections[action.CollectionID]
+ collection := s.collections[nodeID][action.CollectionID]
if !collection.Description().Name.HasValue() {
require.Fail(s.t, "Expected non-empty collection name, but it was empty.", s.testCase.Description)
}
@@ -421,69 +363,11 @@ func deleteDocActorRelationshipACP(
require.Equal(s.t, action.ExpectedError, "")
require.Equal(s.t, action.ExpectedRecordFound, deleteDocActorRelationshipResult.RecordFound)
}
- } else {
- for i, node := range getNodes(action.NodeID, s.nodes) {
- var collectionName string
- if action.CollectionID == -1 {
- collectionName = ""
- } else {
- collection := s.collections[i][action.CollectionID]
- if !collection.Description().Name.HasValue() {
- require.Fail(s.t, "Expected non-empty collection name, but it was empty.", s.testCase.Description)
- }
- collectionName = collection.Description().Name.Value()
- }
-
- var docID string
- if action.DocID == -1 || action.CollectionID == -1 {
- docID = ""
- } else {
- docID = s.docIDs[action.CollectionID][action.DocID].String()
- }
- var targetIdentity string
- if action.TargetIdentity == -1 {
- targetIdentity = ""
- } else {
- optionalTargetIdentity := getIdentity(s, i, immutable.Some(action.TargetIdentity))
- if !optionalTargetIdentity.HasValue() {
- require.Fail(s.t, "Expected non-empty target identity, but it was empty.", s.testCase.Description)
- }
- targetIdentity = optionalTargetIdentity.Value().DID
- }
-
- var requestorIdentity immutable.Option[acpIdentity.Identity]
- if action.RequestorIdentity == -1 {
- requestorIdentity = acpIdentity.None
- } else {
- requestorIdentity = getIdentity(s, i, immutable.Some(action.RequestorIdentity))
- if !requestorIdentity.HasValue() {
- require.Fail(s.t, "Expected non-empty requestor identity, but it was empty.", s.testCase.Description)
- }
- }
- ctx := db.SetContextIdentity(s.ctx, requestorIdentity)
-
- deleteDocActorRelationshipResult, err := node.DeleteDocActorRelationship(
- ctx,
- collectionName,
- docID,
- action.Relation,
- targetIdentity,
- )
-
- expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
- assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
-
- if !expectedErrorRaised {
- require.Equal(s.t, action.ExpectedError, "")
- require.Equal(s.t, action.ExpectedRecordFound, deleteDocActorRelationshipResult.RecordFound)
- }
-
- // The relationship should only be added to a SourceHub chain once - there is no need to loop through
- // the nodes.
- if acpType == SourceHubACPType {
- break
- }
+ // The relationship should only be deleted from a SourceHub chain once - there is no need to loop through
+ // the nodes.
+ if acpType == SourceHubACPType {
+ break
}
}
}
diff --git a/tests/integration/explain.go b/tests/integration/explain.go
index 4cdebe9103..c7090a7b50 100644
--- a/tests/integration/explain.go
+++ b/tests/integration/explain.go
@@ -133,7 +133,8 @@ func executeExplainRequest(
require.Fail(s.t, "Expected error should not have other expected results with it.", s.testCase.Description)
}
- for _, node := range getNodes(action.NodeID, s.nodes) {
+ _, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for _, node := range nodes {
result := node.ExecRequest(
s.ctx,
action.Request,
diff --git a/tests/integration/lens.go b/tests/integration/lens.go
index 61ece97d73..c361c55342 100644
--- a/tests/integration/lens.go
+++ b/tests/integration/lens.go
@@ -57,7 +57,8 @@ func configureMigration(
s *state,
action ConfigureMigration,
) {
- for _, node := range getNodes(action.NodeID, s.nodes) {
+ _, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for _, node := range nodes {
txn := getTransaction(s, node, action.TransactionID, action.ExpectedError)
ctx := db.SetContextTxn(s.ctx, txn)
diff --git a/tests/integration/utils.go b/tests/integration/utils.go
index 8d12d03b29..05698e9a39 100644
--- a/tests/integration/utils.go
+++ b/tests/integration/utils.go
@@ -433,11 +433,13 @@ func createGenerateDocs(s *state, docs []gen.GeneratedDoc, nodeID immutable.Opti
}
func generateDocs(s *state, action GenerateDocs) {
- collections := getNodeCollections(action.NodeID, s.collections)
- defs := make([]client.CollectionDefinition, 0, len(collections[0]))
- for _, col := range collections[0] {
- if len(action.ForCollections) == 0 || slices.Contains(action.ForCollections, col.Name().Value()) {
- defs = append(defs, col.Definition())
+ nodeIDs, _ := getNodesWithIDs(action.NodeID, s.nodes)
+ firstNodesID := nodeIDs[0]
+ collections := s.collections[firstNodesID]
+ defs := make([]client.CollectionDefinition, 0, len(collections))
+ for _, collection := range collections {
+ if len(action.ForCollections) == 0 || slices.Contains(action.ForCollections, collection.Name().Value()) {
+ defs = append(defs, collection.Definition())
}
}
docs, err := gen.AutoGenerate(defs, action.Options...)
@@ -448,9 +450,11 @@ func generateDocs(s *state, action GenerateDocs) {
}
func generatePredefinedDocs(s *state, action CreatePredefinedDocs) {
- collections := getNodeCollections(action.NodeID, s.collections)
- defs := make([]client.CollectionDefinition, 0, len(collections[0]))
- for _, col := range collections[0] {
+ nodeIDs, _ := getNodesWithIDs(action.NodeID, s.nodes)
+ firstNodesID := nodeIDs[0]
+ collections := s.collections[firstNodesID]
+ defs := make([]client.CollectionDefinition, 0, len(collections))
+ for _, col := range collections {
defs = append(defs, col.Definition())
}
docs, err := predefined.Create(defs, action.Docs)
@@ -566,38 +570,32 @@ func closeNodes(
s *state,
action Close,
) {
- for _, node := range getNodes(action.NodeID, s.nodes) {
+ _, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for _, node := range nodes {
node.Close()
}
}
-// getNodes gets the set of applicable nodes for the given nodeID.
+// getNodesWithIDs gets the applicable node(s) and their ID(s) for the given target nodeID.
//
-// If nodeID has a value it will return that node only, otherwise all nodes will be returned.
-func getNodes(nodeID immutable.Option[int], nodes []clients.Client) []clients.Client {
- if !nodeID.HasValue() {
- return nodes
- }
-
- return []clients.Client{nodes[nodeID.Value()]}
-}
-
-// getNodeCollections gets the set of applicable collections for the given nodeID.
-//
-// If nodeID has a value it will return collections for that node only, otherwise all collections across all
-// nodes will be returned.
+// If nodeID has a value it will return that node and its ID only. Otherwise all nodes will
+// be returned with their corresponding IDs in a list.
//
// WARNING:
-// The caller must not assume the returned collections are in order of the node index if the specified
-// index is greater than 0. For example if requesting collections with nodeID=2 then the resulting output
-// will contain only one element (at index 0) that will be the collections of the respective node, the
-// caller might accidentally assume that these collections belong to node 0.
-func getNodeCollections(nodeID immutable.Option[int], collections [][]client.Collection) [][]client.Collection {
+// The caller must not assume the returned node's ID is in order of the node's index if the specified nodeID is
+// greater than 0. For example if requesting a node with nodeID=2 then the resulting output will contain only
+// one element (at index 0); the caller might accidentally assume that this node belongs to node 0. Therefore, the
+// caller should always use the returned IDs, instead of guessing the IDs based on node indexes.
+func getNodesWithIDs(nodeID immutable.Option[int], nodes []clients.Client) ([]int, []clients.Client) {
if !nodeID.HasValue() {
- return collections
+ indexes := make([]int, len(nodes))
+ for i := range nodes {
+ indexes[i] = i
+ }
+ return indexes, nodes
}
- return [][]client.Collection{collections[nodeID.Value()]}
+ return []int{nodeID.Value()}, []clients.Client{nodes[nodeID.Value()]}
}
func calculateLenForFlattenedActions(testCase *TestCase) int {
@@ -732,7 +730,7 @@ func setStartingNodes(
}
func startNodes(s *state, action Start) {
- nodes := getNodes(action.NodeID, s.nodes)
+ _, nodes := getNodesWithIDs(action.NodeID, s.nodes)
// We need to restart the nodes in reverse order, to avoid dial backoff issues.
for i := len(nodes) - 1; i >= 0; i-- {
nodeIndex := i
@@ -903,9 +901,11 @@ func refreshDocuments(
// otherwise they cannot be referenced correctly by other actions.
switch action := s.testCase.Actions[i].(type) {
case CreateDoc:
+ nodeIDs, _ := getNodesWithIDs(action.NodeID, s.nodes)
// Just use the collection from the first relevant node, as all will be the same for this
// purpose.
- collection := getNodeCollections(action.NodeID, s.collections)[0][action.CollectionID]
+ firstNodesID := nodeIDs[0]
+ collection := s.collections[firstNodesID][action.CollectionID]
if action.DocMap != nil {
substituteRelations(s, action)
@@ -960,8 +960,8 @@ func getIndexes(
var expectedErrorRaised bool
- if action.NodeID.HasValue() {
- nodeID := action.NodeID.Value()
+ nodeIDs, _ := getNodesWithIDs(action.NodeID, s.nodes)
+ for _, nodeID := range nodeIDs {
collections := s.collections[nodeID]
err := withRetryOnNode(
s.nodes[nodeID],
@@ -979,25 +979,6 @@ func getIndexes(
)
expectedErrorRaised = expectedErrorRaised ||
AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
- } else {
- for nodeID, collections := range s.collections {
- err := withRetryOnNode(
- s.nodes[nodeID],
- func() error {
- actualIndexes, err := collections[action.CollectionID].GetIndexes(s.ctx)
- if err != nil {
- return err
- }
-
- assertIndexesListsEqual(action.ExpectedIndexes,
- actualIndexes, s.t, s.testCase.Description)
-
- return nil
- },
- )
- expectedErrorRaised = expectedErrorRaised ||
- AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
- }
}
assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
@@ -1071,7 +1052,8 @@ func updateSchema(
s *state,
action SchemaUpdate,
) {
- for _, node := range getNodes(action.NodeID, s.nodes) {
+ _, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for _, node := range nodes {
results, err := node.AddSchema(s.ctx, action.Schema)
expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
@@ -1091,7 +1073,8 @@ func patchSchema(
s *state,
action SchemaPatch,
) {
- for _, node := range getNodes(action.NodeID, s.nodes) {
+ _, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for _, node := range nodes {
var setAsDefaultVersion bool
if action.SetAsDefaultVersion.HasValue() {
setAsDefaultVersion = action.SetAsDefaultVersion.Value()
@@ -1114,7 +1097,8 @@ func patchCollection(
s *state,
action PatchCollection,
) {
- for _, node := range getNodes(action.NodeID, s.nodes) {
+ _, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for _, node := range nodes {
err := node.PatchCollection(s.ctx, action.Patch)
expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
@@ -1130,7 +1114,8 @@ func getSchema(
s *state,
action GetSchema,
) {
- for _, node := range getNodes(action.NodeID, s.nodes) {
+ _, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for _, node := range nodes {
var results []client.SchemaDescription
var err error
switch {
@@ -1161,7 +1146,8 @@ func getCollections(
s *state,
action GetCollections,
) {
- for _, node := range getNodes(action.NodeID, s.nodes) {
+ _, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for _, node := range nodes {
txn := getTransaction(s, node, action.TransactionID, "")
ctx := db.SetContextTxn(s.ctx, txn)
results, err := node.GetCollections(ctx, action.FilterOptions)
@@ -1183,7 +1169,8 @@ func setActiveSchemaVersion(
s *state,
action SetActiveSchemaVersion,
) {
- for _, node := range getNodes(action.NodeID, s.nodes) {
+ _, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for _, node := range nodes {
err := node.SetActiveSchemaVersion(s.ctx, action.SchemaVersionID)
expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
@@ -1211,7 +1198,8 @@ func createView(
}, "")
}
- for _, node := range getNodes(action.NodeID, s.nodes) {
+ _, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for _, node := range nodes {
_, err := node.AddView(s.ctx, action.Query, action.SDL, action.Transform)
expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
@@ -1223,7 +1211,8 @@ func refreshViews(
s *state,
action RefreshViews,
) {
- for _, node := range getNodes(action.NodeID, s.nodes) {
+ _, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for _, node := range nodes {
err := node.RefreshViews(s.ctx, action.FilterOptions)
expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
@@ -1255,42 +1244,25 @@ func createDoc(
var expectedErrorRaised bool
var docIDs []client.DocID
- if action.NodeID.HasValue() {
- actionNode := s.nodes[action.NodeID.Value()]
- collections := s.collections[action.NodeID.Value()]
+ nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for index, node := range nodes {
+ nodeID := nodeIDs[index]
+ collection := s.collections[nodeID][action.CollectionID]
err := withRetryOnNode(
- actionNode,
+ node,
func() error {
var err error
docIDs, err = mutation(
s,
action,
- actionNode,
- action.NodeID.Value(),
- collections[action.CollectionID],
+ node,
+ nodeID,
+ collection,
)
return err
},
)
expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
- } else {
- for nodeID, collections := range s.collections {
- err := withRetryOnNode(
- s.nodes[nodeID],
- func() error {
- var err error
- docIDs, err = mutation(
- s,
- action,
- s.nodes[nodeID],
- nodeID,
- collections[action.CollectionID],
- )
- return err
- },
- )
- expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
- }
}
assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
@@ -1463,33 +1435,20 @@ func deleteDoc(
var expectedErrorRaised bool
- if action.NodeID.HasValue() {
- nodeID := action.NodeID.Value()
- actionNode := s.nodes[nodeID]
- collections := s.collections[nodeID]
+ nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for index, node := range nodes {
+ nodeID := nodeIDs[index]
+ collection := s.collections[nodeID][action.CollectionID]
identity := getIdentity(s, nodeID, action.Identity)
ctx := db.SetContextIdentity(s.ctx, identity)
err := withRetryOnNode(
- actionNode,
+ node,
func() error {
- _, err := collections[action.CollectionID].Delete(ctx, docID)
+ _, err := collection.Delete(ctx, docID)
return err
},
)
expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
- } else {
- for nodeID, collections := range s.collections {
- identity := getIdentity(s, nodeID, action.Identity)
- ctx := db.SetContextIdentity(s.ctx, identity)
- err := withRetryOnNode(
- s.nodes[nodeID],
- func() error {
- _, err := collections[action.CollectionID].Delete(ctx, docID)
- return err
- },
- )
- expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
- }
}
assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
@@ -1521,40 +1480,23 @@ func updateDoc(
var expectedErrorRaised bool
- if action.NodeID.HasValue() {
- nodeID := action.NodeID.Value()
- collections := s.collections[nodeID]
- actionNode := s.nodes[nodeID]
+ nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for index, node := range nodes {
+ nodeID := nodeIDs[index]
+ collection := s.collections[nodeID][action.CollectionID]
err := withRetryOnNode(
- actionNode,
+ node,
func() error {
return mutation(
s,
action,
- actionNode,
+ node,
nodeID,
- collections[action.CollectionID],
+ collection,
)
},
)
expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
- } else {
- for nodeID, collections := range s.collections {
- actionNode := s.nodes[nodeID]
- err := withRetryOnNode(
- actionNode,
- func() error {
- return mutation(
- s,
- action,
- actionNode,
- nodeID,
- collections[action.CollectionID],
- )
- },
- )
- expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
- }
}
assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
@@ -1643,34 +1585,22 @@ func updateDocViaGQL(
func updateWithFilter(s *state, action UpdateWithFilter) {
var res *client.UpdateResult
var expectedErrorRaised bool
- if action.NodeID.HasValue() {
- nodeID := action.NodeID.Value()
- collections := s.collections[nodeID]
+
+ nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for index, node := range nodes {
+ nodeID := nodeIDs[index]
+ collection := s.collections[nodeID][action.CollectionID]
identity := getIdentity(s, nodeID, action.Identity)
ctx := db.SetContextIdentity(s.ctx, identity)
err := withRetryOnNode(
- s.nodes[nodeID],
+ node,
func() error {
var err error
- res, err = collections[action.CollectionID].UpdateWithFilter(ctx, action.Filter, action.Updater)
+ res, err = collection.UpdateWithFilter(ctx, action.Filter, action.Updater)
return err
},
)
expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
- } else {
- for nodeID, collections := range s.collections {
- identity := getIdentity(s, nodeID, action.Identity)
- ctx := db.SetContextIdentity(s.ctx, identity)
- err := withRetryOnNode(
- s.nodes[nodeID],
- func() error {
- var err error
- res, err = collections[action.CollectionID].UpdateWithFilter(ctx, action.Filter, action.Updater)
- return err
- },
- )
- expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
- }
}
assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
@@ -1693,9 +1623,10 @@ func createIndex(
)
}
- if action.NodeID.HasValue() {
- nodeID := action.NodeID.Value()
- collections := s.collections[nodeID]
+ nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for index, node := range nodes {
+ nodeID := nodeIDs[index]
+ collection := s.collections[nodeID][action.CollectionID]
indexDesc := client.IndexDescription{
Name: action.IndexName,
}
@@ -1716,9 +1647,9 @@ func createIndex(
indexDesc.Unique = action.Unique
err := withRetryOnNode(
- s.nodes[nodeID],
+ node,
func() error {
- desc, err := collections[action.CollectionID].CreateIndex(s.ctx, indexDesc)
+ desc, err := collection.CreateIndex(s.ctx, indexDesc)
if err != nil {
return err
}
@@ -1732,45 +1663,6 @@ func createIndex(
if AssertError(s.t, s.testCase.Description, err, action.ExpectedError) {
return
}
- } else {
- for nodeID, collections := range s.collections {
- indexDesc := client.IndexDescription{
- Name: action.IndexName,
- }
- if action.FieldName != "" {
- indexDesc.Fields = []client.IndexedFieldDescription{
- {
- Name: action.FieldName,
- },
- }
- } else if len(action.Fields) > 0 {
- for i := range action.Fields {
- indexDesc.Fields = append(indexDesc.Fields, client.IndexedFieldDescription{
- Name: action.Fields[i].Name,
- Descending: action.Fields[i].Descending,
- })
- }
- }
-
- indexDesc.Unique = action.Unique
- err := withRetryOnNode(
- s.nodes[nodeID],
- func() error {
- desc, err := collections[action.CollectionID].CreateIndex(s.ctx, indexDesc)
- if err != nil {
- return err
- }
- s.indexes[nodeID][action.CollectionID] = append(
- s.indexes[nodeID][action.CollectionID],
- desc,
- )
- return nil
- },
- )
- if AssertError(s.t, s.testCase.Description, err, action.ExpectedError) {
- return
- }
- }
}
assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, false)
@@ -1783,37 +1675,22 @@ func dropIndex(
) {
var expectedErrorRaised bool
- if action.NodeID.HasValue() {
- nodeID := action.NodeID.Value()
- collections := s.collections[nodeID]
-
+ nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for index, node := range nodes {
+ nodeID := nodeIDs[index]
+ collection := s.collections[nodeID][action.CollectionID]
indexName := action.IndexName
if indexName == "" {
indexName = s.indexes[nodeID][action.CollectionID][action.IndexID].Name
}
err := withRetryOnNode(
- s.nodes[nodeID],
+ node,
func() error {
- return collections[action.CollectionID].DropIndex(s.ctx, indexName)
+ return collection.DropIndex(s.ctx, indexName)
},
)
expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
- } else {
- for nodeID, collections := range s.collections {
- indexName := action.IndexName
- if indexName == "" {
- indexName = s.indexes[nodeID][action.CollectionID][action.IndexID].Name
- }
-
- err := withRetryOnNode(
- s.nodes[nodeID],
- func() error {
- return collections[action.CollectionID].DropIndex(s.ctx, indexName)
- },
- )
- expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
- }
}
assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
@@ -1830,9 +1707,8 @@ func backupExport(
var expectedErrorRaised bool
- if action.NodeID.HasValue() {
- nodeID := action.NodeID.Value()
- node := s.nodes[nodeID]
+ _, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for _, node := range nodes {
err := withRetryOnNode(
node,
func() error { return node.BasicExport(s.ctx, &action.Config) },
@@ -1842,18 +1718,6 @@ func backupExport(
if !expectedErrorRaised {
assertBackupContent(s.t, action.ExpectedContent, action.Config.Filepath)
}
- } else {
- for _, node := range s.nodes {
- err := withRetryOnNode(
- node,
- func() error { return node.BasicExport(s.ctx, &action.Config) },
- )
- expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
-
- if !expectedErrorRaised {
- assertBackupContent(s.t, action.ExpectedContent, action.Config.Filepath)
- }
- }
}
assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
@@ -1874,22 +1738,13 @@ func backupImport(
var expectedErrorRaised bool
- if action.NodeID.HasValue() {
- nodeID := action.NodeID.Value()
- node := s.nodes[nodeID]
+ _, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for _, node := range nodes {
err := withRetryOnNode(
node,
func() error { return node.BasicImport(s.ctx, action.Filepath) },
)
expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
- } else {
- for _, node := range s.nodes {
- err := withRetryOnNode(
- node,
- func() error { return node.BasicImport(s.ctx, action.Filepath) },
- )
- expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
- }
}
assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
@@ -1972,7 +1827,9 @@ func executeRequest(
action Request,
) {
var expectedErrorRaised bool
- for nodeID, node := range getNodes(action.NodeID, s.nodes) {
+ nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for index, node := range nodes {
+ nodeID := nodeIDs[index]
txn := getTransaction(s, node, action.TransactionID, action.ExpectedError)
ctx := db.SetContextTxn(s.ctx, txn)
@@ -2024,7 +1881,8 @@ func executeSubscriptionRequest(
) {
subscriptionAssert := make(chan func())
- for _, node := range getNodes(action.NodeID, s.nodes) {
+ _, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for _, node := range nodes {
result := node.ExecRequest(s.ctx, action.Request)
if AssertErrors(s.t, s.testCase.Description, result.GQL.Errors, action.ExpectedError) {
return
@@ -2275,7 +2133,8 @@ func assertIntrospectionResults(
s *state,
action IntrospectionRequest,
) bool {
- for _, node := range getNodes(action.NodeID, s.nodes) {
+ _, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for _, node := range nodes {
result := node.ExecRequest(s.ctx, action.Request)
if AssertErrors(s.t, s.testCase.Description, result.GQL.Errors, action.ExpectedError) {
@@ -2306,7 +2165,8 @@ func assertClientIntrospectionResults(
s *state,
action ClientIntrospectionRequest,
) bool {
- for _, node := range getNodes(action.NodeID, s.nodes) {
+ _, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for _, node := range nodes {
result := node.ExecRequest(s.ctx, action.Request)
if AssertErrors(s.t, s.testCase.Description, result.GQL.Errors, action.ExpectedError) {
From bb7843cf93e84bca99761266760e711b2dd8facc Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
<41898282+github-actions[bot]@users.noreply.github.com>
Date: Mon, 21 Oct 2024 11:55:53 -0400
Subject: [PATCH 02/47] bot: Update dependencies (bulk dependabot PRs)
21-10-2024 (#3168)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
✅ This PR was created by combining the following PRs:
#3167 bot: Bump @typescript-eslint/eslint-plugin from 8.8.1 to 8.10.0 in
/playground
#3166 bot: Bump eslint-plugin-react-refresh from 0.4.12 to 0.4.13 in
/playground
#3164 bot: Bump vite from 5.4.8 to 5.4.9 in /playground
#3163 bot: Bump eslint from 9.12.0 to 9.13.0 in /playground
⚠️ The following PRs were resolved manually due to merge conflicts:
#3165 bot: Bump @typescript-eslint/parser from 8.8.1 to 8.10.0 in
/playground
---------
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: Shahzad Lone
---
playground/package-lock.json | 349 ++++++++++++++++++++++++++++++-----
playground/package.json | 10 +-
2 files changed, 307 insertions(+), 52 deletions(-)
diff --git a/playground/package-lock.json b/playground/package-lock.json
index 5f1f350d92..ffe998c5f6 100644
--- a/playground/package-lock.json
+++ b/playground/package-lock.json
@@ -18,14 +18,14 @@
"@types/react": "^18.3.11",
"@types/react-dom": "^18.3.1",
"@types/swagger-ui-react": "^4.18.3",
- "@typescript-eslint/eslint-plugin": "^8.8.1",
- "@typescript-eslint/parser": "^8.8.1",
+ "@typescript-eslint/eslint-plugin": "^8.10.0",
+ "@typescript-eslint/parser": "^8.10.0",
"@vitejs/plugin-react-swc": "^3.7.1",
- "eslint": "^9.12.0",
+ "eslint": "^9.13.0",
"eslint-plugin-react-hooks": "^5.0.0",
- "eslint-plugin-react-refresh": "^0.4.12",
+ "eslint-plugin-react-refresh": "^0.4.13",
"typescript": "^5.6.3",
- "vite": "^5.4.8"
+ "vite": "^5.4.9"
}
},
"node_modules/@babel/runtime": {
@@ -559,9 +559,9 @@
}
},
"node_modules/@eslint/core": {
- "version": "0.6.0",
- "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.6.0.tgz",
- "integrity": "sha512-8I2Q8ykA4J0x0o7cg67FPVnehcqWTBehu/lmY+bolPFHGjh49YzGBMXTvpqVgEbBdvNCSxj6iFgiIyHzf03lzg==",
+ "version": "0.7.0",
+ "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.7.0.tgz",
+ "integrity": "sha512-xp5Jirz5DyPYlPiKat8jaq0EmYvDXKKpzTbxXMpT9eqlRJkRKIz9AGMdlvYjih+im+QlhWrpvVjl8IPC/lHlUw==",
"dev": true,
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -616,9 +616,9 @@
}
},
"node_modules/@eslint/js": {
- "version": "9.12.0",
- "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.12.0.tgz",
- "integrity": "sha512-eohesHH8WFRUprDNyEREgqP6beG6htMeUYeCpkEgBCieCMme5r9zFWjzAJp//9S+Kub4rqE+jXe9Cp1a7IYIIA==",
+ "version": "9.13.0",
+ "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.13.0.tgz",
+ "integrity": "sha512-IFLyoY4d72Z5y/6o/BazFBezupzI/taV8sGumxTAVw3lXG9A6md1Dc34T9s1FoD/an9pJH8RHbAxsaEbBed9lA==",
"dev": true,
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2524,16 +2524,16 @@
"license": "MIT"
},
"node_modules/@typescript-eslint/eslint-plugin": {
- "version": "8.8.1",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.8.1.tgz",
- "integrity": "sha512-xfvdgA8AP/vxHgtgU310+WBnLB4uJQ9XdyP17RebG26rLtDrQJV3ZYrcopX91GrHmMoH8bdSwMRh2a//TiJ1jQ==",
+ "version": "8.10.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.10.0.tgz",
+ "integrity": "sha512-phuB3hoP7FFKbRXxjl+DRlQDuJqhpOnm5MmtROXyWi3uS/Xg2ZXqiQfcG2BJHiN4QKyzdOJi3NEn/qTnjUlkmQ==",
"dev": true,
"dependencies": {
"@eslint-community/regexpp": "^4.10.0",
- "@typescript-eslint/scope-manager": "8.8.1",
- "@typescript-eslint/type-utils": "8.8.1",
- "@typescript-eslint/utils": "8.8.1",
- "@typescript-eslint/visitor-keys": "8.8.1",
+ "@typescript-eslint/scope-manager": "8.10.0",
+ "@typescript-eslint/type-utils": "8.10.0",
+ "@typescript-eslint/utils": "8.10.0",
+ "@typescript-eslint/visitor-keys": "8.10.0",
"graphemer": "^1.4.0",
"ignore": "^5.3.1",
"natural-compare": "^1.4.0",
@@ -2556,16 +2556,63 @@
}
}
},
+ "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": {
+ "version": "8.10.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.10.0.tgz",
+ "integrity": "sha512-AgCaEjhfql9MDKjMUxWvH7HjLeBqMCBfIaBbzzIcBbQPZE7CPh1m6FF+L75NUMJFMLYhCywJXIDEMa3//1A0dw==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.10.0",
+ "@typescript-eslint/visitor-keys": "8.10.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": {
+ "version": "8.10.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.10.0.tgz",
+ "integrity": "sha512-k/E48uzsfJCRRbGLapdZgrX52csmWJ2rcowwPvOZ8lwPUv3xW6CcFeJAXgx4uJm+Ge4+a4tFOkdYvSpxhRhg1w==",
+ "dev": true,
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": {
+ "version": "8.10.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.10.0.tgz",
+ "integrity": "sha512-k8nekgqwr7FadWk548Lfph6V3r9OVqjzAIVskE7orMZR23cGJjAOVazsZSJW+ElyjfTM4wx/1g88Mi70DDtG9A==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.10.0",
+ "eslint-visitor-keys": "^3.4.3"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
"node_modules/@typescript-eslint/parser": {
- "version": "8.8.1",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.8.1.tgz",
- "integrity": "sha512-hQUVn2Lij2NAxVFEdvIGxT9gP1tq2yM83m+by3whWFsWC+1y8pxxxHUFE1UqDu2VsGi2i6RLcv4QvouM84U+ow==",
+ "version": "8.10.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.10.0.tgz",
+ "integrity": "sha512-E24l90SxuJhytWJ0pTQydFT46Nk0Z+bsLKo/L8rtQSL93rQ6byd1V/QbDpHUTdLPOMsBCcYXZweADNCfOCmOAg==",
"dev": true,
"dependencies": {
- "@typescript-eslint/scope-manager": "8.8.1",
- "@typescript-eslint/types": "8.8.1",
- "@typescript-eslint/typescript-estree": "8.8.1",
- "@typescript-eslint/visitor-keys": "8.8.1",
+ "@typescript-eslint/scope-manager": "8.10.0",
+ "@typescript-eslint/types": "8.10.0",
+ "@typescript-eslint/typescript-estree": "8.10.0",
+ "@typescript-eslint/visitor-keys": "8.10.0",
"debug": "^4.3.4"
},
"engines": {
@@ -2584,6 +2631,81 @@
}
}
},
+ "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": {
+ "version": "8.10.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.10.0.tgz",
+ "integrity": "sha512-AgCaEjhfql9MDKjMUxWvH7HjLeBqMCBfIaBbzzIcBbQPZE7CPh1m6FF+L75NUMJFMLYhCywJXIDEMa3//1A0dw==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.10.0",
+ "@typescript-eslint/visitor-keys": "8.10.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": {
+ "version": "8.10.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.10.0.tgz",
+ "integrity": "sha512-k/E48uzsfJCRRbGLapdZgrX52csmWJ2rcowwPvOZ8lwPUv3xW6CcFeJAXgx4uJm+Ge4+a4tFOkdYvSpxhRhg1w==",
+ "dev": true,
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": {
+ "version": "8.10.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.10.0.tgz",
+ "integrity": "sha512-3OE0nlcOHaMvQ8Xu5gAfME3/tWVDpb/HxtpUZ1WeOAksZ/h/gwrBzCklaGzwZT97/lBbbxJ16dMA98JMEngW4w==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.10.0",
+ "@typescript-eslint/visitor-keys": "8.10.0",
+ "debug": "^4.3.4",
+ "fast-glob": "^3.3.2",
+ "is-glob": "^4.0.3",
+ "minimatch": "^9.0.4",
+ "semver": "^7.6.0",
+ "ts-api-utils": "^1.3.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependenciesMeta": {
+ "typescript": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": {
+ "version": "8.10.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.10.0.tgz",
+ "integrity": "sha512-k8nekgqwr7FadWk548Lfph6V3r9OVqjzAIVskE7orMZR23cGJjAOVazsZSJW+ElyjfTM4wx/1g88Mi70DDtG9A==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.10.0",
+ "eslint-visitor-keys": "^3.4.3"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
"node_modules/@typescript-eslint/scope-manager": {
"version": "8.8.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.8.1.tgz",
@@ -2602,13 +2724,13 @@
}
},
"node_modules/@typescript-eslint/type-utils": {
- "version": "8.8.1",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.8.1.tgz",
- "integrity": "sha512-qSVnpcbLP8CALORf0za+vjLYj1Wp8HSoiI8zYU5tHxRVj30702Z1Yw4cLwfNKhTPWp5+P+k1pjmD5Zd1nhxiZA==",
+ "version": "8.10.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.10.0.tgz",
+ "integrity": "sha512-PCpUOpyQSpxBn230yIcK+LeCQaXuxrgCm2Zk1S+PTIRJsEfU6nJ0TtwyH8pIwPK/vJoA+7TZtzyAJSGBz+s/dg==",
"dev": true,
"dependencies": {
- "@typescript-eslint/typescript-estree": "8.8.1",
- "@typescript-eslint/utils": "8.8.1",
+ "@typescript-eslint/typescript-estree": "8.10.0",
+ "@typescript-eslint/utils": "8.10.0",
"debug": "^4.3.4",
"ts-api-utils": "^1.3.0"
},
@@ -2625,6 +2747,64 @@
}
}
},
+ "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": {
+ "version": "8.10.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.10.0.tgz",
+ "integrity": "sha512-k/E48uzsfJCRRbGLapdZgrX52csmWJ2rcowwPvOZ8lwPUv3xW6CcFeJAXgx4uJm+Ge4+a4tFOkdYvSpxhRhg1w==",
+ "dev": true,
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": {
+ "version": "8.10.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.10.0.tgz",
+ "integrity": "sha512-3OE0nlcOHaMvQ8Xu5gAfME3/tWVDpb/HxtpUZ1WeOAksZ/h/gwrBzCklaGzwZT97/lBbbxJ16dMA98JMEngW4w==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.10.0",
+ "@typescript-eslint/visitor-keys": "8.10.0",
+ "debug": "^4.3.4",
+ "fast-glob": "^3.3.2",
+ "is-glob": "^4.0.3",
+ "minimatch": "^9.0.4",
+ "semver": "^7.6.0",
+ "ts-api-utils": "^1.3.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependenciesMeta": {
+ "typescript": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": {
+ "version": "8.10.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.10.0.tgz",
+ "integrity": "sha512-k8nekgqwr7FadWk548Lfph6V3r9OVqjzAIVskE7orMZR23cGJjAOVazsZSJW+ElyjfTM4wx/1g88Mi70DDtG9A==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.10.0",
+ "eslint-visitor-keys": "^3.4.3"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
"node_modules/@typescript-eslint/types": {
"version": "8.8.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.8.1.tgz",
@@ -2667,15 +2847,15 @@
}
},
"node_modules/@typescript-eslint/utils": {
- "version": "8.8.1",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.8.1.tgz",
- "integrity": "sha512-/QkNJDbV0bdL7H7d0/y0qBbV2HTtf0TIyjSDTvvmQEzeVx8jEImEbLuOA4EsvE8gIgqMitns0ifb5uQhMj8d9w==",
+ "version": "8.10.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.10.0.tgz",
+ "integrity": "sha512-Oq4uZ7JFr9d1ZunE/QKy5egcDRXT/FrS2z/nlxzPua2VHFtmMvFNDvpq1m/hq0ra+T52aUezfcjGRIB7vNJF9w==",
"dev": true,
"dependencies": {
"@eslint-community/eslint-utils": "^4.4.0",
- "@typescript-eslint/scope-manager": "8.8.1",
- "@typescript-eslint/types": "8.8.1",
- "@typescript-eslint/typescript-estree": "8.8.1"
+ "@typescript-eslint/scope-manager": "8.10.0",
+ "@typescript-eslint/types": "8.10.0",
+ "@typescript-eslint/typescript-estree": "8.10.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2688,6 +2868,81 @@
"eslint": "^8.57.0 || ^9.0.0"
}
},
+ "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": {
+ "version": "8.10.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.10.0.tgz",
+ "integrity": "sha512-AgCaEjhfql9MDKjMUxWvH7HjLeBqMCBfIaBbzzIcBbQPZE7CPh1m6FF+L75NUMJFMLYhCywJXIDEMa3//1A0dw==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.10.0",
+ "@typescript-eslint/visitor-keys": "8.10.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": {
+ "version": "8.10.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.10.0.tgz",
+ "integrity": "sha512-k/E48uzsfJCRRbGLapdZgrX52csmWJ2rcowwPvOZ8lwPUv3xW6CcFeJAXgx4uJm+Ge4+a4tFOkdYvSpxhRhg1w==",
+ "dev": true,
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": {
+ "version": "8.10.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.10.0.tgz",
+ "integrity": "sha512-3OE0nlcOHaMvQ8Xu5gAfME3/tWVDpb/HxtpUZ1WeOAksZ/h/gwrBzCklaGzwZT97/lBbbxJ16dMA98JMEngW4w==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.10.0",
+ "@typescript-eslint/visitor-keys": "8.10.0",
+ "debug": "^4.3.4",
+ "fast-glob": "^3.3.2",
+ "is-glob": "^4.0.3",
+ "minimatch": "^9.0.4",
+ "semver": "^7.6.0",
+ "ts-api-utils": "^1.3.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependenciesMeta": {
+ "typescript": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": {
+ "version": "8.10.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.10.0.tgz",
+ "integrity": "sha512-k8nekgqwr7FadWk548Lfph6V3r9OVqjzAIVskE7orMZR23cGJjAOVazsZSJW+ElyjfTM4wx/1g88Mi70DDtG9A==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.10.0",
+ "eslint-visitor-keys": "^3.4.3"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
"node_modules/@typescript-eslint/visitor-keys": {
"version": "8.8.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.8.1.tgz",
@@ -3300,17 +3555,17 @@
}
},
"node_modules/eslint": {
- "version": "9.12.0",
- "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.12.0.tgz",
- "integrity": "sha512-UVIOlTEWxwIopRL1wgSQYdnVDcEvs2wyaO6DGo5mXqe3r16IoCNWkR29iHhyaP4cICWjbgbmFUGAhh0GJRuGZw==",
+ "version": "9.13.0",
+ "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.13.0.tgz",
+ "integrity": "sha512-EYZK6SX6zjFHST/HRytOdA/zE72Cq/bfw45LSyuwrdvcclb/gqV8RRQxywOBEWO2+WDpva6UZa4CcDeJKzUCFA==",
"dev": true,
"dependencies": {
"@eslint-community/eslint-utils": "^4.2.0",
"@eslint-community/regexpp": "^4.11.0",
"@eslint/config-array": "^0.18.0",
- "@eslint/core": "^0.6.0",
+ "@eslint/core": "^0.7.0",
"@eslint/eslintrc": "^3.1.0",
- "@eslint/js": "9.12.0",
+ "@eslint/js": "9.13.0",
"@eslint/plugin-kit": "^0.2.0",
"@humanfs/node": "^0.16.5",
"@humanwhocodes/module-importer": "^1.0.1",
@@ -3372,9 +3627,9 @@
}
},
"node_modules/eslint-plugin-react-refresh": {
- "version": "0.4.12",
- "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.12.tgz",
- "integrity": "sha512-9neVjoGv20FwYtCP6CB1dzR1vr57ZDNOXst21wd2xJ/cTlM2xLq0GWVlSNTdMn/4BtP6cHYBMCSp1wFBJ9jBsg==",
+ "version": "0.4.13",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.13.tgz",
+ "integrity": "sha512-f1EppwrpJRWmqDTyvAyomFVDYRtrS7iTEqv3nokETnMiMzs2SSTmKRTACce4O2p4jYyowiSMvpdwC/RLcMFhuQ==",
"dev": true,
"peerDependencies": {
"eslint": ">=7"
@@ -5899,9 +6154,9 @@
"optional": true
},
"node_modules/vite": {
- "version": "5.4.8",
- "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.8.tgz",
- "integrity": "sha512-FqrItQ4DT1NC4zCUqMB4c4AZORMKIa0m8/URVCZ77OZ/QSNeJ54bU1vrFADbDsuwfIPcgknRkmqakQcgnL4GiQ==",
+ "version": "5.4.9",
+ "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.9.tgz",
+ "integrity": "sha512-20OVpJHh0PAM0oSOELa5GaZNWeDjcAvQjGXy2Uyr+Tp+/D2/Hdz6NLgpJLsarPTA2QJ6v8mX2P1ZfbsSKvdMkg==",
"dev": true,
"dependencies": {
"esbuild": "^0.21.3",
diff --git a/playground/package.json b/playground/package.json
index 1175a6a788..8bb52949fa 100644
--- a/playground/package.json
+++ b/playground/package.json
@@ -20,13 +20,13 @@
"@types/react": "^18.3.11",
"@types/react-dom": "^18.3.1",
"@types/swagger-ui-react": "^4.18.3",
- "@typescript-eslint/eslint-plugin": "^8.8.1",
- "@typescript-eslint/parser": "^8.8.1",
+ "@typescript-eslint/eslint-plugin": "^8.10.0",
+ "@typescript-eslint/parser": "^8.10.0",
"@vitejs/plugin-react-swc": "^3.7.1",
- "eslint": "^9.12.0",
+ "eslint": "^9.13.0",
"eslint-plugin-react-hooks": "^5.0.0",
- "eslint-plugin-react-refresh": "^0.4.12",
+ "eslint-plugin-react-refresh": "^0.4.13",
"typescript": "^5.6.3",
- "vite": "^5.4.8"
+ "vite": "^5.4.9"
}
}
From 7799b315e3298d2ff1aef2719021bd23c5558c53 Mon Sep 17 00:00:00 2001
From: Shahzad Lone
Date: Mon, 21 Oct 2024 12:42:23 -0400
Subject: [PATCH 03/47] refactor(i): Lint casts & move panics under `must`
funcs (#3134)
## Relevant issue(s)
Resolves #3103
## Description
- Fix the BadRequest in the acp handlers to now be panics
- Make the panics documented under `must` functions
- Add the linter to avoid increasing of undocumented panics
- Ignore linter in existing files outside cli and http (this should
either be resolved gradually over time or in a single PR, regardless
would prefer to not block this PR because of them)
---
cli/p2p_info.go | 4 +--
cli/purge.go | 4 +--
cli/utils.go | 17 +++++++---
http/handler.go | 6 ++--
http/handler_acp.go | 20 ++---------
http/handler_ccip.go | 4 +--
http/handler_collection.go | 20 +++++------
http/handler_extras.go | 3 +-
http/handler_lens.go | 10 +++---
http/handler_p2p.go | 14 ++++----
http/handler_store.go | 28 +++++++--------
http/handler_tx.go | 35 +++++++++----------
http/middleware.go | 24 ++-----------
http/utils.go | 68 +++++++++++++++++++++++++++++++++++++
tools/configs/golangci.yaml | 64 ++++++++++++++++++++++++++++++++++
15 files changed, 209 insertions(+), 112 deletions(-)
diff --git a/cli/p2p_info.go b/cli/p2p_info.go
index 36adfb8fac..cc30d37701 100644
--- a/cli/p2p_info.go
+++ b/cli/p2p_info.go
@@ -12,8 +12,6 @@ package cli
import (
"github.com/spf13/cobra"
-
- "github.com/sourcenetwork/defradb/http"
)
func MakeP2PInfoCommand() *cobra.Command {
@@ -22,7 +20,7 @@ func MakeP2PInfoCommand() *cobra.Command {
Short: "Get peer info from a DefraDB node",
Long: `Get peer info from a DefraDB node`,
RunE: func(cmd *cobra.Command, args []string) error {
- db := cmd.Context().Value(dbContextKey).(*http.Client)
+ db := mustGetContextHTTP(cmd)
return writeJSON(cmd, db.PeerInfo())
},
}
diff --git a/cli/purge.go b/cli/purge.go
index 5880e021b8..0e2552b625 100644
--- a/cli/purge.go
+++ b/cli/purge.go
@@ -12,8 +12,6 @@ package cli
import (
"github.com/spf13/cobra"
-
- "github.com/sourcenetwork/defradb/http"
)
func MakePurgeCommand() *cobra.Command {
@@ -24,7 +22,7 @@ func MakePurgeCommand() *cobra.Command {
Long: `Delete all persisted data and restart.
WARNING this operation cannot be reversed.`,
RunE: func(cmd *cobra.Command, args []string) error {
- db := mustGetContextDB(cmd).(*http.Client)
+ db := mustGetContextHTTP(cmd)
if !force {
return ErrPurgeForceFlagRequired
}
diff --git a/cli/utils.go b/cli/utils.go
index 845cea671b..fb9b5a6d3f 100644
--- a/cli/utils.go
+++ b/cli/utils.go
@@ -60,35 +60,42 @@ const (
//
// If a db is not set in the current context this function panics.
func mustGetContextDB(cmd *cobra.Command) client.DB {
- return cmd.Context().Value(dbContextKey).(client.DB)
+ return cmd.Context().Value(dbContextKey).(client.DB) //nolint:forcetypeassert
}
// mustGetContextStore returns the store for the current command context.
//
// If a store is not set in the current context this function panics.
func mustGetContextStore(cmd *cobra.Command) client.Store {
- return cmd.Context().Value(dbContextKey).(client.Store)
+ return cmd.Context().Value(dbContextKey).(client.Store) //nolint:forcetypeassert
}
// mustGetContextP2P returns the p2p implementation for the current command context.
//
// If a p2p implementation is not set in the current context this function panics.
func mustGetContextP2P(cmd *cobra.Command) client.P2P {
- return cmd.Context().Value(dbContextKey).(client.P2P)
+ return cmd.Context().Value(dbContextKey).(client.P2P) //nolint:forcetypeassert
+}
+
+// mustGetContextHTTP returns the http client for the current command context.
+//
+// If http client is not set in the current context this function panics.
+func mustGetContextHTTP(cmd *cobra.Command) *http.Client {
+ return cmd.Context().Value(dbContextKey).(*http.Client) //nolint:forcetypeassert
}
// mustGetContextConfig returns the config for the current command context.
//
// If a config is not set in the current context this function panics.
func mustGetContextConfig(cmd *cobra.Command) *viper.Viper {
- return cmd.Context().Value(cfgContextKey).(*viper.Viper)
+ return cmd.Context().Value(cfgContextKey).(*viper.Viper) //nolint:forcetypeassert
}
// mustGetContextRootDir returns the rootdir for the current command context.
//
// If a rootdir is not set in the current context this function panics.
func mustGetContextRootDir(cmd *cobra.Command) string {
- return cmd.Context().Value(rootDirContextKey).(string)
+ return cmd.Context().Value(rootDirContextKey).(string) //nolint:forcetypeassert
}
// tryGetContextCollection returns the collection for the current command context
diff --git a/http/handler.go b/http/handler.go
index cdb09767c6..336dfc54d3 100644
--- a/http/handler.go
+++ b/http/handler.go
@@ -12,7 +12,6 @@ package http
import (
"context"
- "fmt"
"net/http"
"sync"
@@ -100,9 +99,10 @@ func NewHandler(db client.DB) (*Handler, error) {
func (h *Handler) Transaction(id uint64) (datastore.Txn, error) {
tx, ok := h.txs.Load(id)
if !ok {
- return nil, fmt.Errorf("invalid transaction id")
+ return nil, ErrInvalidTransactionId
}
- return tx.(datastore.Txn), nil
+
+ return mustGetDataStoreTxn(tx), nil
}
func (h *Handler) ServeHTTP(w http.ResponseWriter, req *http.Request) {
diff --git a/http/handler_acp.go b/http/handler_acp.go
index d359d5085e..f9ef17cbee 100644
--- a/http/handler_acp.go
+++ b/http/handler_acp.go
@@ -15,18 +15,12 @@ import (
"net/http"
"github.com/getkin/kin-openapi/openapi3"
-
- "github.com/sourcenetwork/defradb/client"
)
type acpHandler struct{}
func (s *acpHandler) AddPolicy(rw http.ResponseWriter, req *http.Request) {
- db, ok := req.Context().Value(dbContextKey).(client.DB)
- if !ok {
- responseJSON(rw, http.StatusBadRequest, errorResponse{NewErrFailedToGetContext("db")})
- return
- }
+ db := mustGetContextClientDB(req)
policyBytes, err := io.ReadAll(req.Body)
if err != nil {
@@ -47,11 +41,7 @@ func (s *acpHandler) AddPolicy(rw http.ResponseWriter, req *http.Request) {
}
func (s *acpHandler) AddDocActorRelationship(rw http.ResponseWriter, req *http.Request) {
- db, ok := req.Context().Value(dbContextKey).(client.DB)
- if !ok {
- responseJSON(rw, http.StatusBadRequest, errorResponse{NewErrFailedToGetContext("db")})
- return
- }
+ db := mustGetContextClientDB(req)
var message addDocActorRelationshipRequest
err := requestJSON(req, &message)
@@ -76,11 +66,7 @@ func (s *acpHandler) AddDocActorRelationship(rw http.ResponseWriter, req *http.R
}
func (s *acpHandler) DeleteDocActorRelationship(rw http.ResponseWriter, req *http.Request) {
- db, ok := req.Context().Value(dbContextKey).(client.DB)
- if !ok {
- responseJSON(rw, http.StatusBadRequest, errorResponse{NewErrFailedToGetContext("db")})
- return
- }
+ db := mustGetContextClientDB(req)
var message deleteDocActorRelationshipRequest
err := requestJSON(req, &message)
diff --git a/http/handler_ccip.go b/http/handler_ccip.go
index 5b9aeb5402..f4855d69d7 100644
--- a/http/handler_ccip.go
+++ b/http/handler_ccip.go
@@ -18,8 +18,6 @@ import (
"github.com/getkin/kin-openapi/openapi3"
"github.com/go-chi/chi/v5"
-
- "github.com/sourcenetwork/defradb/client"
)
type ccipHandler struct{}
@@ -35,7 +33,7 @@ type CCIPResponse struct {
// ExecCCIP handles GraphQL over Cross Chain Interoperability Protocol requests.
func (c *ccipHandler) ExecCCIP(rw http.ResponseWriter, req *http.Request) {
- store := req.Context().Value(dbContextKey).(client.Store)
+ store := mustGetContextClientStore(req)
var ccipReq CCIPRequest
switch req.Method {
diff --git a/http/handler_collection.go b/http/handler_collection.go
index 8f45a7948f..ddade699e3 100644
--- a/http/handler_collection.go
+++ b/http/handler_collection.go
@@ -40,7 +40,7 @@ type CollectionUpdateRequest struct {
}
func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) {
- col := req.Context().Value(colContextKey).(client.Collection)
+ col := mustGetContextClientCollection(req)
data, err := io.ReadAll(req.Body)
if err != nil {
@@ -89,7 +89,7 @@ func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) {
}
func (s *collectionHandler) DeleteWithFilter(rw http.ResponseWriter, req *http.Request) {
- col := req.Context().Value(colContextKey).(client.Collection)
+ col := mustGetContextClientCollection(req)
var request CollectionDeleteRequest
if err := requestJSON(req, &request); err != nil {
@@ -106,7 +106,7 @@ func (s *collectionHandler) DeleteWithFilter(rw http.ResponseWriter, req *http.R
}
func (s *collectionHandler) UpdateWithFilter(rw http.ResponseWriter, req *http.Request) {
- col := req.Context().Value(colContextKey).(client.Collection)
+ col := mustGetContextClientCollection(req)
var request CollectionUpdateRequest
if err := requestJSON(req, &request); err != nil {
@@ -123,7 +123,7 @@ func (s *collectionHandler) UpdateWithFilter(rw http.ResponseWriter, req *http.R
}
func (s *collectionHandler) Update(rw http.ResponseWriter, req *http.Request) {
- col := req.Context().Value(colContextKey).(client.Collection)
+ col := mustGetContextClientCollection(req)
docID, err := client.NewDocIDFromString(chi.URLParam(req, "docID"))
if err != nil {
@@ -160,7 +160,7 @@ func (s *collectionHandler) Update(rw http.ResponseWriter, req *http.Request) {
}
func (s *collectionHandler) Delete(rw http.ResponseWriter, req *http.Request) {
- col := req.Context().Value(colContextKey).(client.Collection)
+ col := mustGetContextClientCollection(req)
docID, err := client.NewDocIDFromString(chi.URLParam(req, "docID"))
if err != nil {
@@ -177,7 +177,7 @@ func (s *collectionHandler) Delete(rw http.ResponseWriter, req *http.Request) {
}
func (s *collectionHandler) Get(rw http.ResponseWriter, req *http.Request) {
- col := req.Context().Value(colContextKey).(client.Collection)
+ col := mustGetContextClientCollection(req)
showDeleted, _ := strconv.ParseBool(req.URL.Query().Get("show_deleted"))
docID, err := client.NewDocIDFromString(chi.URLParam(req, "docID"))
@@ -211,7 +211,7 @@ type DocIDResult struct {
}
func (s *collectionHandler) GetAllDocIDs(rw http.ResponseWriter, req *http.Request) {
- col := req.Context().Value(colContextKey).(client.Collection)
+ col := mustGetContextClientCollection(req)
flusher, ok := rw.(http.Flusher)
if !ok {
@@ -252,7 +252,7 @@ func (s *collectionHandler) GetAllDocIDs(rw http.ResponseWriter, req *http.Reque
}
func (s *collectionHandler) CreateIndex(rw http.ResponseWriter, req *http.Request) {
- col := req.Context().Value(colContextKey).(client.Collection)
+ col := mustGetContextClientCollection(req)
var indexDesc client.IndexDescription
if err := requestJSON(req, &indexDesc); err != nil {
@@ -268,7 +268,7 @@ func (s *collectionHandler) CreateIndex(rw http.ResponseWriter, req *http.Reques
}
func (s *collectionHandler) GetIndexes(rw http.ResponseWriter, req *http.Request) {
- store := req.Context().Value(dbContextKey).(client.Store)
+ store := mustGetContextClientStore(req)
indexesMap, err := store.GetAllIndexes(req.Context())
if err != nil {
@@ -283,7 +283,7 @@ func (s *collectionHandler) GetIndexes(rw http.ResponseWriter, req *http.Request
}
func (s *collectionHandler) DropIndex(rw http.ResponseWriter, req *http.Request) {
- col := req.Context().Value(colContextKey).(client.Collection)
+ col := mustGetContextClientCollection(req)
err := col.DropIndex(req.Context(), chi.URLParam(req, "index"))
if err != nil {
diff --git a/http/handler_extras.go b/http/handler_extras.go
index c891e9befc..1f14cc40a7 100644
--- a/http/handler_extras.go
+++ b/http/handler_extras.go
@@ -15,7 +15,6 @@ import (
"github.com/getkin/kin-openapi/openapi3"
- "github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/event"
)
@@ -23,7 +22,7 @@ import (
type extrasHandler struct{}
func (s *extrasHandler) Purge(rw http.ResponseWriter, req *http.Request) {
- db := req.Context().Value(dbContextKey).(client.DB)
+ db := mustGetContextClientDB(req)
rw.WriteHeader(http.StatusOK) // write the response before we restart to purge
db.Events().Publish(event.NewMessage(event.PurgeName, nil))
}
diff --git a/http/handler_lens.go b/http/handler_lens.go
index 94ef9c2abe..f6d20465f0 100644
--- a/http/handler_lens.go
+++ b/http/handler_lens.go
@@ -15,14 +15,12 @@ import (
"github.com/getkin/kin-openapi/openapi3"
"github.com/sourcenetwork/immutable/enumerable"
-
- "github.com/sourcenetwork/defradb/client"
)
type lensHandler struct{}
func (s *lensHandler) ReloadLenses(rw http.ResponseWriter, req *http.Request) {
- store := req.Context().Value(dbContextKey).(client.Store)
+ store := mustGetContextClientStore(req)
err := store.LensRegistry().ReloadLenses(req.Context())
if err != nil {
@@ -33,7 +31,7 @@ func (s *lensHandler) ReloadLenses(rw http.ResponseWriter, req *http.Request) {
}
func (s *lensHandler) SetMigration(rw http.ResponseWriter, req *http.Request) {
- store := req.Context().Value(dbContextKey).(client.Store)
+ store := mustGetContextClientStore(req)
var request setMigrationRequest
if err := requestJSON(req, &request); err != nil {
@@ -50,7 +48,7 @@ func (s *lensHandler) SetMigration(rw http.ResponseWriter, req *http.Request) {
}
func (s *lensHandler) MigrateUp(rw http.ResponseWriter, req *http.Request) {
- store := req.Context().Value(dbContextKey).(client.Store)
+ store := mustGetContextClientStore(req)
var request migrateRequest
if err := requestJSON(req, &request); err != nil {
@@ -75,7 +73,7 @@ func (s *lensHandler) MigrateUp(rw http.ResponseWriter, req *http.Request) {
}
func (s *lensHandler) MigrateDown(rw http.ResponseWriter, req *http.Request) {
- store := req.Context().Value(dbContextKey).(client.Store)
+ store := mustGetContextClientStore(req)
var request migrateRequest
if err := requestJSON(req, &request); err != nil {
diff --git a/http/handler_p2p.go b/http/handler_p2p.go
index 13fc88a90c..941b6f4b5b 100644
--- a/http/handler_p2p.go
+++ b/http/handler_p2p.go
@@ -21,7 +21,7 @@ import (
type p2pHandler struct{}
func (s *p2pHandler) PeerInfo(rw http.ResponseWriter, req *http.Request) {
- p2p, ok := req.Context().Value(dbContextKey).(client.P2P)
+ p2p, ok := tryGetContextClientP2P(req)
if !ok {
responseJSON(rw, http.StatusBadRequest, errorResponse{ErrP2PDisabled})
return
@@ -30,7 +30,7 @@ func (s *p2pHandler) PeerInfo(rw http.ResponseWriter, req *http.Request) {
}
func (s *p2pHandler) SetReplicator(rw http.ResponseWriter, req *http.Request) {
- p2p, ok := req.Context().Value(dbContextKey).(client.P2P)
+ p2p, ok := tryGetContextClientP2P(req)
if !ok {
responseJSON(rw, http.StatusBadRequest, errorResponse{ErrP2PDisabled})
return
@@ -50,7 +50,7 @@ func (s *p2pHandler) SetReplicator(rw http.ResponseWriter, req *http.Request) {
}
func (s *p2pHandler) DeleteReplicator(rw http.ResponseWriter, req *http.Request) {
- p2p, ok := req.Context().Value(dbContextKey).(client.P2P)
+ p2p, ok := tryGetContextClientP2P(req)
if !ok {
responseJSON(rw, http.StatusBadRequest, errorResponse{ErrP2PDisabled})
return
@@ -70,7 +70,7 @@ func (s *p2pHandler) DeleteReplicator(rw http.ResponseWriter, req *http.Request)
}
func (s *p2pHandler) GetAllReplicators(rw http.ResponseWriter, req *http.Request) {
- p2p, ok := req.Context().Value(dbContextKey).(client.P2P)
+ p2p, ok := tryGetContextClientP2P(req)
if !ok {
responseJSON(rw, http.StatusBadRequest, errorResponse{ErrP2PDisabled})
return
@@ -85,7 +85,7 @@ func (s *p2pHandler) GetAllReplicators(rw http.ResponseWriter, req *http.Request
}
func (s *p2pHandler) AddP2PCollection(rw http.ResponseWriter, req *http.Request) {
- p2p, ok := req.Context().Value(dbContextKey).(client.P2P)
+ p2p, ok := tryGetContextClientP2P(req)
if !ok {
responseJSON(rw, http.StatusBadRequest, errorResponse{ErrP2PDisabled})
return
@@ -105,7 +105,7 @@ func (s *p2pHandler) AddP2PCollection(rw http.ResponseWriter, req *http.Request)
}
func (s *p2pHandler) RemoveP2PCollection(rw http.ResponseWriter, req *http.Request) {
- p2p, ok := req.Context().Value(dbContextKey).(client.P2P)
+ p2p, ok := tryGetContextClientP2P(req)
if !ok {
responseJSON(rw, http.StatusBadRequest, errorResponse{ErrP2PDisabled})
return
@@ -125,7 +125,7 @@ func (s *p2pHandler) RemoveP2PCollection(rw http.ResponseWriter, req *http.Reque
}
func (s *p2pHandler) GetAllP2PCollections(rw http.ResponseWriter, req *http.Request) {
- p2p, ok := req.Context().Value(dbContextKey).(client.P2P)
+ p2p, ok := tryGetContextClientP2P(req)
if !ok {
responseJSON(rw, http.StatusBadRequest, errorResponse{ErrP2PDisabled})
return
diff --git a/http/handler_store.go b/http/handler_store.go
index e08f2aa9cf..86ab9aeb2d 100644
--- a/http/handler_store.go
+++ b/http/handler_store.go
@@ -26,7 +26,7 @@ import (
type storeHandler struct{}
func (s *storeHandler) BasicImport(rw http.ResponseWriter, req *http.Request) {
- store := req.Context().Value(dbContextKey).(client.Store)
+ store := mustGetContextClientStore(req)
var config client.BackupConfig
if err := requestJSON(req, &config); err != nil {
@@ -42,7 +42,7 @@ func (s *storeHandler) BasicImport(rw http.ResponseWriter, req *http.Request) {
}
func (s *storeHandler) BasicExport(rw http.ResponseWriter, req *http.Request) {
- store := req.Context().Value(dbContextKey).(client.Store)
+ store := mustGetContextClientStore(req)
var config client.BackupConfig
if err := requestJSON(req, &config); err != nil {
@@ -58,7 +58,7 @@ func (s *storeHandler) BasicExport(rw http.ResponseWriter, req *http.Request) {
}
func (s *storeHandler) AddSchema(rw http.ResponseWriter, req *http.Request) {
- store := req.Context().Value(dbContextKey).(client.Store)
+ store := mustGetContextClientStore(req)
schema, err := io.ReadAll(req.Body)
if err != nil {
@@ -74,7 +74,7 @@ func (s *storeHandler) AddSchema(rw http.ResponseWriter, req *http.Request) {
}
func (s *storeHandler) PatchSchema(rw http.ResponseWriter, req *http.Request) {
- store := req.Context().Value(dbContextKey).(client.Store)
+ store := mustGetContextClientStore(req)
var message patchSchemaRequest
err := requestJSON(req, &message)
@@ -92,7 +92,7 @@ func (s *storeHandler) PatchSchema(rw http.ResponseWriter, req *http.Request) {
}
func (s *storeHandler) PatchCollection(rw http.ResponseWriter, req *http.Request) {
- store := req.Context().Value(dbContextKey).(client.Store)
+ store := mustGetContextClientStore(req)
var patch string
err := requestJSON(req, &patch)
@@ -110,7 +110,7 @@ func (s *storeHandler) PatchCollection(rw http.ResponseWriter, req *http.Request
}
func (s *storeHandler) SetActiveSchemaVersion(rw http.ResponseWriter, req *http.Request) {
- store := req.Context().Value(dbContextKey).(client.Store)
+ store := mustGetContextClientStore(req)
schemaVersionID, err := io.ReadAll(req.Body)
if err != nil {
@@ -126,7 +126,7 @@ func (s *storeHandler) SetActiveSchemaVersion(rw http.ResponseWriter, req *http.
}
func (s *storeHandler) AddView(rw http.ResponseWriter, req *http.Request) {
- store := req.Context().Value(dbContextKey).(client.Store)
+ store := mustGetContextClientStore(req)
var message addViewRequest
err := requestJSON(req, &message)
@@ -145,7 +145,7 @@ func (s *storeHandler) AddView(rw http.ResponseWriter, req *http.Request) {
}
func (s *storeHandler) SetMigration(rw http.ResponseWriter, req *http.Request) {
- store := req.Context().Value(dbContextKey).(client.Store)
+ store := mustGetContextClientStore(req)
var cfg client.LensConfig
if err := requestJSON(req, &cfg); err != nil {
@@ -162,7 +162,7 @@ func (s *storeHandler) SetMigration(rw http.ResponseWriter, req *http.Request) {
}
func (s *storeHandler) GetCollection(rw http.ResponseWriter, req *http.Request) {
- store := req.Context().Value(dbContextKey).(client.Store)
+ store := mustGetContextClientStore(req)
options := client.CollectionFetchOptions{}
if req.URL.Query().Has("name") {
@@ -198,7 +198,7 @@ func (s *storeHandler) GetCollection(rw http.ResponseWriter, req *http.Request)
}
func (s *storeHandler) GetSchema(rw http.ResponseWriter, req *http.Request) {
- store := req.Context().Value(dbContextKey).(client.Store)
+ store := mustGetContextClientStore(req)
options := client.SchemaFetchOptions{}
if req.URL.Query().Has("version_id") {
@@ -220,7 +220,7 @@ func (s *storeHandler) GetSchema(rw http.ResponseWriter, req *http.Request) {
}
func (s *storeHandler) RefreshViews(rw http.ResponseWriter, req *http.Request) {
- store := req.Context().Value(dbContextKey).(client.Store)
+ store := mustGetContextClientStore(req)
options := client.CollectionFetchOptions{}
if req.URL.Query().Has("name") {
@@ -252,7 +252,7 @@ func (s *storeHandler) RefreshViews(rw http.ResponseWriter, req *http.Request) {
}
func (s *storeHandler) GetAllIndexes(rw http.ResponseWriter, req *http.Request) {
- store := req.Context().Value(dbContextKey).(client.Store)
+ store := mustGetContextClientStore(req)
indexes, err := store.GetAllIndexes(req.Context())
if err != nil {
@@ -263,7 +263,7 @@ func (s *storeHandler) GetAllIndexes(rw http.ResponseWriter, req *http.Request)
}
func (s *storeHandler) PrintDump(rw http.ResponseWriter, req *http.Request) {
- db := req.Context().Value(dbContextKey).(client.DB)
+ db := mustGetContextClientDB(req)
if err := db.PrintDump(req.Context()); err != nil {
responseJSON(rw, http.StatusBadRequest, errorResponse{err})
@@ -279,7 +279,7 @@ type GraphQLRequest struct {
}
func (s *storeHandler) ExecRequest(rw http.ResponseWriter, req *http.Request) {
- store := req.Context().Value(dbContextKey).(client.Store)
+ store := mustGetContextClientStore(req)
var request GraphQLRequest
switch {
diff --git a/http/handler_tx.go b/http/handler_tx.go
index e28acab3df..e1ac38376c 100644
--- a/http/handler_tx.go
+++ b/http/handler_tx.go
@@ -13,13 +13,9 @@ package http
import (
"net/http"
"strconv"
- "sync"
"github.com/getkin/kin-openapi/openapi3"
"github.com/go-chi/chi/v5"
-
- "github.com/sourcenetwork/defradb/client"
- "github.com/sourcenetwork/defradb/datastore"
)
type txHandler struct{}
@@ -29,8 +25,8 @@ type CreateTxResponse struct {
}
func (h *txHandler) NewTxn(rw http.ResponseWriter, req *http.Request) {
- db := req.Context().Value(dbContextKey).(client.DB)
- txs := req.Context().Value(txsContextKey).(*sync.Map)
+ db := mustGetContextClientDB(req)
+ txs := mustGetContextSyncMap(req)
readOnly, _ := strconv.ParseBool(req.URL.Query().Get("read_only"))
tx, err := db.NewTxn(req.Context(), readOnly)
@@ -43,8 +39,8 @@ func (h *txHandler) NewTxn(rw http.ResponseWriter, req *http.Request) {
}
func (h *txHandler) NewConcurrentTxn(rw http.ResponseWriter, req *http.Request) {
- db := req.Context().Value(dbContextKey).(client.DB)
- txs := req.Context().Value(txsContextKey).(*sync.Map)
+ db := mustGetContextClientDB(req)
+ txs := mustGetContextSyncMap(req)
readOnly, _ := strconv.ParseBool(req.URL.Query().Get("read_only"))
tx, err := db.NewConcurrentTxn(req.Context(), readOnly)
@@ -57,41 +53,46 @@ func (h *txHandler) NewConcurrentTxn(rw http.ResponseWriter, req *http.Request)
}
func (h *txHandler) Commit(rw http.ResponseWriter, req *http.Request) {
- txs := req.Context().Value(txsContextKey).(*sync.Map)
+ txs := mustGetContextSyncMap(req)
- txId, err := strconv.ParseUint(chi.URLParam(req, "id"), 10, 64)
+ txID, err := strconv.ParseUint(chi.URLParam(req, "id"), 10, 64)
if err != nil {
responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidTransactionId})
return
}
- txVal, ok := txs.Load(txId)
+ txVal, ok := txs.Load(txID)
if !ok {
responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidTransactionId})
return
}
- err = txVal.(datastore.Txn).Commit(req.Context())
+
+ dsTxn := mustGetDataStoreTxn(txVal)
+ err = dsTxn.Commit(req.Context())
if err != nil {
responseJSON(rw, http.StatusBadRequest, errorResponse{err})
return
}
- txs.Delete(txId)
+ txs.Delete(txID)
rw.WriteHeader(http.StatusOK)
}
func (h *txHandler) Discard(rw http.ResponseWriter, req *http.Request) {
- txs := req.Context().Value(txsContextKey).(*sync.Map)
+ txs := mustGetContextSyncMap(req)
- txId, err := strconv.ParseUint(chi.URLParam(req, "id"), 10, 64)
+ txID, err := strconv.ParseUint(chi.URLParam(req, "id"), 10, 64)
if err != nil {
responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidTransactionId})
return
}
- txVal, ok := txs.LoadAndDelete(txId)
+ txVal, ok := txs.LoadAndDelete(txID)
if !ok {
responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidTransactionId})
return
}
- txVal.(datastore.Txn).Discard(req.Context())
+
+ dsTxn := mustGetDataStoreTxn(txVal)
+ dsTxn.Discard(req.Context())
+
rw.WriteHeader(http.StatusOK)
}
diff --git a/http/middleware.go b/http/middleware.go
index d02c3d6470..cc98473711 100644
--- a/http/middleware.go
+++ b/http/middleware.go
@@ -26,26 +26,6 @@ import (
"github.com/sourcenetwork/defradb/internal/db"
)
-const (
- // txHeaderName is the name of the transaction header.
- // This header should contain a valid transaction id.
- txHeaderName = "x-defradb-tx"
-)
-
-type contextKey string
-
-var (
- // txsContextKey is the context key for the transaction *sync.Map
- txsContextKey = contextKey("txs")
- // dbContextKey is the context key for the client.DB
- dbContextKey = contextKey("db")
- // colContextKey is the context key for the client.Collection
- //
- // If a transaction exists, all operations will be executed
- // in the current transaction context.
- colContextKey = contextKey("col")
-)
-
// CorsMiddleware handles cross origin request
func CorsMiddleware(allowedOrigins []string) func(http.Handler) http.Handler {
return cors.Handler(cors.Options{
@@ -76,7 +56,7 @@ func ApiMiddleware(db client.DB, txs *sync.Map) func(http.Handler) http.Handler
// TransactionMiddleware sets the transaction context for the current request.
func TransactionMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
- txs := req.Context().Value(txsContextKey).(*sync.Map)
+ txs := mustGetContextSyncMap(req)
txValue := req.Header.Get(txHeaderName)
if txValue == "" {
@@ -104,7 +84,7 @@ func TransactionMiddleware(next http.Handler) http.Handler {
// CollectionMiddleware sets the collection context for the current request.
func CollectionMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
- db := req.Context().Value(dbContextKey).(client.DB)
+ db := mustGetContextClientDB(req)
col, err := db.GetCollectionByName(req.Context(), chi.URLParam(req, "name"))
if err != nil {
diff --git a/http/utils.go b/http/utils.go
index 176fe3d035..d371c802e6 100644
--- a/http/utils.go
+++ b/http/utils.go
@@ -14,8 +14,76 @@ import (
"encoding/json"
"io"
"net/http"
+ "sync"
+
+ "github.com/sourcenetwork/defradb/client"
+ "github.com/sourcenetwork/defradb/datastore"
+)
+
+const (
+ // txHeaderName is the name of the transaction header.
+ // This header should contain a valid transaction id.
+ txHeaderName = "x-defradb-tx"
+)
+
+type contextKey string
+
+var (
+ // txsContextKey is the context key for the transaction *sync.Map
+ txsContextKey = contextKey("txs")
+ // dbContextKey is the context key for the client.DB
+ dbContextKey = contextKey("db")
+ // colContextKey is the context key for the client.Collection
+ //
+ // If a transaction exists, all operations will be executed
+ // in the current transaction context.
+ colContextKey = contextKey("col")
)
+// mustGetContextClientCollection returns the client collection from the http request context or panics.
+//
+// This should only be called from functions within the http package.
+func mustGetContextClientCollection(req *http.Request) client.Collection {
+ return req.Context().Value(colContextKey).(client.Collection) //nolint:forcetypeassert
+}
+
+// mustGetContextSyncMap returns the sync map from the http request context or panics.
+//
+// This should only be called from functions within the http package.
+func mustGetContextSyncMap(req *http.Request) *sync.Map {
+ return req.Context().Value(txsContextKey).(*sync.Map) //nolint:forcetypeassert
+}
+
+// mustGetContextClientDB returns the client DB from the http request context or panics.
+//
+// This should only be called from functions within the http package.
+func mustGetContextClientDB(req *http.Request) client.DB {
+ return req.Context().Value(dbContextKey).(client.DB) //nolint:forcetypeassert
+}
+
+// mustGetContextClientStore returns the client store from the http request context or panics.
+//
+// This should only be called from functions within the http package.
+func mustGetContextClientStore(req *http.Request) client.Store {
+ return req.Context().Value(dbContextKey).(client.Store) //nolint:forcetypeassert
+}
+
+// mustGetDataStoreTxn returns the datastore transaction or panics.
+//
+// This should only be called from functions within the http package.
+func mustGetDataStoreTxn(tx any) datastore.Txn {
+ return tx.(datastore.Txn) //nolint:forcetypeassert
+}
+
+// tryGetContextClientP2P returns the P2P client from the http request context and a boolean
+// indicating if p2p was enabled.
+//
+// This should only be called from functions within the http package.
+func tryGetContextClientP2P(req *http.Request) (client.P2P, bool) {
+ p2p, ok := req.Context().Value(dbContextKey).(client.P2P)
+ return p2p, ok
+}
+
func requestJSON(req *http.Request, out any) error {
data, err := io.ReadAll(req.Body)
if err != nil {
diff --git a/tools/configs/golangci.yaml b/tools/configs/golangci.yaml
index d8162783df..1b4abe0718 100644
--- a/tools/configs/golangci.yaml
+++ b/tools/configs/golangci.yaml
@@ -113,6 +113,7 @@ linters:
- errcheck
- errorlint
- forbidigo
+ - forcetypeassert
- goconst
- gofmt
- goheader
@@ -151,6 +152,69 @@ issues:
linters:
- goheader
+ # Exclude running force type assert check in these file paths, we are ignoring these files for now
+ # because there are many linter complaints in them, we want to resolve all of them eventually.
+ # TODO: https://github.com/sourcenetwork/defradb/issues/3154
+ # Note: The last item must not have a `|` at the end, otherwise the linter ignores every file.
+ - path: "(\
+ client/document.go|\
+ client/normal_value_test.go|\
+ net/grpc.go|\
+ node/store_badger.go|\
+ internal/connor/eq.go|\
+ internal/core/block/block.go|\
+ internal/core/block/block_test.go|\
+ internal/core/key_test.go|\
+ internal/core/view_item.go|\
+ internal/db/backup.go|\
+ internal/db/base/compare.go|\
+ internal/db/collection.go|\
+ internal/db/context.go|\
+ internal/db/fetcher/indexer_iterators.go|\
+ internal/db/index_test.go|\
+ internal/db/indexed_docs_test.go|\
+ internal/db/merge.go|\
+ internal/db/merge_test.go|\
+ internal/db/p2p_replicator.go|\
+ internal/db/p2p_replicator_test.go|\
+ internal/db/p2p_schema_root.go|\
+ internal/db/p2p_schema_root_test.go|\
+ internal/lens/fetcher.go|\
+ internal/merkle/clock/clock.go|\
+ internal/merkle/crdt/merklecrdt.go|\
+ internal/planner/arbitrary_join.go|\
+ internal/planner/filter/complex.go|\
+ internal/planner/filter/copy.go|\
+ internal/planner/filter/copy_field.go|\
+ internal/planner/filter/copy_test.go|\
+ internal/planner/filter/extract_properties.go|\
+ internal/planner/filter/normalize.go|\
+ internal/planner/filter/unwrap_relation.go|\
+ internal/planner/group.go|\
+ internal/planner/lens.go|\
+ internal/planner/mapper/mapper.go|\
+ internal/planner/mapper/targetable.go|\
+ internal/planner/planner.go|\
+ internal/planner/sum.go|\
+ internal/planner/view.go|\
+ internal/request/graphql/parser/commit.go|\
+ internal/request/graphql/parser/filter.go|\
+ internal/request/graphql/parser/mutation.go|\
+ internal/request/graphql/parser/query.go|\
+ internal/request/graphql/parser/request.go|\
+ internal/request/graphql/schema/collection.go|\
+ internal/request/graphql/schema/generate.go|\
+ tests/gen|\
+ tests/integration/utils.go|\
+ tests/integration/explain.go|\
+ tests/integration/events.go|\
+ tests/integration/acp.go|\
+ tests/integration/schema/default_fields.go|\
+ tests/predefined/gen_predefined.go\
+ )"
+ linters:
+ - forcetypeassert
+
# Independently from option `exclude` we use default exclude patterns,
# it can be disabled by this option. To list all
# excluded by default patterns execute `golangci-lint run --help`.
From b268c0b5544f4382353d85f8d160c8ba2e68c74a Mon Sep 17 00:00:00 2001
From: Fred Carle
Date: Mon, 21 Oct 2024 15:52:53 -0400
Subject: [PATCH 04/47] ci: Freeze goreleaser version and fix amd64 path
(#3170)
## Relevant issue(s)
Resolves #3169
## Description
This PR freezes the goreleaser version to avoid bugs related to changes in
`latest`. At the same time we update the `amd64` build paths to what
goreleaser `2.3.2` supports.
---
.github/workflows/release.yml | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index fea78d61aa..8b35060eab 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -56,7 +56,7 @@ jobs:
uses: goreleaser/goreleaser-action@v6
with:
distribution: goreleaser-pro
- version: latest
+ version: 2.3.2
args: release --clean --split ${{ env.flags }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -67,7 +67,7 @@ jobs:
if: matrix.os == 'ubuntu-latest'
uses: actions/cache/save@v4
with:
- path: dist/linux_amd64
+ path: dist/linux_amd64_v1
key: linux-${{ env.sha_short }}
- name: Save cache on MacOS
@@ -81,7 +81,7 @@ jobs:
if: matrix.os == 'windows-latest'
uses: actions/cache/save@v4
with:
- path: dist/windows_amd64
+ path: dist/windows_amd64_v1
key: windows-${{ env.sha_short }}
enableCrossOsArchive: true
@@ -113,7 +113,7 @@ jobs:
id: restore-linux
uses: actions/cache/restore@v4
with:
- path: dist/linux_amd64
+ path: dist/linux_amd64_v1
key: linux-${{ env.sha_short }}
fail-on-cache-miss: true
@@ -129,7 +129,7 @@ jobs:
id: restore-windows
uses: actions/cache/restore@v4
with:
- path: dist/windows_amd64
+ path: dist/windows_amd64_v1
key: windows-${{ env.sha_short }}
fail-on-cache-miss: true
enableCrossOsArchive: true
@@ -147,7 +147,7 @@ jobs:
uses: goreleaser/goreleaser-action@v6
with:
distribution: goreleaser-pro
- version: latest
+ version: 2.3.2
args: continue --merge
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
From c1fcde0423874343a85c35e8ecec891c97acb755 Mon Sep 17 00:00:00 2001
From: Keenan Nemetz
Date: Wed, 23 Oct 2024 14:28:45 -0700
Subject: [PATCH 05/47] fix(i): SetReplicator and DeleteReplicator params
(#3175)
## Relevant issue(s)
Resolves #3159
## Description
This PR fixes an issue where replicator params that are not modifiable
were exposed to the client API.
## Tasks
- [x] I made sure the code is well commented, particularly
hard-to-understand areas.
- [x] I made sure the repository-held documentation is changed
accordingly.
- [x] I made sure the pull request title adheres to the conventional
commit style (the subset used in the project can be found in
[tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)).
- [x] I made sure to discuss its limitations such as threats to
validity, vulnerability to mistake and misuse, robustness to
invalidation of assumptions, resource requirements, ...
## How has this been tested?
Existing integration tests.
Specify the platform(s) on which this was tested:
- *(modify the list accordingly)*
- MacOS
---
cli/p2p_replicator_delete.go | 6 +--
cli/p2p_replicator_set.go | 6 +--
client/mocks/db.go | 24 ++++-----
client/p2p.go | 4 +-
client/replicator.go | 8 +++
docs/website/references/http/openapi.json | 27 +++++++++-
http/client_p2p.go | 4 +-
http/handler_p2p.go | 9 ++--
http/openapi.go | 1 +
internal/db/p2p_replicator.go | 14 ++---
internal/db/p2p_replicator_test.go | 66 +++++++++++------------
tests/clients/cli/wrapper.go | 8 +--
tests/clients/http/wrapper.go | 4 +-
tests/integration/p2p.go | 4 +-
14 files changed, 110 insertions(+), 75 deletions(-)
diff --git a/cli/p2p_replicator_delete.go b/cli/p2p_replicator_delete.go
index debd0ac280..bad8edd519 100644
--- a/cli/p2p_replicator_delete.go
+++ b/cli/p2p_replicator_delete.go
@@ -38,9 +38,9 @@ Example:
if err := json.Unmarshal([]byte(args[0]), &info); err != nil {
return err
}
- rep := client.Replicator{
- Info: info,
- Schemas: collections,
+ rep := client.ReplicatorParams{
+ Info: info,
+ Collections: collections,
}
return p2p.DeleteReplicator(cmd.Context(), rep)
},
diff --git a/cli/p2p_replicator_set.go b/cli/p2p_replicator_set.go
index 29109a920a..8d887fa384 100644
--- a/cli/p2p_replicator_set.go
+++ b/cli/p2p_replicator_set.go
@@ -38,9 +38,9 @@ Example:
if err := json.Unmarshal([]byte(args[0]), &info); err != nil {
return err
}
- rep := client.Replicator{
- Info: info,
- Schemas: collections,
+ rep := client.ReplicatorParams{
+ Info: info,
+ Collections: collections,
}
return p2p.SetReplicator(cmd.Context(), rep)
},
diff --git a/client/mocks/db.go b/client/mocks/db.go
index 024d2ea31c..7925c3c850 100644
--- a/client/mocks/db.go
+++ b/client/mocks/db.go
@@ -553,7 +553,7 @@ func (_c *DB_DeleteDocActorRelationship_Call) RunAndReturn(run func(context.Cont
}
// DeleteReplicator provides a mock function with given fields: ctx, rep
-func (_m *DB) DeleteReplicator(ctx context.Context, rep client.Replicator) error {
+func (_m *DB) DeleteReplicator(ctx context.Context, rep client.ReplicatorParams) error {
ret := _m.Called(ctx, rep)
if len(ret) == 0 {
@@ -561,7 +561,7 @@ func (_m *DB) DeleteReplicator(ctx context.Context, rep client.Replicator) error
}
var r0 error
- if rf, ok := ret.Get(0).(func(context.Context, client.Replicator) error); ok {
+ if rf, ok := ret.Get(0).(func(context.Context, client.ReplicatorParams) error); ok {
r0 = rf(ctx, rep)
} else {
r0 = ret.Error(0)
@@ -577,14 +577,14 @@ type DB_DeleteReplicator_Call struct {
// DeleteReplicator is a helper method to define mock.On call
// - ctx context.Context
-// - rep client.Replicator
+// - rep client.ReplicatorParams
func (_e *DB_Expecter) DeleteReplicator(ctx interface{}, rep interface{}) *DB_DeleteReplicator_Call {
return &DB_DeleteReplicator_Call{Call: _e.mock.On("DeleteReplicator", ctx, rep)}
}
-func (_c *DB_DeleteReplicator_Call) Run(run func(ctx context.Context, rep client.Replicator)) *DB_DeleteReplicator_Call {
+func (_c *DB_DeleteReplicator_Call) Run(run func(ctx context.Context, rep client.ReplicatorParams)) *DB_DeleteReplicator_Call {
_c.Call.Run(func(args mock.Arguments) {
- run(args[0].(context.Context), args[1].(client.Replicator))
+ run(args[0].(context.Context), args[1].(client.ReplicatorParams))
})
return _c
}
@@ -594,7 +594,7 @@ func (_c *DB_DeleteReplicator_Call) Return(_a0 error) *DB_DeleteReplicator_Call
return _c
}
-func (_c *DB_DeleteReplicator_Call) RunAndReturn(run func(context.Context, client.Replicator) error) *DB_DeleteReplicator_Call {
+func (_c *DB_DeleteReplicator_Call) RunAndReturn(run func(context.Context, client.ReplicatorParams) error) *DB_DeleteReplicator_Call {
_c.Call.Return(run)
return _c
}
@@ -1892,7 +1892,7 @@ func (_c *DB_SetMigration_Call) RunAndReturn(run func(context.Context, client.Le
}
// SetReplicator provides a mock function with given fields: ctx, rep
-func (_m *DB) SetReplicator(ctx context.Context, rep client.Replicator) error {
+func (_m *DB) SetReplicator(ctx context.Context, rep client.ReplicatorParams) error {
ret := _m.Called(ctx, rep)
if len(ret) == 0 {
@@ -1900,7 +1900,7 @@ func (_m *DB) SetReplicator(ctx context.Context, rep client.Replicator) error {
}
var r0 error
- if rf, ok := ret.Get(0).(func(context.Context, client.Replicator) error); ok {
+ if rf, ok := ret.Get(0).(func(context.Context, client.ReplicatorParams) error); ok {
r0 = rf(ctx, rep)
} else {
r0 = ret.Error(0)
@@ -1916,14 +1916,14 @@ type DB_SetReplicator_Call struct {
// SetReplicator is a helper method to define mock.On call
// - ctx context.Context
-// - rep client.Replicator
+// - rep client.ReplicatorParams
func (_e *DB_Expecter) SetReplicator(ctx interface{}, rep interface{}) *DB_SetReplicator_Call {
return &DB_SetReplicator_Call{Call: _e.mock.On("SetReplicator", ctx, rep)}
}
-func (_c *DB_SetReplicator_Call) Run(run func(ctx context.Context, rep client.Replicator)) *DB_SetReplicator_Call {
+func (_c *DB_SetReplicator_Call) Run(run func(ctx context.Context, rep client.ReplicatorParams)) *DB_SetReplicator_Call {
_c.Call.Run(func(args mock.Arguments) {
- run(args[0].(context.Context), args[1].(client.Replicator))
+ run(args[0].(context.Context), args[1].(client.ReplicatorParams))
})
return _c
}
@@ -1933,7 +1933,7 @@ func (_c *DB_SetReplicator_Call) Return(_a0 error) *DB_SetReplicator_Call {
return _c
}
-func (_c *DB_SetReplicator_Call) RunAndReturn(run func(context.Context, client.Replicator) error) *DB_SetReplicator_Call {
+func (_c *DB_SetReplicator_Call) RunAndReturn(run func(context.Context, client.ReplicatorParams) error) *DB_SetReplicator_Call {
_c.Call.Return(run)
return _c
}
diff --git a/client/p2p.go b/client/p2p.go
index d3d3c699b3..3d8ac4f086 100644
--- a/client/p2p.go
+++ b/client/p2p.go
@@ -23,10 +23,10 @@ type P2P interface {
// SetReplicator adds a replicator to the persisted list or adds
// schemas if the replicator already exists.
- SetReplicator(ctx context.Context, rep Replicator) error
+ SetReplicator(ctx context.Context, rep ReplicatorParams) error
// DeleteReplicator deletes a replicator from the persisted list
// or specific schemas if they are specified.
- DeleteReplicator(ctx context.Context, rep Replicator) error
+ DeleteReplicator(ctx context.Context, rep ReplicatorParams) error
// GetAllReplicators returns the full list of replicators with their
// subscribed schemas.
GetAllReplicators(ctx context.Context) ([]Replicator, error)
diff --git a/client/replicator.go b/client/replicator.go
index 730d3e2609..5af7ac99b4 100644
--- a/client/replicator.go
+++ b/client/replicator.go
@@ -16,6 +16,14 @@ import (
"github.com/libp2p/go-libp2p/core/peer"
)
+// ReplicatorParams contains the replicator fields that can be modified by the user.
+type ReplicatorParams struct {
+ // Info is the address of the peer to replicate to.
+ Info peer.AddrInfo
+ // Collections is the list of collection names to replicate.
+ Collections []string
+}
+
// Replicator is a peer that a set of local collections are replicated to.
type Replicator struct {
Info peer.AddrInfo
diff --git a/docs/website/references/http/openapi.json b/docs/website/references/http/openapi.json
index 056d53085d..d4cfdc830c 100644
--- a/docs/website/references/http/openapi.json
+++ b/docs/website/references/http/openapi.json
@@ -527,6 +527,29 @@
},
"type": "object"
},
+ "replicator_params": {
+ "properties": {
+ "Collections": {
+ "items": {
+ "type": "string"
+ },
+ "type": "array"
+ },
+ "Info": {
+ "properties": {
+ "Addrs": {
+ "items": {},
+ "type": "array"
+ },
+ "ID": {
+ "type": "string"
+ }
+ },
+ "type": "object"
+ }
+ },
+ "type": "object"
+ },
"schema": {
"properties": {
"Fields": {
@@ -1803,7 +1826,7 @@
"content": {
"application/json": {
"schema": {
- "$ref": "#/components/schemas/replicator"
+ "$ref": "#/components/schemas/replicator_params"
}
}
},
@@ -1859,7 +1882,7 @@
"content": {
"application/json": {
"schema": {
- "$ref": "#/components/schemas/replicator"
+ "$ref": "#/components/schemas/replicator_params"
}
}
},
diff --git a/http/client_p2p.go b/http/client_p2p.go
index 8d5f470f99..3ee5b01733 100644
--- a/http/client_p2p.go
+++ b/http/client_p2p.go
@@ -35,7 +35,7 @@ func (c *Client) PeerInfo() peer.AddrInfo {
return res
}
-func (c *Client) SetReplicator(ctx context.Context, rep client.Replicator) error {
+func (c *Client) SetReplicator(ctx context.Context, rep client.ReplicatorParams) error {
methodURL := c.http.baseURL.JoinPath("p2p", "replicators")
body, err := json.Marshal(rep)
@@ -50,7 +50,7 @@ func (c *Client) SetReplicator(ctx context.Context, rep client.Replicator) error
return err
}
-func (c *Client) DeleteReplicator(ctx context.Context, rep client.Replicator) error {
+func (c *Client) DeleteReplicator(ctx context.Context, rep client.ReplicatorParams) error {
methodURL := c.http.baseURL.JoinPath("p2p", "replicators")
body, err := json.Marshal(rep)
diff --git a/http/handler_p2p.go b/http/handler_p2p.go
index 941b6f4b5b..637b601e5c 100644
--- a/http/handler_p2p.go
+++ b/http/handler_p2p.go
@@ -36,7 +36,7 @@ func (s *p2pHandler) SetReplicator(rw http.ResponseWriter, req *http.Request) {
return
}
- var rep client.Replicator
+ var rep client.ReplicatorParams
if err := requestJSON(req, &rep); err != nil {
responseJSON(rw, http.StatusBadRequest, errorResponse{err})
return
@@ -56,7 +56,7 @@ func (s *p2pHandler) DeleteReplicator(rw http.ResponseWriter, req *http.Request)
return
}
- var rep client.Replicator
+ var rep client.ReplicatorParams
if err := requestJSON(req, &rep); err != nil {
responseJSON(rw, http.StatusBadRequest, errorResponse{err})
return
@@ -152,6 +152,9 @@ func (h *p2pHandler) bindRoutes(router *Router) {
replicatorSchema := &openapi3.SchemaRef{
Ref: "#/components/schemas/replicator",
}
+ replicatorParamsSchema := &openapi3.SchemaRef{
+ Ref: "#/components/schemas/replicator_params",
+ }
peerInfoResponse := openapi3.NewResponse().
WithDescription("Peer network info").
@@ -178,7 +181,7 @@ func (h *p2pHandler) bindRoutes(router *Router) {
replicatorRequest := openapi3.NewRequestBody().
WithRequired(true).
- WithContent(openapi3.NewContentWithJSONSchemaRef(replicatorSchema))
+ WithContent(openapi3.NewContentWithJSONSchemaRef(replicatorParamsSchema))
setReplicator := openapi3.NewOperation()
setReplicator.Description = "Add peer replicators"
diff --git a/http/openapi.go b/http/openapi.go
index 0a62b6ebac..b3c82d1662 100644
--- a/http/openapi.go
+++ b/http/openapi.go
@@ -35,6 +35,7 @@ var openApiSchemas = map[string]any{
"update_result": &client.UpdateResult{},
"lens_config": &client.LensConfig{},
"replicator": &client.Replicator{},
+ "replicator_params": &client.ReplicatorParams{},
"ccip_request": &CCIPRequest{},
"ccip_response": &CCIPResponse{},
"patch_schema_request": &patchSchemaRequest{},
diff --git a/internal/db/p2p_replicator.go b/internal/db/p2p_replicator.go
index 2d81f123c8..7764a6dec9 100644
--- a/internal/db/p2p_replicator.go
+++ b/internal/db/p2p_replicator.go
@@ -38,7 +38,7 @@ const (
retryTimeout = 10 * time.Second
)
-func (db *db) SetReplicator(ctx context.Context, rep client.Replicator) error {
+func (db *db) SetReplicator(ctx context.Context, rep client.ReplicatorParams) error {
txn, err := db.NewTxn(ctx, false)
if err != nil {
return err
@@ -85,9 +85,9 @@ func (db *db) SetReplicator(ctx context.Context, rep client.Replicator) error {
var collections []client.Collection
switch {
- case len(rep.Schemas) > 0:
+ case len(rep.Collections) > 0:
// if specific collections are chosen get them by name
- for _, name := range rep.Schemas {
+ for _, name := range rep.Collections {
col, err := db.GetCollectionByName(ctx, name)
if err != nil {
return NewErrReplicatorCollections(err)
@@ -210,7 +210,7 @@ func (db *db) getDocsHeads(
return updateChan
}
-func (db *db) DeleteReplicator(ctx context.Context, rep client.Replicator) error {
+func (db *db) DeleteReplicator(ctx context.Context, rep client.ReplicatorParams) error {
txn, err := db.NewTxn(ctx, false)
if err != nil {
return err
@@ -247,9 +247,9 @@ func (db *db) DeleteReplicator(ctx context.Context, rep client.Replicator) error
}
var collections []client.Collection
- if len(rep.Schemas) > 0 {
+ if len(rep.Collections) > 0 {
// if specific collections are chosen get them by name
- for _, name := range rep.Schemas {
+ for _, name := range rep.Collections {
col, err := db.GetCollectionByName(ctx, name)
if err != nil {
return NewErrReplicatorCollections(err)
@@ -277,7 +277,7 @@ func (db *db) DeleteReplicator(ctx context.Context, rep client.Replicator) error
// Persist the replicator to the store, deleting it if no schemas remain
key := core.NewReplicatorKey(rep.Info.ID.String())
- if len(rep.Schemas) == 0 {
+ if len(rep.Collections) == 0 {
err := txn.Peerstore().Delete(ctx, key.ToDS())
if err != nil {
return err
diff --git a/internal/db/p2p_replicator_test.go b/internal/db/p2p_replicator_test.go
index b287101a54..bad3d46271 100644
--- a/internal/db/p2p_replicator_test.go
+++ b/internal/db/p2p_replicator_test.go
@@ -43,7 +43,7 @@ func TestSetReplicator_WithEmptyPeerInfo_ShouldError(t *testing.T) {
db, err := newDefraMemoryDB(ctx)
require.NoError(t, err)
defer db.Close()
- err = db.SetReplicator(ctx, client.Replicator{})
+ err = db.SetReplicator(ctx, client.ReplicatorParams{})
require.ErrorContains(t, err, "empty peer ID")
}
@@ -56,7 +56,7 @@ func TestSetReplicator_WithSelfTarget_ShouldError(t *testing.T) {
require.NoError(t, err)
db.events.Publish(event.NewMessage(event.PeerInfoName, event.PeerInfo{Info: peer.AddrInfo{ID: "self"}}))
waitForPeerInfo(db, sub)
- err = db.SetReplicator(ctx, client.Replicator{Info: peer.AddrInfo{ID: "self"}})
+ err = db.SetReplicator(ctx, client.ReplicatorParams{Info: peer.AddrInfo{ID: "self"}})
require.ErrorIs(t, err, ErrSelfTargetForReplicator)
}
@@ -69,9 +69,9 @@ func TestSetReplicator_WithInvalidCollection_ShouldError(t *testing.T) {
require.NoError(t, err)
db.events.Publish(event.NewMessage(event.PeerInfoName, event.PeerInfo{Info: peer.AddrInfo{ID: "self"}}))
waitForPeerInfo(db, sub)
- err = db.SetReplicator(ctx, client.Replicator{
- Info: peer.AddrInfo{ID: "other"},
- Schemas: []string{"invalidCollection"},
+ err = db.SetReplicator(ctx, client.ReplicatorParams{
+ Info: peer.AddrInfo{ID: "other"},
+ Collections: []string{"invalidCollection"},
})
require.ErrorIs(t, err, ErrReplicatorCollections)
}
@@ -87,9 +87,9 @@ func TestSetReplicator_WithValidCollection_ShouldSucceed(t *testing.T) {
require.NoError(t, err)
schema, err := db.GetSchemaByVersionID(ctx, cols[0].SchemaVersionID)
require.NoError(t, err)
- err = db.SetReplicator(ctx, client.Replicator{
- Info: peer.AddrInfo{ID: "other"},
- Schemas: []string{"User"},
+ err = db.SetReplicator(ctx, client.ReplicatorParams{
+ Info: peer.AddrInfo{ID: "other"},
+ Collections: []string{"User"},
})
require.NoError(t, err)
msg := <-sub.Message()
@@ -113,9 +113,9 @@ func TestSetReplicator_WithValidCollectionsOnSeparateSet_ShouldSucceed(t *testin
require.NoError(t, err)
schema1, err := db.GetSchemaByVersionID(ctx, cols1[0].SchemaVersionID)
require.NoError(t, err)
- err = db.SetReplicator(ctx, client.Replicator{
- Info: peer.AddrInfo{ID: peerID},
- Schemas: []string{"User"},
+ err = db.SetReplicator(ctx, client.ReplicatorParams{
+ Info: peer.AddrInfo{ID: peerID},
+ Collections: []string{"User"},
})
require.NoError(t, err)
msg := <-sub.Message()
@@ -127,9 +127,9 @@ func TestSetReplicator_WithValidCollectionsOnSeparateSet_ShouldSucceed(t *testin
require.NoError(t, err)
schema2, err := db.GetSchemaByVersionID(ctx, cols2[0].SchemaVersionID)
require.NoError(t, err)
- err = db.SetReplicator(ctx, client.Replicator{
- Info: peer.AddrInfo{ID: peerID},
- Schemas: []string{"Book"},
+ err = db.SetReplicator(ctx, client.ReplicatorParams{
+ Info: peer.AddrInfo{ID: peerID},
+ Collections: []string{"Book"},
})
require.NoError(t, err)
msg = <-sub.Message()
@@ -154,9 +154,9 @@ func TestSetReplicator_WithValidCollectionWithDoc_ShouldSucceed(t *testing.T) {
err = col.Create(ctx, doc)
require.NoError(t, err)
- err = db.SetReplicator(ctx, client.Replicator{
- Info: peer.AddrInfo{ID: "other"},
- Schemas: []string{"User"},
+ err = db.SetReplicator(ctx, client.ReplicatorParams{
+ Info: peer.AddrInfo{ID: "other"},
+ Collections: []string{"User"},
})
require.NoError(t, err)
msg := <-sub.Message()
@@ -173,7 +173,7 @@ func TestDeleteReplicator_WithEmptyPeerInfo_ShouldError(t *testing.T) {
db, err := newDefraMemoryDB(ctx)
require.NoError(t, err)
defer db.Close()
- err = db.DeleteReplicator(ctx, client.Replicator{})
+ err = db.DeleteReplicator(ctx, client.ReplicatorParams{})
require.ErrorContains(t, err, "empty peer ID")
}
@@ -182,7 +182,7 @@ func TestDeleteReplicator_WithNonExistantReplicator_ShouldError(t *testing.T) {
db, err := newDefraMemoryDB(ctx)
require.NoError(t, err)
defer db.Close()
- err = db.DeleteReplicator(ctx, client.Replicator{Info: peer.AddrInfo{ID: "other"}})
+ err = db.DeleteReplicator(ctx, client.ReplicatorParams{Info: peer.AddrInfo{ID: "other"}})
require.ErrorIs(t, err, ErrReplicatorNotFound)
}
@@ -201,16 +201,16 @@ func TestDeleteReplicator_WithValidCollection_ShouldSucceed(t *testing.T) {
require.NoError(t, err)
schema, err := db.GetSchemaByVersionID(ctx, cols[0].SchemaVersionID)
require.NoError(t, err)
- err = db.SetReplicator(ctx, client.Replicator{
- Info: peer.AddrInfo{ID: peerID},
- Schemas: []string{"User"},
+ err = db.SetReplicator(ctx, client.ReplicatorParams{
+ Info: peer.AddrInfo{ID: peerID},
+ Collections: []string{"User"},
})
require.NoError(t, err)
msg := <-sub.Message()
replicator := msg.Data.(event.Replicator)
require.Equal(t, peerID, replicator.Info.ID)
require.Equal(t, map[string]struct{}{schema.Root: {}}, replicator.Schemas)
- err = db.DeleteReplicator(ctx, client.Replicator{Info: peer.AddrInfo{ID: peerID}})
+ err = db.DeleteReplicator(ctx, client.ReplicatorParams{Info: peer.AddrInfo{ID: peerID}})
require.NoError(t, err)
msg = <-sub.Message()
replicator = msg.Data.(event.Replicator)
@@ -237,9 +237,9 @@ func TestDeleteReplicator_PartialWithValidCollections_ShouldSucceed(t *testing.T
require.NoError(t, err)
schema2, err := db.GetSchemaByVersionID(ctx, cols2[0].SchemaVersionID)
require.NoError(t, err)
- err = db.SetReplicator(ctx, client.Replicator{
- Info: peer.AddrInfo{ID: peerID},
- Schemas: []string{"User", "Book"},
+ err = db.SetReplicator(ctx, client.ReplicatorParams{
+ Info: peer.AddrInfo{ID: peerID},
+ Collections: []string{"User", "Book"},
})
require.NoError(t, err)
msg := <-sub.Message()
@@ -247,7 +247,7 @@ func TestDeleteReplicator_PartialWithValidCollections_ShouldSucceed(t *testing.T
require.Equal(t, peerID, replicator.Info.ID)
require.Equal(t, map[string]struct{}{schema1.Root: {}, schema2.Root: {}}, replicator.Schemas)
- err = db.DeleteReplicator(ctx, client.Replicator{Info: peer.AddrInfo{ID: peerID}, Schemas: []string{"User"}})
+ err = db.DeleteReplicator(ctx, client.ReplicatorParams{Info: peer.AddrInfo{ID: peerID}, Collections: []string{"User"}})
require.NoError(t, err)
msg = <-sub.Message()
replicator = msg.Data.(event.Replicator)
@@ -270,9 +270,9 @@ func TestGetAllReplicators_WithValidCollection_ShouldSucceed(t *testing.T) {
require.NoError(t, err)
schema, err := db.GetSchemaByVersionID(ctx, cols[0].SchemaVersionID)
require.NoError(t, err)
- err = db.SetReplicator(ctx, client.Replicator{
- Info: peer.AddrInfo{ID: peerID},
- Schemas: []string{"User"},
+ err = db.SetReplicator(ctx, client.ReplicatorParams{
+ Info: peer.AddrInfo{ID: peerID},
+ Collections: []string{"User"},
})
require.NoError(t, err)
msg := <-sub.Message()
@@ -301,9 +301,9 @@ func TestLoadReplicators_WithValidCollection_ShouldSucceed(t *testing.T) {
require.NoError(t, err)
schema, err := db.GetSchemaByVersionID(ctx, cols[0].SchemaVersionID)
require.NoError(t, err)
- err = db.SetReplicator(ctx, client.Replicator{
- Info: peer.AddrInfo{ID: peerID},
- Schemas: []string{"User"},
+ err = db.SetReplicator(ctx, client.ReplicatorParams{
+ Info: peer.AddrInfo{ID: peerID},
+ Collections: []string{"User"},
})
require.NoError(t, err)
msg := <-sub.Message()
diff --git a/tests/clients/cli/wrapper.go b/tests/clients/cli/wrapper.go
index 6983aa1797..f468cec0f9 100644
--- a/tests/clients/cli/wrapper.go
+++ b/tests/clients/cli/wrapper.go
@@ -77,9 +77,9 @@ func (w *Wrapper) PeerInfo() peer.AddrInfo {
return info
}
-func (w *Wrapper) SetReplicator(ctx context.Context, rep client.Replicator) error {
+func (w *Wrapper) SetReplicator(ctx context.Context, rep client.ReplicatorParams) error {
args := []string{"client", "p2p", "replicator", "set"}
- args = append(args, "--collection", strings.Join(rep.Schemas, ","))
+ args = append(args, "--collection", strings.Join(rep.Collections, ","))
info, err := json.Marshal(rep.Info)
if err != nil {
@@ -91,9 +91,9 @@ func (w *Wrapper) SetReplicator(ctx context.Context, rep client.Replicator) erro
return err
}
-func (w *Wrapper) DeleteReplicator(ctx context.Context, rep client.Replicator) error {
+func (w *Wrapper) DeleteReplicator(ctx context.Context, rep client.ReplicatorParams) error {
args := []string{"client", "p2p", "replicator", "delete"}
- args = append(args, "--collection", strings.Join(rep.Schemas, ","))
+ args = append(args, "--collection", strings.Join(rep.Collections, ","))
info, err := json.Marshal(rep.Info)
if err != nil {
diff --git a/tests/clients/http/wrapper.go b/tests/clients/http/wrapper.go
index 35a386e18a..7438fa8cce 100644
--- a/tests/clients/http/wrapper.go
+++ b/tests/clients/http/wrapper.go
@@ -62,11 +62,11 @@ func (w *Wrapper) PeerInfo() peer.AddrInfo {
return w.client.PeerInfo()
}
-func (w *Wrapper) SetReplicator(ctx context.Context, rep client.Replicator) error {
+func (w *Wrapper) SetReplicator(ctx context.Context, rep client.ReplicatorParams) error {
return w.client.SetReplicator(ctx, rep)
}
-func (w *Wrapper) DeleteReplicator(ctx context.Context, rep client.Replicator) error {
+func (w *Wrapper) DeleteReplicator(ctx context.Context, rep client.ReplicatorParams) error {
return w.client.DeleteReplicator(ctx, rep)
}
diff --git a/tests/integration/p2p.go b/tests/integration/p2p.go
index 99c713bb79..7c5b20e69a 100644
--- a/tests/integration/p2p.go
+++ b/tests/integration/p2p.go
@@ -174,7 +174,7 @@ func configureReplicator(
sourceNode := s.nodes[cfg.SourceNodeID]
targetNode := s.nodes[cfg.TargetNodeID]
- err := sourceNode.SetReplicator(s.ctx, client.Replicator{
+ err := sourceNode.SetReplicator(s.ctx, client.ReplicatorParams{
Info: targetNode.PeerInfo(),
})
@@ -193,7 +193,7 @@ func deleteReplicator(
sourceNode := s.nodes[cfg.SourceNodeID]
targetNode := s.nodes[cfg.TargetNodeID]
- err := sourceNode.DeleteReplicator(s.ctx, client.Replicator{
+ err := sourceNode.DeleteReplicator(s.ctx, client.ReplicatorParams{
Info: targetNode.PeerInfo(),
})
require.NoError(s.t, err)
From bb0917e720a56de39e1067b3c045abcec3108038 Mon Sep 17 00:00:00 2001
From: Keenan Nemetz
Date: Thu, 24 Oct 2024 08:53:26 -0700
Subject: [PATCH 06/47] fix(i): Panic on aggregate alias (#3174)
## Relevant issue(s)
Resolves #3160
## Description
This PR fixes an issue where aliases on top-level aggregates would cause
a panic.
## Tasks
- [x] I made sure the code is well commented, particularly
hard-to-understand areas.
- [x] I made sure the repository-held documentation is changed
accordingly.
- [x] I made sure the pull request title adheres to the conventional
commit style (the subset used in the project can be found in
[tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)).
- [x] I made sure to discuss its limitations such as threats to
validity, vulnerability to mistake and misuse, robustness to
invalidation of assumptions, resource requirements, ...
## How has this been tested?
Added integration tests.
Specify the platform(s) on which this was tested:
- MacOS
---
internal/request/graphql/parser/query.go | 14 +++---
.../integration/query/commits/simple_test.go | 38 ++++++++++++++++
.../query/latest_commits/with_doc_id_test.go | 44 +++++++++++++++++++
.../query/simple/with_average_test.go | 18 ++++++++
.../query/simple/with_count_test.go | 18 ++++++++
.../integration/query/simple/with_max_test.go | 18 ++++++++
.../integration/query/simple/with_min_test.go | 18 ++++++++
7 files changed, 161 insertions(+), 7 deletions(-)
diff --git a/internal/request/graphql/parser/query.go b/internal/request/graphql/parser/query.go
index e0248768ab..a15cff0e1f 100644
--- a/internal/request/graphql/parser/query.go
+++ b/internal/request/graphql/parser/query.go
@@ -25,18 +25,18 @@ func parseQueryOperationDefinition(
collectedFields map[string][]*ast.Field,
) (*request.OperationDefinition, []error) {
var selections []request.Selection
- for name, fields := range collectedFields {
- for _, node := range fields {
+ for _, fields := range collectedFields {
+ for _, field := range fields {
var parsedSelection request.Selection
- if _, isCommitQuery := request.CommitQueries[name]; isCommitQuery {
- parsed, err := parseCommitSelect(exe, exe.Schema.QueryType(), node)
+ if _, isCommitQuery := request.CommitQueries[field.Name.Value]; isCommitQuery {
+ parsed, err := parseCommitSelect(exe, exe.Schema.QueryType(), field)
if err != nil {
return nil, []error{err}
}
parsedSelection = parsed
- } else if _, isAggregate := request.Aggregates[name]; isAggregate {
- parsed, err := parseAggregate(exe, exe.Schema.QueryType(), node)
+ } else if _, isAggregate := request.Aggregates[field.Name.Value]; isAggregate {
+ parsed, err := parseAggregate(exe, exe.Schema.QueryType(), field)
if err != nil {
return nil, []error{err}
}
@@ -56,7 +56,7 @@ func parseQueryOperationDefinition(
} else {
// the query doesn't match a reserve name
// so its probably a generated query
- parsed, err := parseSelect(exe, exe.Schema.QueryType(), node)
+ parsed, err := parseSelect(exe, exe.Schema.QueryType(), field)
if err != nil {
return nil, []error{err}
}
diff --git a/tests/integration/query/commits/simple_test.go b/tests/integration/query/commits/simple_test.go
index 6a69578ada..ca8c1e51b5 100644
--- a/tests/integration/query/commits/simple_test.go
+++ b/tests/integration/query/commits/simple_test.go
@@ -444,3 +444,41 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) {
testUtils.ExecuteTestCase(t, test)
}
+
+func TestQueryCommits_WithAlias_Succeeds(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple all commits with alias query",
+ Actions: []any{
+ updateUserCollectionSchema(),
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `{
+ "name": "John",
+ "age": 21
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ history: commits {
+ cid
+ }
+ }`,
+ Results: map[string]any{
+ "history": []map[string]any{
+ {
+ "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e",
+ },
+ {
+ "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy",
+ },
+ {
+ "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy",
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/query/latest_commits/with_doc_id_test.go b/tests/integration/query/latest_commits/with_doc_id_test.go
index 6250409402..85d03d36a4 100644
--- a/tests/integration/query/latest_commits/with_doc_id_test.go
+++ b/tests/integration/query/latest_commits/with_doc_id_test.go
@@ -91,3 +91,47 @@ func TestQueryLatestCommitsWithDocIDWithSchemaVersionIDField(t *testing.T) {
executeTestCase(t, test)
}
+
+func TestQueryLatestCommits_WithDocIDAndAliased_Succeeds(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple latest commits query with docID and aliased",
+ Actions: []any{
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John",
+ "age": 21
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ history: latestCommits(docID: "bae-c9fb0fa4-1195-589c-aa54-e68333fb90b3") {
+ cid
+ links {
+ cid
+ name
+ }
+ }
+ }`,
+ Results: map[string]any{
+ "history": []map[string]any{
+ {
+ "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy",
+ "links": []map[string]any{
+ {
+ "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e",
+ "name": "age",
+ },
+ {
+ "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy",
+ "name": "name",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
diff --git a/tests/integration/query/simple/with_average_test.go b/tests/integration/query/simple/with_average_test.go
index 36aedd1ee8..269a7fea1d 100644
--- a/tests/integration/query/simple/with_average_test.go
+++ b/tests/integration/query/simple/with_average_test.go
@@ -95,3 +95,21 @@ func TestQuerySimpleWithAverage(t *testing.T) {
executeTestCase(t, test)
}
+
+func TestQuerySimple_WithAliasedAverage_OnEmptyCollection_Succeeds(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query, aliased average on empty",
+ Actions: []any{
+ testUtils.Request{
+ Request: `query {
+ average: _avg(Users: {field: Age})
+ }`,
+ Results: map[string]any{
+ "average": float64(0),
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
diff --git a/tests/integration/query/simple/with_count_test.go b/tests/integration/query/simple/with_count_test.go
index 20068e0672..86ddfc0f41 100644
--- a/tests/integration/query/simple/with_count_test.go
+++ b/tests/integration/query/simple/with_count_test.go
@@ -79,3 +79,21 @@ func TestQuerySimpleWithCount(t *testing.T) {
executeTestCase(t, test)
}
+
+func TestQuerySimple_WithAliasedCount_OnEmptyCollection_Succeeds(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query, aliased count on empty",
+ Actions: []any{
+ testUtils.Request{
+ Request: `query {
+ number: _count(Users: {})
+ }`,
+ Results: map[string]any{
+ "number": 0,
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
diff --git a/tests/integration/query/simple/with_max_test.go b/tests/integration/query/simple/with_max_test.go
index bdb47b6f8c..82caeb5e6c 100644
--- a/tests/integration/query/simple/with_max_test.go
+++ b/tests/integration/query/simple/with_max_test.go
@@ -127,3 +127,21 @@ func TestQuerySimple_WithMaxAndMaxValueInt_Succeeds(t *testing.T) {
executeTestCase(t, test)
}
+
+func TestQuerySimple_WithAliasedMaxOnEmptyCollection_Succeeds(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query aliased max on empty",
+ Actions: []any{
+ testUtils.Request{
+ Request: `query {
+ maximum: _max(Users: {field: Age})
+ }`,
+ Results: map[string]any{
+ "maximum": nil,
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
diff --git a/tests/integration/query/simple/with_min_test.go b/tests/integration/query/simple/with_min_test.go
index feb8e54e2f..ac46c23c8d 100644
--- a/tests/integration/query/simple/with_min_test.go
+++ b/tests/integration/query/simple/with_min_test.go
@@ -127,3 +127,21 @@ func TestQuerySimple_WithMinAndMaxValueInt_Succeeds(t *testing.T) {
executeTestCase(t, test)
}
+
+func TestQuerySimple_WithAliasedMinOnEmptyCollection_Succeeds(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query aliased min on empty",
+ Actions: []any{
+ testUtils.Request{
+ Request: `query {
+ minimum: _min(Users: {field: Age})
+ }`,
+ Results: map[string]any{
+ "minimum": nil,
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
From d95c51ff731c55c97f6c13db1480b2c8b751e551 Mon Sep 17 00:00:00 2001
From: Islam Aliev
Date: Fri, 25 Oct 2024 16:52:43 +0200
Subject: [PATCH 07/47] feat: Add node identity (#3125)
## Relevant issue(s)
Resolves #2908
## Description
Assign an identity to the node upon startup.
---
README.md | 5 +-
acp/identity/context.go | 41 ++++
acp/identity/generate.go | 21 +-
acp/identity/identity.go | 116 ++++++-----
acp/identity/raw_identity.go | 73 +++++++
cli/acp_relationship_add.go | 6 +-
cli/acp_relationship_delete.go | 2 +-
cli/cli.go | 1 +
cli/node_identity.go | 50 +++++
cli/start.go | 133 +++++++++---
cli/utils.go | 27 +--
client/db.go | 4 +
client/mocks/db.go | 58 ++++++
docs/website/references/cli/defradb_client.md | 1 +
.../defradb_client_acp_relationship_add.md | 6 +-
.../defradb_client_acp_relationship_delete.md | 2 +-
.../cli/defradb_client_node-identity.md | 55 +++++
docs/website/references/cli/defradb_start.md | 4 +-
docs/website/references/http/openapi.json | 39 ++++
http/auth.go | 7 +-
http/auth_test.go | 35 ++--
http/client.go | 15 ++
http/errors.go | 21 +-
http/handler_store.go | 32 ++-
http/http_client.go | 3 +-
http/openapi.go | 2 +
internal/db/backup_test.go | 15 +-
internal/db/collection_acp.go | 7 +-
internal/db/collection_get.go | 4 +-
internal/db/collection_index.go | 4 +-
internal/db/collection_update.go | 4 +-
internal/db/config.go | 9 +
internal/db/context.go | 27 ---
internal/db/db.go | 25 ++-
internal/db/request.go | 4 +-
internal/db/subscriptions.go | 4 +-
internal/db/view.go | 4 +-
node/node.go | 3 +
tests/clients/cli/wrapper.go | 20 +-
tests/clients/cli/wrapper_cli.go | 3 +-
tests/clients/http/wrapper.go | 5 +
tests/integration/acp.go | 197 ++++--------------
.../integration/acp/add_policy/basic_test.go | 6 +-
.../acp/add_policy/with_empty_args_test.go | 8 +-
.../with_extra_perms_and_relations_test.go | 4 +-
.../acp/add_policy/with_extra_perms_test.go | 6 +-
.../add_policy/with_extra_relations_test.go | 6 +-
.../add_policy/with_invalid_relations_test.go | 6 +-
.../with_invalid_required_relation_test.go | 6 +-
.../add_policy/with_invalid_resource_test.go | 4 +-
.../add_policy/with_managed_relation_test.go | 4 +-
.../add_policy/with_multi_policies_test.go | 22 +-
.../with_multiple_resources_test.go | 8 +-
.../acp/add_policy/with_no_perms_test.go | 10 +-
.../acp/add_policy/with_no_resources_test.go | 6 +-
.../acp/add_policy/with_perm_expr_test.go | 6 +-
.../add_policy/with_perm_invalid_expr_test.go | 8 +-
.../with_permissionless_owner_test.go | 8 +-
.../add_policy/with_unused_relations_test.go | 4 +-
tests/integration/acp/index/create_test.go | 6 +-
tests/integration/acp/index/query_test.go | 18 +-
.../acp/index/query_with_relation_test.go | 18 +-
tests/integration/acp/p2p/create_test.go | 6 +-
tests/integration/acp/p2p/delete_test.go | 10 +-
tests/integration/acp/p2p/replicator_test.go | 10 +-
...icator_with_doc_actor_relationship_test.go | 30 +--
tests/integration/acp/p2p/subscribe_test.go | 10 +-
...scribe_with_doc_actor_relationship_test.go | 30 +--
tests/integration/acp/p2p/update_test.go | 10 +-
tests/integration/acp/query/avg_test.go | 6 +-
tests/integration/acp/query/count_test.go | 10 +-
tests/integration/acp/query/fixture.go | 10 +-
.../acp/query/relation_objects_test.go | 10 +-
.../acp/register_and_delete_test.go | 30 ++-
.../integration/acp/register_and_read_test.go | 24 +--
.../acp/register_and_update_test.go | 42 ++--
.../doc_actor/add/invalid_test.go | 42 ++--
.../doc_actor/add/with_delete_test.go | 50 +++--
.../doc_actor/add/with_dummy_relation_test.go | 26 ++-
.../doc_actor/add/with_manager_gql_test.go | 66 +++---
.../doc_actor/add/with_manager_test.go | 136 ++++++------
.../add/with_no_policy_on_collection_test.go | 8 +-
.../doc_actor/add/with_only_write_gql_test.go | 16 +-
.../doc_actor/add/with_only_write_test.go | 34 +--
.../add/with_public_document_test.go | 10 +-
.../doc_actor/add/with_reader_gql_test.go | 16 +-
.../doc_actor/add/with_reader_test.go | 70 +++----
.../doc_actor/add/with_update_gql_test.go | 32 +--
.../doc_actor/add/with_update_test.go | 48 ++---
.../doc_actor/delete/invalid_test.go | 42 ++--
.../doc_actor/delete/with_delete_test.go | 30 ++-
.../delete/with_dummy_relation_test.go | 26 ++-
.../doc_actor/delete/with_manager_test.go | 64 +++---
.../with_no_policy_on_collection_test.go | 8 +-
.../delete/with_public_document_test.go | 10 +-
.../doc_actor/delete/with_reader_test.go | 34 ++-
.../doc_actor/delete/with_self_test.go | 30 ++-
.../doc_actor/delete/with_update_test.go | 44 ++--
.../add_dpi/accept_basic_dpi_fmts_test.go | 6 +-
.../accept_extra_permissions_on_dpi_test.go | 8 +-
.../accept_managed_relation_on_dpi_test.go | 4 +-
...ept_mixed_resources_on_partial_dpi_test.go | 4 +-
.../schema/add_dpi/accept_multi_dpis_test.go | 6 +-
.../accept_multi_resources_on_dpi_test.go | 6 +-
...cept_same_resource_on_diff_schemas_test.go | 4 +-
.../reject_empty_arg_on_schema_test.go | 6 +-
.../reject_invalid_arg_type_on_schema_test.go | 6 +-
...ect_invalid_owner_read_perm_on_dpi_test.go | 12 +-
...alid_owner_read_perm_symbol_on_dpi_test.go | 8 +-
...ct_invalid_owner_write_perm_on_dpi_test.go | 12 +-
...lid_owner_write_perm_symbol_on_dpi_test.go | 8 +-
.../schema/add_dpi/reject_missing_dpi_test.go | 4 +-
.../reject_missing_id_arg_on_schema_test.go | 6 +-
.../reject_missing_perms_on_dpi_test.go | 4 +-
...ect_missing_resource_arg_on_schema_test.go | 6 +-
.../reject_missing_resource_on_dpi_test.go | 4 +-
...ect_mixed_resources_on_partial_dpi_test.go | 4 +-
.../updates/remove/policy_test.go | 4 +-
.../updates/replace/view_policy_test.go | 2 +-
tests/integration/identity.go | 147 +++++++++++++
tests/integration/node/identity_test.go | 36 ++++
tests/integration/state.go | 19 +-
tests/integration/test_case.go | 38 +++-
tests/integration/utils.go | 71 ++++---
124 files changed, 1607 insertions(+), 1166 deletions(-)
create mode 100644 acp/identity/context.go
create mode 100644 acp/identity/raw_identity.go
create mode 100644 cli/node_identity.go
create mode 100644 docs/website/references/cli/defradb_client_node-identity.md
create mode 100644 tests/integration/identity.go
create mode 100644 tests/integration/node/identity_test.go
diff --git a/README.md b/README.md
index fd93db1d6f..3f8276b5cb 100644
--- a/README.md
+++ b/README.md
@@ -67,13 +67,16 @@ The following keys are loaded from the keyring on start:
- `peer-key` Ed25519 private key (required)
- `encryption-key` AES-128, AES-192, or AES-256 key (optional)
+- `node-identity-key` Secp256k1 private key (optional). This key is used for node's identity.
A secret to unlock the keyring is required on start and must be provided via the `DEFRA_KEYRING_SECRET` environment variable. If a `.env` file is available in the working directory, the secret can be stored there or via a file at a path defined by the `--secret-file` flag.
-The keys will be randomly generated on the inital start of the node if they are not found.
+The keys will be randomly generated on the initial start of the node if they are not found.
Alternatively, to randomly generate the required keys, run the following command:
+Node identity is an identity assigned to the node. It is used to exchange encryption keys with other nodes.
+
```
defradb keyring generate
```
diff --git a/acp/identity/context.go b/acp/identity/context.go
new file mode 100644
index 0000000000..6947bb49e3
--- /dev/null
+++ b/acp/identity/context.go
@@ -0,0 +1,41 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package identity
+
+import (
+ "context"
+
+ "github.com/sourcenetwork/immutable"
+)
+
+// identityContextKey is the key type for ACP identity context values.
+type identityContextKey struct{}
+
+// FromContext returns the identity from the given context.
+//
+// If an identity does not exist `NoIdentity` is returned.
+func FromContext(ctx context.Context) immutable.Option[Identity] {
+ identity, ok := ctx.Value(identityContextKey{}).(Identity)
+ if ok {
+ return immutable.Some(identity)
+ }
+ return None
+}
+
+// WithContext returns a new context with the identity value set.
+//
+// This will overwrite any previously set identity value.
+func WithContext(ctx context.Context, identity immutable.Option[Identity]) context.Context {
+ if identity.HasValue() {
+ return context.WithValue(ctx, identityContextKey{}, identity.Value())
+ }
+ return context.WithValue(ctx, identityContextKey{}, nil)
+}
diff --git a/acp/identity/generate.go b/acp/identity/generate.go
index d19ee6b2cb..4ea7e94fc8 100644
--- a/acp/identity/generate.go
+++ b/acp/identity/generate.go
@@ -11,24 +11,9 @@
package identity
import (
- "encoding/hex"
-
"github.com/sourcenetwork/defradb/crypto"
)
-// RawIdentity holds the raw bytes that make up an actor's identity.
-type RawIdentity struct {
- // PrivateKey is a secp256k1 private key that is a 256-bit big-endian
- // binary-encoded number, padded to a length of 32 bytes in HEX format.
- PrivateKey string
-
- // PublicKey is a compressed 33-byte secp256k1 public key in HEX format.
- PublicKey string
-
- // DID is `did:key` key generated from the public key address.
- DID string
-}
-
// Generate generates a new identity.
func Generate() (RawIdentity, error) {
privateKey, err := crypto.GenerateSecp256k1()
@@ -43,9 +28,5 @@ func Generate() (RawIdentity, error) {
return RawIdentity{}, err
}
- return RawIdentity{
- PrivateKey: hex.EncodeToString(privateKey.Serialize()),
- PublicKey: hex.EncodeToString(publicKey.SerializeCompressed()),
- DID: did,
- }, nil
+ return newRawIdentity(privateKey, publicKey, did), nil
}
diff --git a/acp/identity/identity.go b/acp/identity/identity.go
index db022b8c74..4dee93deba 100644
--- a/acp/identity/identity.go
+++ b/acp/identity/identity.go
@@ -50,72 +50,22 @@ type Identity struct {
}
// FromPrivateKey returns a new identity using the given private key.
-//
-// - duration: The [time.Duration] that this identity is valid for.
-// - audience: The audience that this identity is valid for. This is required
-// by the Defra http client. For example `github.com/sourcenetwork/defradb`
-// - authorizedAccount: An account that this identity is authorizing to make
-// SourceHub calls on behalf of this actor. This is currently required when
-// using SourceHub ACP.
-// - skipTokenGeneration: If true, BearerToken will not be set. This parameter is
-// provided as generating and signing the token is relatively slow, and only required
-// by remote Defra clients (CLI, http), or if using SourceHub ACP.
-func FromPrivateKey(
- privateKey *secp256k1.PrivateKey,
- duration time.Duration,
- audience immutable.Option[string],
- authorizedAccount immutable.Option[string],
- skipTokenGeneration bool,
-) (Identity, error) {
+// In order to generate a fresh token for this identity, use the [UpdateToken]
+func FromPrivateKey(privateKey *secp256k1.PrivateKey) (Identity, error) {
publicKey := privateKey.PubKey()
did, err := DIDFromPublicKey(publicKey)
if err != nil {
return Identity{}, err
}
- var signedToken []byte
- if !skipTokenGeneration {
- subject := hex.EncodeToString(publicKey.SerializeCompressed())
- now := time.Now()
-
- jwtBuilder := jwt.NewBuilder()
- jwtBuilder = jwtBuilder.Subject(subject)
- jwtBuilder = jwtBuilder.Expiration(now.Add(duration))
- jwtBuilder = jwtBuilder.NotBefore(now)
- jwtBuilder = jwtBuilder.Issuer(did)
- jwtBuilder = jwtBuilder.IssuedAt(now)
-
- if audience.HasValue() {
- jwtBuilder = jwtBuilder.Audience([]string{audience.Value()})
- }
-
- token, err := jwtBuilder.Build()
- if err != nil {
- return Identity{}, err
- }
-
- if authorizedAccount.HasValue() {
- err = token.Set(acptypes.AuthorizedAccountClaim, authorizedAccount.Value())
- if err != nil {
- return Identity{}, err
- }
- }
-
- signedToken, err = jwt.Sign(token, jwt.WithKey(BearerTokenSignatureScheme, privateKey.ToECDSA()))
- if err != nil {
- return Identity{}, err
- }
- }
-
return Identity{
- DID: did,
- PrivateKey: privateKey,
- PublicKey: publicKey,
- BearerToken: string(signedToken),
+ DID: did,
+ PrivateKey: privateKey,
+ PublicKey: publicKey,
}, nil
}
-// FromToken constructs a new `Indentity` from a bearer token.
+// FromToken constructs a new `Identity` from a bearer token.
func FromToken(data []byte) (Identity, error) {
token, err := jwt.Parse(data, jwt.WithVerify(false))
if err != nil {
@@ -158,3 +108,57 @@ func didFromPublicKey(publicKey *secp256k1.PublicKey, producer didProducer) (str
}
return did.String(), nil
}
+
+// IntoRawIdentity converts an `Identity` into a `RawIdentity`.
+func (identity Identity) IntoRawIdentity() RawIdentity {
+ return newRawIdentity(identity.PrivateKey, identity.PublicKey, identity.DID)
+}
+
+// UpdateToken updates the `BearerToken` field of the `Identity`.
+//
+// - duration: The [time.Duration] that this identity is valid for.
+// - audience: The audience that this identity is valid for. This is required
+// by the Defra http client. For example `github.com/sourcenetwork/defradb`
+// - authorizedAccount: An account that this identity is authorizing to make
+// SourceHub calls on behalf of this actor. This is currently required when
+// using SourceHub ACP.
+func (identity *Identity) UpdateToken(
+ duration time.Duration,
+ audience immutable.Option[string],
+ authorizedAccount immutable.Option[string],
+) error {
+ var signedToken []byte
+ subject := hex.EncodeToString(identity.PublicKey.SerializeCompressed())
+ now := time.Now()
+
+ jwtBuilder := jwt.NewBuilder()
+ jwtBuilder = jwtBuilder.Subject(subject)
+ jwtBuilder = jwtBuilder.Expiration(now.Add(duration))
+ jwtBuilder = jwtBuilder.NotBefore(now)
+ jwtBuilder = jwtBuilder.Issuer(identity.DID)
+ jwtBuilder = jwtBuilder.IssuedAt(now)
+
+ if audience.HasValue() {
+ jwtBuilder = jwtBuilder.Audience([]string{audience.Value()})
+ }
+
+ token, err := jwtBuilder.Build()
+ if err != nil {
+ return err
+ }
+
+ if authorizedAccount.HasValue() {
+ err = token.Set(acptypes.AuthorizedAccountClaim, authorizedAccount.Value())
+ if err != nil {
+ return err
+ }
+ }
+
+ signedToken, err = jwt.Sign(token, jwt.WithKey(BearerTokenSignatureScheme, identity.PrivateKey.ToECDSA()))
+ if err != nil {
+ return err
+ }
+
+ identity.BearerToken = string(signedToken)
+ return nil
+}
diff --git a/acp/identity/raw_identity.go b/acp/identity/raw_identity.go
new file mode 100644
index 0000000000..88beeb7b96
--- /dev/null
+++ b/acp/identity/raw_identity.go
@@ -0,0 +1,73 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package identity
+
+import (
+ "encoding/hex"
+
+ "github.com/decred/dcrd/dcrec/secp256k1/v4"
+)
+
+// RawIdentity holds the raw bytes that make up an actor's identity.
+type RawIdentity struct {
+ // PrivateKey is a secp256k1 private key that is a 256-bit big-endian
+ // binary-encoded number, padded to a length of 32 bytes in HEX format.
+ PrivateKey string
+
+ // PublicKey is a compressed 33-byte secp256k1 public key in HEX format.
+ PublicKey string
+
+ // DID is `did:key` key generated from the public key address.
+ DID string
+}
+
+// PublicRawIdentity holds the raw bytes that make up an actor's identity that can be shared publicly.
+type PublicRawIdentity struct {
+ // PublicKey is a compressed 33-byte secp256k1 public key in HEX format.
+ PublicKey string
+
+ // DID is `did:key` key generated from the public key address.
+ DID string
+}
+
+func newRawIdentity(privateKey *secp256k1.PrivateKey, publicKey *secp256k1.PublicKey, did string) RawIdentity {
+ res := RawIdentity{
+ PublicKey: hex.EncodeToString(publicKey.SerializeCompressed()),
+ DID: did,
+ }
+ if privateKey != nil {
+ res.PrivateKey = hex.EncodeToString(privateKey.Serialize())
+ }
+ return res
+}
+
+func (r RawIdentity) Public() PublicRawIdentity {
+ return PublicRawIdentity{
+ PublicKey: r.PublicKey,
+ DID: r.DID,
+ }
+}
+
+// IntoIdentity converts a RawIdentity into an Identity.
+func (r RawIdentity) IntoIdentity() (Identity, error) {
+ privateKeyBytes, err := hex.DecodeString(r.PrivateKey)
+ if err != nil {
+ return Identity{}, err
+ }
+
+ privateKey := secp256k1.PrivKeyFromBytes(privateKeyBytes)
+
+ return Identity{
+ PublicKey: privateKey.PubKey(),
+ PrivateKey: privateKey,
+ DID: r.DID,
+ }, nil
+}
diff --git a/cli/acp_relationship_add.go b/cli/acp_relationship_add.go
index 59b5c3cd32..c0838a2ce2 100644
--- a/cli/acp_relationship_add.go
+++ b/cli/acp_relationship_add.go
@@ -41,7 +41,7 @@ func MakeACPRelationshipAddCommand() *cobra.Command {
Long: `Add new relationship
To share a document (or grant a more restricted access) with another actor, we must add a relationship between the
-actor and the document. Inorder to make the relationship we require all of the following:
+actor and the document. In order to make the relationship we require all of the following:
1) Target DocID: The docID of the document we want to make a relationship for.
2) Collection Name: The name of the collection that has the Target DocID.
3) Relation Name: The type of relation (name must be defined within the linked policy on collection).
@@ -52,7 +52,7 @@ Notes:
- ACP must be available (i.e. ACP can not be disabled).
- The target document must be registered with ACP already (policy & resource specified).
- The requesting identity MUST either be the owner OR the manager (manages the relation) of the resource.
- - If the specified relation was not granted the miminum DPI permissions (read or write) within the policy,
+ - If the specified relation was not granted the minimum DPI permissions (read or write) within the policy,
and a relationship is formed, the subject/actor will still not be able to access (read or write) the resource.
- Learn more about [ACP & DPI Rules](/acp/README.md)
@@ -64,7 +64,7 @@ Example: Let another actor (4d092126012ebaf56161716018a71630d99443d9d5217e9d8502
--actor did:key:z7r8os2G88XXBNBTLj3kFR5rzUJ4VAesbX7PgsA68ak9B5RYcXF5EZEmjRzzinZndPSSwujXb4XKHG6vmKEFG6ZfsfcQn \
--identity e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac
-Example: Creating a dummy relationship does nothing (from database prespective):
+Example: Creating a dummy relationship does nothing (from database perspective):
defradb client acp relationship add \
-c Users \
--docID bae-ff3ceb1c-b5c0-5e86-a024-dd1b16a4261c \
diff --git a/cli/acp_relationship_delete.go b/cli/acp_relationship_delete.go
index 7e0852e301..116ff49702 100644
--- a/cli/acp_relationship_delete.go
+++ b/cli/acp_relationship_delete.go
@@ -41,7 +41,7 @@ func MakeACPRelationshipDeleteCommand() *cobra.Command {
Long: `Delete relationship
To revoke access to a document for an actor, we must delete the relationship between the
-actor and the document. Inorder to delete the relationship we require all of the following:
+actor and the document. In order to delete the relationship we require all of the following:
1) Target DocID: The docID of the document we want to delete a relationship for.
2) Collection Name: The name of the collection that has the Target DocID.
diff --git a/cli/cli.go b/cli/cli.go
index f6950225a6..8f9d3fcbd1 100644
--- a/cli/cli.go
+++ b/cli/cli.go
@@ -121,6 +121,7 @@ func NewDefraCommand() *cobra.Command {
MakePurgeCommand(),
MakeDumpCommand(),
MakeRequestCommand(),
+ MakeNodeIdentityCommand(),
schema,
acp,
view,
diff --git a/cli/node_identity.go b/cli/node_identity.go
new file mode 100644
index 0000000000..d4e6c8969a
--- /dev/null
+++ b/cli/node_identity.go
@@ -0,0 +1,50 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package cli
+
+import (
+ "github.com/spf13/cobra"
+)
+
+func MakeNodeIdentityCommand() *cobra.Command {
+ var cmd = &cobra.Command{
+ Use: "node-identity",
+ Short: "Get the public information about the node's identity",
+ Long: `Get the public information about the node's identity.
+
+Node uses the identity to be able to exchange encryption keys with other nodes.
+
+A public identity contains:
+- A compressed 33-byte secp256k1 public key in HEX format.
+- A "did:key" generated from the public key.
+
+Example to get the identity of the node:
+ defradb client node-identity
+
+`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ db := mustGetContextDB(cmd)
+ identity, err := db.GetNodeIdentity(cmd.Context())
+ if err != nil {
+ return err
+ }
+
+ if identity.HasValue() {
+ return writeJSON(cmd, identity.Value())
+ }
+
+ out := cmd.OutOrStdout()
+ _, err = out.Write([]byte("Node has no identity assigned to it\n"))
+ return err
+ },
+ }
+ return cmd
+}
diff --git a/cli/start.go b/cli/start.go
index 168e2a525c..0bd1510008 100644
--- a/cli/start.go
+++ b/cli/start.go
@@ -15,9 +15,12 @@ import (
"os/signal"
"syscall"
+ "github.com/decred/dcrd/dcrec/secp256k1/v4"
"github.com/sourcenetwork/immutable"
"github.com/spf13/cobra"
+ "github.com/spf13/viper"
+ "github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/crypto"
"github.com/sourcenetwork/defradb/errors"
"github.com/sourcenetwork/defradb/event"
@@ -39,6 +42,11 @@ const devModeBanner = `
`
+const developmentDescription = `Enables a set of features that make development easier but should not be enabled ` +
+ `in production:
+ - allows purging of all persisted data
+ - generates temporary node identity if keyring is disabled`
+
func MakeStartCommand() *cobra.Command {
var cmd = &cobra.Command{
Use: "start",
@@ -100,39 +108,21 @@ func MakeStartCommand() *cobra.Command {
if err != nil {
return err
}
- // load the required peer key or generate one if it doesn't exist
- peerKey, err := kr.Get(peerKeyName)
- if err != nil && errors.Is(err, keyring.ErrNotFound) {
- peerKey, err = crypto.GenerateEd25519()
- if err != nil {
- return err
- }
- err = kr.Set(peerKeyName, peerKey)
- if err != nil {
- return err
- }
- log.Info("generated peer key")
- } else if err != nil {
+ opts, err = getOrCreatePeerKey(kr, opts)
+ if err != nil {
return err
}
- opts = append(opts, net.WithPrivateKey(peerKey))
- // load the optional encryption key
- encryptionKey, err := kr.Get(encryptionKeyName)
- if err != nil && errors.Is(err, keyring.ErrNotFound) && !cfg.GetBool("datastore.noencryption") {
- encryptionKey, err = crypto.GenerateAES256()
- if err != nil {
- return err
- }
- err = kr.Set(encryptionKeyName, encryptionKey)
- if err != nil {
- return err
- }
- log.Info("generated encryption key")
- } else if err != nil && !errors.Is(err, keyring.ErrNotFound) {
+ opts, err = getOrCreateEncryptionKey(kr, cfg, opts)
+ if err != nil {
+ return err
+ }
+
+ opts, err = getOrCreateIdentity(kr, opts)
+ if err != nil {
return err
}
- opts = append(opts, node.WithBadgerEncryptionKey(encryptionKey))
+
// setup the sourcehub transaction signer
sourceHubKeyName := cfg.GetString("acp.sourceHub.KeyName")
if sourceHubKeyName != "" {
@@ -147,6 +137,15 @@ func MakeStartCommand() *cobra.Command {
isDevMode := cfg.GetBool("development")
if isDevMode {
cmd.Printf(devModeBanner)
+ if cfg.GetBool("keyring.disabled") {
+ var err error
+ // TODO: we want to persist this identity so we can restart the node with the same identity
+ // even in development mode. https://github.com/sourcenetwork/defradb/issues/3148
+ opts, err = addEphemeralIdentity(opts)
+ if err != nil {
+ return err
+ }
+ }
}
signalCh := make(chan os.Signal, 1)
@@ -244,7 +243,7 @@ func MakeStartCommand() *cobra.Command {
cmd.PersistentFlags().Bool(
"development",
cfg.GetBool(configFlags["development"]),
- "Enables a set of features that make development easier but should not be enabled in production",
+ developmentDescription,
)
cmd.Flags().Bool(
"no-encryption",
@@ -252,3 +251,79 @@ func MakeStartCommand() *cobra.Command {
"Skip generating an encryption key. Encryption at rest will be disabled. WARNING: This cannot be undone.")
return cmd
}
+
+func getOrCreateEncryptionKey(kr keyring.Keyring, cfg *viper.Viper, opts []node.Option) ([]node.Option, error) {
+ encryptionKey, err := kr.Get(encryptionKeyName)
+ if err != nil && errors.Is(err, keyring.ErrNotFound) && !cfg.GetBool("datastore.noencryption") {
+ encryptionKey, err = crypto.GenerateAES256()
+ if err != nil {
+ return nil, err
+ }
+ err = kr.Set(encryptionKeyName, encryptionKey)
+ if err != nil {
+ return nil, err
+ }
+ log.Info("generated encryption key")
+ } else if err != nil && !errors.Is(err, keyring.ErrNotFound) {
+ return nil, err
+ }
+ opts = append(opts, node.WithBadgerEncryptionKey(encryptionKey))
+ return opts, nil
+}
+
+func getOrCreatePeerKey(kr keyring.Keyring, opts []node.Option) ([]node.Option, error) {
+ peerKey, err := kr.Get(peerKeyName)
+ if err != nil && errors.Is(err, keyring.ErrNotFound) {
+ peerKey, err = crypto.GenerateEd25519()
+ if err != nil {
+ return nil, err
+ }
+ err = kr.Set(peerKeyName, peerKey)
+ if err != nil {
+ return nil, err
+ }
+ log.Info("generated peer key")
+ } else if err != nil {
+ return nil, err
+ }
+ return append(opts, net.WithPrivateKey(peerKey)), nil
+}
+
+func getOrCreateIdentity(kr keyring.Keyring, opts []node.Option) ([]node.Option, error) {
+ identityBytes, err := kr.Get(nodeIdentityKeyName)
+ if err != nil {
+ if !errors.Is(err, keyring.ErrNotFound) {
+ return nil, err
+ }
+ privateKey, err := crypto.GenerateSecp256k1()
+ if err != nil {
+ return nil, err
+ }
+		identityBytes = privateKey.Serialize()
+ err = kr.Set(nodeIdentityKeyName, identityBytes)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ nodeIdentity, err := identity.FromPrivateKey(secp256k1.PrivKeyFromBytes(identityBytes))
+ if err != nil {
+ return nil, err
+ }
+
+ return append(opts, db.WithNodeIdentity(nodeIdentity)), nil
+}
+
+func addEphemeralIdentity(opts []node.Option) ([]node.Option, error) {
+ privateKey, err := crypto.GenerateSecp256k1()
+ if err != nil {
+ return nil, err
+ }
+
+ nodeIdentity, err := identity.FromPrivateKey(secp256k1.PrivKeyFromBytes(privateKey.Serialize()))
+ if err != nil {
+ return nil, err
+ }
+
+ return append(opts, db.WithNodeIdentity(nodeIdentity)), nil
+}
diff --git a/cli/utils.go b/cli/utils.go
index fb9b5a6d3f..4e1cf14cc5 100644
--- a/cli/utils.go
+++ b/cli/utils.go
@@ -23,6 +23,7 @@ import (
"github.com/spf13/cobra"
"github.com/spf13/viper"
+ "github.com/sourcenetwork/defradb/acp/identity"
acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/http"
@@ -31,8 +32,9 @@ import (
)
const (
- peerKeyName = "peer-key"
- encryptionKeyName = "encryption-key"
+ peerKeyName = "peer-key"
+ encryptionKeyName = "encryption-key"
+ nodeIdentityKeyName = "node-identity-key"
)
type contextKey string
@@ -163,18 +165,19 @@ func setContextIdentity(cmd *cobra.Command, privateKeyHex string) error {
}
privKey := secp256k1.PrivKeyFromBytes(data)
- identity, err := acpIdentity.FromPrivateKey(
- privKey,
+ ident, err := acpIdentity.FromPrivateKey(privKey)
+ if err != nil {
+ return err
+ }
+ err = ident.UpdateToken(
authTokenExpiration,
immutable.Some(cfg.GetString("api.address")),
- sourcehubAddress,
- false,
- )
+ sourcehubAddress)
if err != nil {
return err
}
- ctx := db.SetContextIdentity(cmd.Context(), immutable.Some(identity))
+ ctx := identity.WithContext(cmd.Context(), immutable.Some(ident))
cmd.SetContext(ctx)
return nil
}
@@ -185,11 +188,11 @@ func setContextRootDir(cmd *cobra.Command) error {
if err != nil {
return err
}
- home, err := os.UserHomeDir()
- if err != nil {
- return err
- }
if rootdir == "" {
+ home, err := os.UserHomeDir()
+ if err != nil {
+ return err
+ }
rootdir = filepath.Join(home, ".defradb")
}
ctx := context.WithValue(cmd.Context(), rootDirContextKey, rootdir)
diff --git a/client/db.go b/client/db.go
index 30f123d286..e8942e8501 100644
--- a/client/db.go
+++ b/client/db.go
@@ -19,6 +19,7 @@ import (
"github.com/lens-vm/lens/host-go/config/model"
"github.com/sourcenetwork/immutable"
+ "github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/datastore"
"github.com/sourcenetwork/defradb/event"
)
@@ -135,6 +136,9 @@ type DB interface {
relation string,
targetActor string,
) (DeleteDocActorRelationshipResult, error)
+
+ // GetNodeIdentity returns the identity of the node.
+ GetNodeIdentity(context.Context) (immutable.Option[identity.PublicRawIdentity], error)
}
// Store contains the core DefraDB read-write operations.
diff --git a/client/mocks/db.go b/client/mocks/db.go
index 7925c3c850..63fa0b5950 100644
--- a/client/mocks/db.go
+++ b/client/mocks/db.go
@@ -13,6 +13,8 @@ import (
go_datastore "github.com/ipfs/go-datastore"
+ identity "github.com/sourcenetwork/defradb/acp/identity"
+
immutable "github.com/sourcenetwork/immutable"
mock "github.com/stretchr/testify/mock"
@@ -1049,6 +1051,62 @@ func (_c *DB_GetCollections_Call) RunAndReturn(run func(context.Context, client.
return _c
}
+// GetNodeIdentity provides a mock function with given fields: _a0
+func (_m *DB) GetNodeIdentity(_a0 context.Context) (immutable.Option[identity.PublicRawIdentity], error) {
+ ret := _m.Called(_a0)
+
+ if len(ret) == 0 {
+ panic("no return value specified for GetNodeIdentity")
+ }
+
+ var r0 immutable.Option[identity.PublicRawIdentity]
+ var r1 error
+ if rf, ok := ret.Get(0).(func(context.Context) (immutable.Option[identity.PublicRawIdentity], error)); ok {
+ return rf(_a0)
+ }
+ if rf, ok := ret.Get(0).(func(context.Context) immutable.Option[identity.PublicRawIdentity]); ok {
+ r0 = rf(_a0)
+ } else {
+ r0 = ret.Get(0).(immutable.Option[identity.PublicRawIdentity])
+ }
+
+ if rf, ok := ret.Get(1).(func(context.Context) error); ok {
+ r1 = rf(_a0)
+ } else {
+ r1 = ret.Error(1)
+ }
+
+ return r0, r1
+}
+
+// DB_GetNodeIdentity_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetNodeIdentity'
+type DB_GetNodeIdentity_Call struct {
+ *mock.Call
+}
+
+// GetNodeIdentity is a helper method to define mock.On call
+// - _a0 context.Context
+func (_e *DB_Expecter) GetNodeIdentity(_a0 interface{}) *DB_GetNodeIdentity_Call {
+ return &DB_GetNodeIdentity_Call{Call: _e.mock.On("GetNodeIdentity", _a0)}
+}
+
+func (_c *DB_GetNodeIdentity_Call) Run(run func(_a0 context.Context)) *DB_GetNodeIdentity_Call {
+ _c.Call.Run(func(args mock.Arguments) {
+ run(args[0].(context.Context))
+ })
+ return _c
+}
+
+func (_c *DB_GetNodeIdentity_Call) Return(_a0 immutable.Option[identity.PublicRawIdentity], _a1 error) *DB_GetNodeIdentity_Call {
+ _c.Call.Return(_a0, _a1)
+ return _c
+}
+
+func (_c *DB_GetNodeIdentity_Call) RunAndReturn(run func(context.Context) (immutable.Option[identity.PublicRawIdentity], error)) *DB_GetNodeIdentity_Call {
+ _c.Call.Return(run)
+ return _c
+}
+
// GetSchemaByVersionID provides a mock function with given fields: _a0, _a1
func (_m *DB) GetSchemaByVersionID(_a0 context.Context, _a1 string) (client.SchemaDescription, error) {
ret := _m.Called(_a0, _a1)
diff --git a/docs/website/references/cli/defradb_client.md b/docs/website/references/cli/defradb_client.md
index adbbb4eeaa..c23547e6ce 100644
--- a/docs/website/references/cli/defradb_client.md
+++ b/docs/website/references/cli/defradb_client.md
@@ -43,6 +43,7 @@ Execute queries, add schema types, obtain node info, etc.
* [defradb client collection](defradb_client_collection.md) - Interact with a collection.
* [defradb client dump](defradb_client_dump.md) - Dump the contents of DefraDB node-side
* [defradb client index](defradb_client_index.md) - Manage collections' indexes of a running DefraDB instance
+* [defradb client node-identity](defradb_client_node-identity.md) - Get the public information about the node's identity
* [defradb client p2p](defradb_client_p2p.md) - Interact with the DefraDB P2P system
* [defradb client purge](defradb_client_purge.md) - Delete all persisted data and restart
* [defradb client query](defradb_client_query.md) - Send a DefraDB GraphQL query request
diff --git a/docs/website/references/cli/defradb_client_acp_relationship_add.md b/docs/website/references/cli/defradb_client_acp_relationship_add.md
index ba5647c163..1251ffb74e 100644
--- a/docs/website/references/cli/defradb_client_acp_relationship_add.md
+++ b/docs/website/references/cli/defradb_client_acp_relationship_add.md
@@ -7,7 +7,7 @@ Add new relationship
Add new relationship
To share a document (or grant a more restricted access) with another actor, we must add a relationship between the
-actor and the document. Inorder to make the relationship we require all of the following:
+actor and the document. In order to make the relationship we require all of the following:
1) Target DocID: The docID of the document we want to make a relationship for.
2) Collection Name: The name of the collection that has the Target DocID.
3) Relation Name: The type of relation (name must be defined within the linked policy on collection).
@@ -18,7 +18,7 @@ Notes:
- ACP must be available (i.e. ACP can not be disabled).
- The target document must be registered with ACP already (policy & resource specified).
- The requesting identity MUST either be the owner OR the manager (manages the relation) of the resource.
- - If the specified relation was not granted the miminum DPI permissions (read or write) within the policy,
+ - If the specified relation was not granted the minimum DPI permissions (read or write) within the policy,
and a relationship is formed, the subject/actor will still not be able to access (read or write) the resource.
- Learn more about [ACP & DPI Rules](/acp/README.md)
@@ -30,7 +30,7 @@ Example: Let another actor (4d092126012ebaf56161716018a71630d99443d9d5217e9d8502
--actor did:key:z7r8os2G88XXBNBTLj3kFR5rzUJ4VAesbX7PgsA68ak9B5RYcXF5EZEmjRzzinZndPSSwujXb4XKHG6vmKEFG6ZfsfcQn \
--identity e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac
-Example: Creating a dummy relationship does nothing (from database prespective):
+Example: Creating a dummy relationship does nothing (from database perspective):
defradb client acp relationship add \
-c Users \
--docID bae-ff3ceb1c-b5c0-5e86-a024-dd1b16a4261c \
diff --git a/docs/website/references/cli/defradb_client_acp_relationship_delete.md b/docs/website/references/cli/defradb_client_acp_relationship_delete.md
index 501f5fb242..8da5e6a8ee 100644
--- a/docs/website/references/cli/defradb_client_acp_relationship_delete.md
+++ b/docs/website/references/cli/defradb_client_acp_relationship_delete.md
@@ -7,7 +7,7 @@ Delete relationship
Delete relationship
To revoke access to a document for an actor, we must delete the relationship between the
-actor and the document. Inorder to delete the relationship we require all of the following:
+actor and the document. In order to delete the relationship we require all of the following:
1) Target DocID: The docID of the document we want to delete a relationship for.
2) Collection Name: The name of the collection that has the Target DocID.
diff --git a/docs/website/references/cli/defradb_client_node-identity.md b/docs/website/references/cli/defradb_client_node-identity.md
new file mode 100644
index 0000000000..907a95990d
--- /dev/null
+++ b/docs/website/references/cli/defradb_client_node-identity.md
@@ -0,0 +1,55 @@
+## defradb client node-identity
+
+Get the public information about the node's identity
+
+### Synopsis
+
+Get the public information about the node's identity.
+
+Node uses the identity to be able to exchange encryption keys with other nodes.
+
+A public identity contains:
+- A compressed 33-byte secp256k1 public key in HEX format.
+- A "did:key" generated from the public key.
+
+Example to get the identity of the node:
+ defradb client node-identity
+
+
+
+```
+defradb client node-identity [flags]
+```
+
+### Options
+
+```
+ -h, --help help for node-identity
+```
+
+### Options inherited from parent commands
+
+```
+ -i, --identity string Hex formatted private key used to authenticate with ACP
+ --keyring-backend string Keyring backend to use. Options are file or system (default "file")
+ --keyring-namespace string Service name to use when using the system backend (default "defradb")
+ --keyring-path string Path to store encrypted keys when using the file backend (default "keys")
+ --log-format string Log format to use. Options are text or json (default "text")
+ --log-level string Log level to use. Options are debug, info, error, fatal (default "info")
+ --log-output string Log output path. Options are stderr or stdout. (default "stderr")
+ --log-overrides string Logger config overrides. Format ,=,...;,...
+ --log-source Include source location in logs
+ --log-stacktrace Include stacktrace in error and fatal logs
+ --no-keyring Disable the keyring and generate ephemeral keys
+ --no-log-color Disable colored log output
+ --rootdir string Directory for persistent data (default: $HOME/.defradb)
+ --secret-file string Path to the file containing secrets (default ".env")
+ --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor
+ --tx uint Transaction ID
+ --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181")
+```
+
+### SEE ALSO
+
+* [defradb client](defradb_client.md) - Interact with a DefraDB node
+
diff --git a/docs/website/references/cli/defradb_start.md b/docs/website/references/cli/defradb_start.md
index 79560ac62b..5aea7e8ed0 100644
--- a/docs/website/references/cli/defradb_start.md
+++ b/docs/website/references/cli/defradb_start.md
@@ -14,7 +14,9 @@ defradb start [flags]
```
--allowed-origins stringArray List of origins to allow for CORS requests
- --development Enables a set of features that make development easier but should not be enabled in production
+ --development Enables a set of features that make development easier but should not be enabled in production:
+ - allows purging of all persisted data
+ - generates temporary node identity if keyring is disabled
-h, --help help for start
--max-txn-retries int Specify the maximum number of retries per transaction (default 5)
--no-encryption Skip generating an encryption key. Encryption at rest will be disabled. WARNING: This cannot be undone.
diff --git a/docs/website/references/http/openapi.json b/docs/website/references/http/openapi.json
index d4cfdc830c..a6795d6959 100644
--- a/docs/website/references/http/openapi.json
+++ b/docs/website/references/http/openapi.json
@@ -395,6 +395,17 @@
},
"type": "object"
},
+ "identity": {
+ "properties": {
+ "DID": {
+ "type": "string"
+ },
+ "PublicKey": {
+ "type": "string"
+ }
+ },
+ "type": "object"
+ },
"index": {
"properties": {
"Fields": {
@@ -1700,6 +1711,34 @@
]
}
},
+ "/node/identity": {
+ "get": {
+ "description": "Get node's public identity",
+ "operationId": "node_identity",
+ "responses": {
+ "200": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/identity"
+ }
+ }
+ },
+ "description": "Identity"
+ },
+ "400": {
+ "$ref": "#/components/responses/error"
+ },
+ "default": {
+ "description": ""
+ }
+ },
+ "tags": [
+ "node",
+ "identity"
+ ]
+ }
+ },
"/p2p/collections": {
"delete": {
"description": "Remove peer collections",
diff --git a/http/auth.go b/http/auth.go
index 0c2d6ae28d..79f4262252 100644
--- a/http/auth.go
+++ b/http/auth.go
@@ -19,7 +19,6 @@ import (
"github.com/sourcenetwork/immutable"
acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
- "github.com/sourcenetwork/defradb/internal/db"
)
const (
@@ -58,19 +57,19 @@ func AuthMiddleware(next http.Handler) http.Handler {
return
}
- identity, err := acpIdentity.FromToken([]byte(token))
+ ident, err := acpIdentity.FromToken([]byte(token))
if err != nil {
http.Error(rw, "forbidden", http.StatusForbidden)
return
}
- err = verifyAuthToken(identity, strings.ToLower(req.Host))
+ err = verifyAuthToken(ident, strings.ToLower(req.Host))
if err != nil {
http.Error(rw, "forbidden", http.StatusForbidden)
return
}
- ctx := db.SetContextIdentity(req.Context(), immutable.Some(identity))
+ ctx := acpIdentity.WithContext(req.Context(), immutable.Some(ident))
next.ServeHTTP(rw, req.WithContext(ctx))
})
}
diff --git a/http/auth_test.go b/http/auth_test.go
index 7e7489e862..365ed98c89 100644
--- a/http/auth_test.go
+++ b/http/auth_test.go
@@ -28,13 +28,10 @@ func TestVerifyAuthToken(t *testing.T) {
privKey, err := crypto.GenerateSecp256k1()
require.NoError(t, err)
- identity, err := acpIdentity.FromPrivateKey(
- privKey,
- time.Hour,
- immutable.Some(audience),
- immutable.None[string](),
- false,
- )
+ identity, err := acpIdentity.FromPrivateKey(privKey)
+ require.NoError(t, err)
+
+ err = identity.UpdateToken(time.Hour, immutable.Some(audience), immutable.None[string]())
require.NoError(t, err)
err = verifyAuthToken(identity, audience)
@@ -45,13 +42,10 @@ func TestVerifyAuthTokenErrorsWithNonMatchingAudience(t *testing.T) {
privKey, err := crypto.GenerateSecp256k1()
require.NoError(t, err)
- identity, err := acpIdentity.FromPrivateKey(
- privKey,
- time.Hour,
- immutable.Some("valid"),
- immutable.None[string](),
- false,
- )
+ identity, err := acpIdentity.FromPrivateKey(privKey)
+ require.NoError(t, err)
+
+ err = identity.UpdateToken(time.Hour, immutable.Some("valid"), immutable.None[string]())
require.NoError(t, err)
err = verifyAuthToken(identity, "invalid")
@@ -64,14 +58,11 @@ func TestVerifyAuthTokenErrorsWithExpired(t *testing.T) {
privKey, err := crypto.GenerateSecp256k1()
require.NoError(t, err)
- identity, err := acpIdentity.FromPrivateKey(
- privKey,
- // negative expiration
- -time.Hour,
- immutable.Some(audience),
- immutable.None[string](),
- false,
- )
+ identity, err := acpIdentity.FromPrivateKey(privKey)
+ require.NoError(t, err)
+
+ // negative expiration
+ err = identity.UpdateToken(-time.Hour, immutable.Some(audience), immutable.None[string]())
require.NoError(t, err)
err = verifyAuthToken(identity, "123abc")
diff --git a/http/client.go b/http/client.go
index ca43181c3c..2e57c017da 100644
--- a/http/client.go
+++ b/http/client.go
@@ -26,6 +26,7 @@ import (
"github.com/sourcenetwork/immutable"
+ "github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/datastore"
"github.com/sourcenetwork/defradb/event"
@@ -508,3 +509,17 @@ func (c *Client) Events() *event.Bus {
func (c *Client) MaxTxnRetries() int {
panic("client side database")
}
+
+func (c *Client) GetNodeIdentity(ctx context.Context) (immutable.Option[identity.PublicRawIdentity], error) {
+ methodURL := c.http.baseURL.JoinPath("node", "identity")
+
+ req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil)
+ if err != nil {
+ return immutable.None[identity.PublicRawIdentity](), err
+ }
+ var ident immutable.Option[identity.PublicRawIdentity]
+ if err := c.http.requestJson(req, &ident); err != nil {
+ return immutable.None[identity.PublicRawIdentity](), err
+ }
+	return ident, nil
+}
diff --git a/http/errors.go b/http/errors.go
index aa6d6537ac..f1e03d5882 100644
--- a/http/errors.go
+++ b/http/errors.go
@@ -29,17 +29,16 @@ const (
// This list is incomplete. Undefined errors may also be returned.
// Errors returned from this package may be tested against these errors with errors.Is.
var (
- ErrNoListener = errors.New("cannot serve with no listener")
- ErrNoEmail = errors.New("email address must be specified for tls with autocert")
- ErrInvalidRequestBody = errors.New("invalid request body")
- ErrStreamingNotSupported = errors.New("streaming not supported")
- ErrMigrationNotFound = errors.New("migration not found")
- ErrMissingRequest = errors.New("missing request")
- ErrInvalidTransactionId = errors.New("invalid transaction id")
- ErrP2PDisabled = errors.New("p2p network is disabled")
- ErrMethodIsNotImplemented = errors.New(errMethodIsNotImplemented)
- ErrMissingIdentityPrivateKey = errors.New("identity has no private key")
- ErrMissingIdentityPublicKey = errors.New("identity has no public key")
+ ErrNoListener = errors.New("cannot serve with no listener")
+ ErrNoEmail = errors.New("email address must be specified for tls with autocert")
+ ErrInvalidRequestBody = errors.New("invalid request body")
+ ErrStreamingNotSupported = errors.New("streaming not supported")
+ ErrMigrationNotFound = errors.New("migration not found")
+ ErrMissingRequest = errors.New("missing request")
+ ErrInvalidTransactionId = errors.New("invalid transaction id")
+ ErrP2PDisabled = errors.New("p2p network is disabled")
+ ErrMethodIsNotImplemented = errors.New(errMethodIsNotImplemented)
+ ErrMissingIdentity = errors.New("required identity is missing")
)
type errorResponse struct {
diff --git a/http/handler_store.go b/http/handler_store.go
index 86ab9aeb2d..35436f3762 100644
--- a/http/handler_store.go
+++ b/http/handler_store.go
@@ -342,6 +342,17 @@ func (s *storeHandler) ExecRequest(rw http.ResponseWriter, req *http.Request) {
}
}
+func (s *storeHandler) GetNodeIdentity(rw http.ResponseWriter, req *http.Request) {
+ db := mustGetContextClientDB(req)
+
+ identity, err := db.GetNodeIdentity(req.Context())
+ if err != nil {
+ responseJSON(rw, http.StatusBadRequest, errorResponse{err})
+ return
+ }
+ responseJSON(rw, http.StatusOK, identity)
+}
+
func (h *storeHandler) bindRoutes(router *Router) {
successResponse := &openapi3.ResponseRef{
Ref: "#/components/responses/success",
@@ -373,6 +384,9 @@ func (h *storeHandler) bindRoutes(router *Router) {
patchSchemaRequestSchema := &openapi3.SchemaRef{
Ref: "#/components/schemas/patch_schema_request",
}
+ identitySchema := &openapi3.SchemaRef{
+ Ref: "#/components/schemas/identity",
+ }
graphQLResponseSchema := openapi3.NewObjectSchema().
WithProperties(map[string]*openapi3.Schema{
@@ -518,13 +532,13 @@ func (h *storeHandler) bindRoutes(router *Router) {
patchCollection.Responses.Set("200", successResponse)
patchCollection.Responses.Set("400", errorResponse)
- collectionDefintionsSchema := openapi3.NewArraySchema()
- collectionDefintionsSchema.Items = collectionDefinitionSchema
+ collectionDefinitionsSchema := openapi3.NewArraySchema()
+ collectionDefinitionsSchema.Items = collectionDefinitionSchema
addViewResponseSchema := openapi3.NewOneOfSchema()
addViewResponseSchema.OneOf = openapi3.SchemaRefs{
collectionDefinitionSchema,
- openapi3.NewSchemaRef("", collectionDefintionsSchema),
+ openapi3.NewSchemaRef("", collectionDefinitionsSchema),
}
addViewResponse := openapi3.NewResponse().
@@ -629,6 +643,17 @@ func (h *storeHandler) bindRoutes(router *Router) {
debugDump.Responses.Set("200", successResponse)
debugDump.Responses.Set("400", errorResponse)
+ identityResponse := openapi3.NewResponse().
+ WithDescription("Identity").
+ WithJSONSchemaRef(identitySchema)
+
+ nodeIdentity := openapi3.NewOperation()
+ nodeIdentity.OperationID = "node_identity"
+ nodeIdentity.Description = "Get node's public identity"
+ nodeIdentity.Tags = []string{"node", "identity"}
+ nodeIdentity.AddResponse(200, identityResponse)
+ nodeIdentity.Responses.Set("400", errorResponse)
+
router.AddRoute("/backup/export", http.MethodPost, backupExport, h.BasicExport)
router.AddRoute("/backup/import", http.MethodPost, backupImport, h.BasicImport)
router.AddRoute("/collections", http.MethodGet, collectionDescribe, h.GetCollection)
@@ -643,4 +668,5 @@ func (h *storeHandler) bindRoutes(router *Router) {
router.AddRoute("/schema", http.MethodGet, schemaDescribe, h.GetSchema)
router.AddRoute("/schema/default", http.MethodPost, setActiveSchemaVersion, h.SetActiveSchemaVersion)
router.AddRoute("/lens", http.MethodPost, setMigration, h.SetMigration)
+ router.AddRoute("/node/identity", http.MethodGet, nodeIdentity, h.GetNodeIdentity)
}
diff --git a/http/http_client.go b/http/http_client.go
index 5b7b75577d..aa020222c2 100644
--- a/http/http_client.go
+++ b/http/http_client.go
@@ -18,6 +18,7 @@ import (
"net/url"
"strings"
+ "github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/internal/db"
)
@@ -48,7 +49,7 @@ func (c *httpClient) setDefaultHeaders(req *http.Request) error {
if ok {
req.Header.Set(txHeaderName, fmt.Sprintf("%d", txn.ID()))
}
- id := db.GetContextIdentity(req.Context())
+ id := identity.FromContext(req.Context())
if !id.HasValue() {
return nil
}
diff --git a/http/openapi.go b/http/openapi.go
index b3c82d1662..850ce081f4 100644
--- a/http/openapi.go
+++ b/http/openapi.go
@@ -15,6 +15,7 @@ import (
"github.com/getkin/kin-openapi/openapi3gen"
"github.com/libp2p/go-libp2p/core/peer"
+ "github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/client"
)
@@ -47,6 +48,7 @@ var openApiSchemas = map[string]any{
"acp_relationship_add_result": &client.AddDocActorRelationshipResult{},
"acp_relationship_delete_request": &deleteDocActorRelationshipRequest{},
"acp_relationship_delete_result": &client.DeleteDocActorRelationshipResult{},
+ "identity": &identity.PublicRawIdentity{},
}
func NewOpenAPISpec() (*openapi3.T, error) {
diff --git a/internal/db/backup_test.go b/internal/db/backup_test.go
index 033f95bcd7..b76a42bfd6 100644
--- a/internal/db/backup_test.go
+++ b/internal/db/backup_test.go
@@ -18,6 +18,7 @@ import (
"github.com/stretchr/testify/require"
+ "github.com/sourcenetwork/defradb/acp/identity"
acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/client"
)
@@ -66,7 +67,7 @@ func TestBasicExport_WithNormalFormatting_NoError(t *testing.T) {
require.NoError(t, err)
defer txn.Discard(ctx)
- ctx = SetContextIdentity(ctx, acpIdentity.None)
+ ctx = identity.WithContext(ctx, acpIdentity.None)
ctx = SetContextTxn(ctx, txn)
filepath := t.TempDir() + "/test.json"
@@ -131,7 +132,7 @@ func TestBasicExport_WithPrettyFormatting_NoError(t *testing.T) {
require.NoError(t, err)
defer txn.Discard(ctx)
- ctx = SetContextIdentity(ctx, acpIdentity.None)
+ ctx = identity.WithContext(ctx, acpIdentity.None)
ctx = SetContextTxn(ctx, txn)
filepath := t.TempDir() + "/test.json"
@@ -196,7 +197,7 @@ func TestBasicExport_WithSingleCollection_NoError(t *testing.T) {
require.NoError(t, err)
defer txn.Discard(ctx)
- ctx = SetContextIdentity(ctx, acpIdentity.None)
+ ctx = identity.WithContext(ctx, acpIdentity.None)
ctx = SetContextTxn(ctx, txn)
filepath := t.TempDir() + "/test.json"
@@ -273,7 +274,7 @@ func TestBasicExport_WithMultipleCollectionsAndUpdate_NoError(t *testing.T) {
require.NoError(t, err)
defer txn.Discard(ctx)
- ctx = SetContextIdentity(ctx, acpIdentity.None)
+ ctx = identity.WithContext(ctx, acpIdentity.None)
ctx = SetContextTxn(ctx, txn)
filepath := t.TempDir() + "/test.json"
@@ -338,7 +339,7 @@ func TestBasicExport_EnsureFileOverwrite_NoError(t *testing.T) {
require.NoError(t, err)
defer txn.Discard(ctx)
- ctx = SetContextIdentity(ctx, acpIdentity.None)
+ ctx = identity.WithContext(ctx, acpIdentity.None)
ctx = SetContextTxn(ctx, txn)
filepath := t.TempDir() + "/test.json"
@@ -386,7 +387,7 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) {
txn, err := db.NewTxn(ctx, false)
require.NoError(t, err)
- ctx = SetContextIdentity(ctx, acpIdentity.None)
+ ctx = identity.WithContext(ctx, acpIdentity.None)
ctx = SetContextTxn(ctx, txn)
filepath := t.TempDir() + "/test.json"
@@ -406,7 +407,7 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) {
txn, err = db.NewTxn(ctx, true)
require.NoError(t, err)
- ctx = SetContextIdentity(ctx, acpIdentity.None)
+ ctx = identity.WithContext(ctx, acpIdentity.None)
ctx = SetContextTxn(ctx, txn)
col1, err := db.getCollectionByName(ctx, "Address")
diff --git a/internal/db/collection_acp.go b/internal/db/collection_acp.go
index 9ca432f9aa..fa263c7aeb 100644
--- a/internal/db/collection_acp.go
+++ b/internal/db/collection_acp.go
@@ -14,6 +14,7 @@ import (
"context"
"github.com/sourcenetwork/defradb/acp"
+ "github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/internal/db/permission"
)
@@ -36,10 +37,9 @@ func (c *collection) registerDocWithACP(
if !c.db.acp.HasValue() {
return nil
}
- identity := GetContextIdentity(ctx)
return permission.RegisterDocOnCollectionWithACP(
ctx,
- identity,
+ identity.FromContext(ctx),
c.db.acp.Value(),
c,
docID,
@@ -55,10 +55,9 @@ func (c *collection) checkAccessOfDocWithACP(
if !c.db.acp.HasValue() {
return true, nil
}
- identity := GetContextIdentity(ctx)
return permission.CheckAccessOfDocOnCollectionWithACP(
ctx,
- identity,
+ identity.FromContext(ctx),
c.db.acp.Value(),
c,
dpiPermission,
diff --git a/internal/db/collection_get.go b/internal/db/collection_get.go
index 87c8ccbca6..05e6d43308 100644
--- a/internal/db/collection_get.go
+++ b/internal/db/collection_get.go
@@ -13,6 +13,7 @@ package db
import (
"context"
+ "github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/base"
@@ -59,11 +60,10 @@ func (c *collection) get(
showDeleted bool,
) (*client.Document, error) {
txn := mustGetContextTxn(ctx)
- identity := GetContextIdentity(ctx)
// create a new document fetcher
df := c.newFetcher()
// initialize it with the primary index
- err := df.Init(ctx, identity, txn, c.db.acp, c, fields, nil, nil, false, showDeleted)
+ err := df.Init(ctx, identity.FromContext(ctx), txn, c.db.acp, c, fields, nil, nil, false, showDeleted)
if err != nil {
_ = df.Close()
return nil, err
diff --git a/internal/db/collection_index.go b/internal/db/collection_index.go
index a0786eb8c8..eb2b1b8d4c 100644
--- a/internal/db/collection_index.go
+++ b/internal/db/collection_index.go
@@ -20,6 +20,7 @@ import (
"github.com/sourcenetwork/immutable"
+ "github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/client/request"
"github.com/sourcenetwork/defradb/datastore"
@@ -297,12 +298,11 @@ func (c *collection) iterateAllDocs(
exec func(doc *client.Document) error,
) error {
txn := mustGetContextTxn(ctx)
- identity := GetContextIdentity(ctx)
df := c.newFetcher()
err := df.Init(
ctx,
- identity,
+ identity.FromContext(ctx),
txn,
c.db.acp,
c,
diff --git a/internal/db/collection_update.go b/internal/db/collection_update.go
index 2348095500..29619c48cc 100644
--- a/internal/db/collection_update.go
+++ b/internal/db/collection_update.go
@@ -16,6 +16,7 @@ import (
"github.com/sourcenetwork/immutable"
"github.com/valyala/fastjson"
+ "github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/client/request"
"github.com/sourcenetwork/defradb/internal/planner"
@@ -163,10 +164,9 @@ func (c *collection) makeSelectionPlan(
}
txn := mustGetContextTxn(ctx)
- identity := GetContextIdentity(ctx)
planner := planner.New(
ctx,
- identity,
+ identity.FromContext(ctx),
c.db.acp,
c.db,
txn,
diff --git a/internal/db/config.go b/internal/db/config.go
index f2fc942ae2..03fd9df7d9 100644
--- a/internal/db/config.go
+++ b/internal/db/config.go
@@ -14,6 +14,8 @@ import (
"time"
"github.com/sourcenetwork/immutable"
+
+ "github.com/sourcenetwork/defradb/acp/identity"
)
const (
@@ -24,6 +26,7 @@ const (
type dbOptions struct {
maxTxnRetries immutable.Option[int]
RetryIntervals []time.Duration
+ identity immutable.Option[identity.Identity]
}
// defaultOptions returns the default db options.
@@ -59,3 +62,9 @@ func WithRetryInterval(interval []time.Duration) Option {
}
}
}
+
+func WithNodeIdentity(ident identity.Identity) Option {
+ return func(opts *dbOptions) {
+ opts.identity = immutable.Some(ident)
+ }
+}
diff --git a/internal/db/context.go b/internal/db/context.go
index a2fa50507f..2af3d95a22 100644
--- a/internal/db/context.go
+++ b/internal/db/context.go
@@ -13,18 +13,12 @@ package db
import (
"context"
- "github.com/sourcenetwork/immutable"
-
- acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/datastore"
)
// txnContextKey is the key type for transaction context values.
type txnContextKey struct{}
-// identityContextKey is the key type for ACP identity context values.
-type identityContextKey struct{}
-
// explicitTxn is a transaction that is managed outside of a db operation.
type explicitTxn struct {
datastore.Txn
@@ -85,24 +79,3 @@ func TryGetContextTxn(ctx context.Context) (datastore.Txn, bool) {
func SetContextTxn(ctx context.Context, txn datastore.Txn) context.Context {
return context.WithValue(ctx, txnContextKey{}, txn)
}
-
-// GetContextIdentity returns the identity from the given context.
-//
-// If an identity does not exist `NoIdentity` is returned.
-func GetContextIdentity(ctx context.Context) immutable.Option[acpIdentity.Identity] {
- identity, ok := ctx.Value(identityContextKey{}).(acpIdentity.Identity)
- if ok {
- return immutable.Some(identity)
- }
- return acpIdentity.None
-}
-
-// SetContextTxn returns a new context with the identity value set.
-//
-// This will overwrite any previously set identity value.
-func SetContextIdentity(ctx context.Context, identity immutable.Option[acpIdentity.Identity]) context.Context {
- if identity.HasValue() {
- return context.WithValue(ctx, identityContextKey{}, identity.Value())
- }
- return context.WithValue(ctx, identityContextKey{}, nil)
-}
diff --git a/internal/db/db.go b/internal/db/db.go
index 1e52b16437..2e5363b94b 100644
--- a/internal/db/db.go
+++ b/internal/db/db.go
@@ -27,6 +27,7 @@ import (
"github.com/sourcenetwork/immutable"
"github.com/sourcenetwork/defradb/acp"
+ "github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/datastore"
"github.com/sourcenetwork/defradb/errors"
@@ -75,6 +76,9 @@ type db struct {
// The ID of the last transaction created.
previousTxnID atomic.Uint64
+ // The identity of the current node
+ nodeIdentity immutable.Option[identity.Identity]
+
// Contains ACP if it exists
acp immutable.Option[acp.ACP]
@@ -140,6 +144,8 @@ func newDB(
db.maxTxnRetries = opts.maxTxnRetries
}
+ db.nodeIdentity = opts.identity
+
if lens != nil {
lens.Init(db)
}
@@ -208,11 +214,9 @@ func (db *db) AddPolicy(
return client.AddPolicyResult{}, client.ErrACPOperationButACPNotAvailable
}
- identity := GetContextIdentity(ctx)
-
policyID, err := db.acp.Value().AddPolicy(
ctx,
- identity.Value(),
+ identity.FromContext(ctx).Value(),
policy,
)
if err != nil {
@@ -243,15 +247,13 @@ func (db *db) AddDocActorRelationship(
return client.AddDocActorRelationshipResult{}, client.ErrACPOperationButCollectionHasNoPolicy
}
- identity := GetContextIdentity(ctx)
-
exists, err := db.acp.Value().AddDocActorRelationship(
ctx,
policyID,
resourceName,
docID,
relation,
- identity.Value(),
+ identity.FromContext(ctx).Value(),
targetActor,
)
@@ -283,15 +285,13 @@ func (db *db) DeleteDocActorRelationship(
return client.DeleteDocActorRelationshipResult{}, client.ErrACPOperationButCollectionHasNoPolicy
}
- identity := GetContextIdentity(ctx)
-
recordFound, err := db.acp.Value().DeleteDocActorRelationship(
ctx,
policyID,
resourceName,
docID,
relation,
- identity.Value(),
+ identity.FromContext(ctx).Value(),
targetActor,
)
@@ -302,6 +302,13 @@ func (db *db) DeleteDocActorRelationship(
return client.DeleteDocActorRelationshipResult{RecordFound: recordFound}, nil
}
+func (db *db) GetNodeIdentity(context.Context) (immutable.Option[identity.PublicRawIdentity], error) {
+ if db.nodeIdentity.HasValue() {
+ return immutable.Some(db.nodeIdentity.Value().IntoRawIdentity().Public()), nil
+ }
+ return immutable.None[identity.PublicRawIdentity](), nil
+}
+
// Initialize is called when a database is first run and creates all the db global meta data
// like Collection ID counters.
func (db *db) initialize(ctx context.Context) error {
diff --git a/internal/db/request.go b/internal/db/request.go
index 560e270d0b..611382d6c2 100644
--- a/internal/db/request.go
+++ b/internal/db/request.go
@@ -13,6 +13,7 @@ package db
import (
"context"
+ "github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/internal/planner"
)
@@ -47,8 +48,7 @@ func (db *db) execRequest(ctx context.Context, request string, options *client.G
}
txn := mustGetContextTxn(ctx)
- identity := GetContextIdentity(ctx)
- planner := planner.New(ctx, identity, db.acp, db, txn)
+ planner := planner.New(ctx, identity.FromContext(ctx), db.acp, db, txn)
results, err := planner.RunRequest(ctx, parsedRequest)
if err != nil {
diff --git a/internal/db/subscriptions.go b/internal/db/subscriptions.go
index b876d6c90c..4b92b127fc 100644
--- a/internal/db/subscriptions.go
+++ b/internal/db/subscriptions.go
@@ -13,6 +13,7 @@ package db
import (
"context"
+ "github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/client/request"
"github.com/sourcenetwork/defradb/event"
@@ -65,9 +66,8 @@ func (db *db) handleSubscription(ctx context.Context, r *request.Request) (<-cha
}
ctx := SetContextTxn(ctx, txn)
- identity := GetContextIdentity(ctx)
- p := planner.New(ctx, identity, db.acp, db, txn)
+ p := planner.New(ctx, identity.FromContext(ctx), db.acp, db, txn)
s := subRequest.ToSelect(evt.DocID, evt.Cid.String())
result, err := p.RunSelection(ctx, s)
diff --git a/internal/db/view.go b/internal/db/view.go
index 8fb54ccb24..9c1e5eaafd 100644
--- a/internal/db/view.go
+++ b/internal/db/view.go
@@ -20,6 +20,7 @@ import (
"github.com/lens-vm/lens/host-go/config/model"
"github.com/sourcenetwork/immutable"
+ "github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/client/request"
"github.com/sourcenetwork/defradb/internal/core"
@@ -144,9 +145,8 @@ func (db *db) getViews(ctx context.Context, opts client.CollectionFetchOptions)
func (db *db) buildViewCache(ctx context.Context, col client.CollectionDefinition) (err error) {
txn := mustGetContextTxn(ctx)
- identity := GetContextIdentity(ctx)
- p := planner.New(ctx, identity, db.acp, db, txn)
+ p := planner.New(ctx, identity.FromContext(ctx), db.acp, db, txn)
// temporarily disable the cache in order to query without using it
col.Description.IsMaterialized = false
diff --git a/node/node.go b/node/node.go
index d5e62bc1bb..0a1b813862 100644
--- a/node/node.go
+++ b/node/node.go
@@ -136,14 +136,17 @@ func (n *Node) Start(ctx context.Context) error {
if err != nil {
return err
}
+
acp, err := NewACP(ctx, n.acpOpts...)
if err != nil {
return err
}
+
lens, err := NewLens(ctx, n.lensOpts...)
if err != nil {
return err
}
+
n.DB, err = db.NewDB(ctx, rootstore, acp, lens, n.dbOpts...)
if err != nil {
return err
diff --git a/tests/clients/cli/wrapper.go b/tests/clients/cli/wrapper.go
index f468cec0f9..70ee022261 100644
--- a/tests/clients/cli/wrapper.go
+++ b/tests/clients/cli/wrapper.go
@@ -26,6 +26,7 @@ import (
"github.com/sourcenetwork/immutable"
+ "github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/cli"
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/datastore"
@@ -550,7 +551,10 @@ func (w *Wrapper) MaxTxnRetries() int {
}
func (w *Wrapper) PrintDump(ctx context.Context) error {
- return w.node.DB.PrintDump(ctx)
+ args := []string{"dump"}
+
+ _, err := w.cmd.execute(ctx, args)
+ return err
}
func (w *Wrapper) Connect(ctx context.Context, addr peer.AddrInfo) error {
@@ -560,3 +564,17 @@ func (w *Wrapper) Connect(ctx context.Context, addr peer.AddrInfo) error {
func (w *Wrapper) Host() string {
return w.httpServer.URL
}
+
+func (w *Wrapper) GetNodeIdentity(ctx context.Context) (immutable.Option[identity.PublicRawIdentity], error) {
+ args := []string{"client", "node-identity"}
+
+ data, err := w.cmd.execute(ctx, args)
+ if err != nil {
+ return immutable.None[identity.PublicRawIdentity](), err
+ }
+ var res identity.PublicRawIdentity
+ if err := json.Unmarshal(data, &res); err != nil {
+ return immutable.None[identity.PublicRawIdentity](), err
+ }
+ return immutable.Some(res), nil
+}
diff --git a/tests/clients/cli/wrapper_cli.go b/tests/clients/cli/wrapper_cli.go
index 39e5ef6290..65865d4cbb 100644
--- a/tests/clients/cli/wrapper_cli.go
+++ b/tests/clients/cli/wrapper_cli.go
@@ -17,6 +17,7 @@ import (
"io"
"strings"
+ "github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/cli"
"github.com/sourcenetwork/defradb/internal/db"
)
@@ -60,7 +61,7 @@ func (w *cliWrapper) executeStream(ctx context.Context, args []string) (io.ReadC
if ok {
args = append(args, "--tx", fmt.Sprintf("%d", tx.ID()))
}
- id := db.GetContextIdentity(ctx)
+ id := identity.FromContext(ctx)
if id.HasValue() && id.Value().PrivateKey != nil {
args = append(args, "--identity", hex.EncodeToString(id.Value().PrivateKey.Serialize()))
args = append(args, "--source-hub-address", w.sourceHubAddress)
diff --git a/tests/clients/http/wrapper.go b/tests/clients/http/wrapper.go
index 7438fa8cce..2e7b6885e0 100644
--- a/tests/clients/http/wrapper.go
+++ b/tests/clients/http/wrapper.go
@@ -20,6 +20,7 @@ import (
"github.com/sourcenetwork/immutable"
+ "github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/datastore"
"github.com/sourcenetwork/defradb/event"
@@ -281,3 +282,7 @@ func (w *Wrapper) Connect(ctx context.Context, addr peer.AddrInfo) error {
func (w *Wrapper) Host() string {
return w.httpServer.URL
}
+
+func (w *Wrapper) GetNodeIdentity(ctx context.Context) (immutable.Option[identity.PublicRawIdentity], error) {
+ return w.client.GetNodeIdentity(ctx)
+}
diff --git a/tests/integration/acp.go b/tests/integration/acp.go
index f58f1963d8..8269245757 100644
--- a/tests/integration/acp.go
+++ b/tests/integration/acp.go
@@ -29,8 +29,6 @@ import (
"github.com/sourcenetwork/immutable"
"github.com/stretchr/testify/require"
- acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
- "github.com/sourcenetwork/defradb/internal/db"
"github.com/sourcenetwork/defradb/keyring"
"github.com/sourcenetwork/defradb/node"
"github.com/sourcenetwork/defradb/tests/clients/cli"
@@ -90,7 +88,7 @@ type AddPolicy struct {
Policy string
// The policy creator identity, i.e. actor creating the policy.
- Identity immutable.Option[int]
+ Identity immutable.Option[identityRef]
// The expected policyID generated based on the Policy loaded in to the ACP system.
ExpectedPolicyID string
@@ -114,9 +112,7 @@ func addPolicyACP(
nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
for index, node := range nodes {
- nodeID := nodeIDs[index]
- identity := getIdentity(s, nodeID, action.Identity)
- ctx := db.SetContextIdentity(s.ctx, identity)
+ ctx := getContextWithIdentity(s.ctx, s, action.Identity, nodeIDs[index])
policyResult, err := node.AddPolicy(ctx, action.Policy)
expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
@@ -162,14 +158,14 @@ type AddDocActorRelationship struct {
// The target public identity, i.e. the identity of the actor to tie the document's relation with.
//
- // This is a required field. To test the invalid usage of not having this arg, use -1 index.
- TargetIdentity int
+ // This is a required field. To test the invalid usage of not having this arg, use NoIdentity() or leave default.
+ TargetIdentity immutable.Option[identityRef]
// The requestor identity, i.e. identity of the actor creating the relationship.
// Note: This identity must either own or have managing access defined in the policy.
//
- // This is a required field. To test the invalid usage of not having this arg, use -1 index.
- RequestorIdentity int
+ // This is a required field. To test the invalid usage of not having this arg, use NoIdentity() or leave default.
+ RequestorIdentity immutable.Option[identityRef]
// Result returns true if it was a no-op due to existing before, and false if a new relationship was made.
ExpectedExistence bool
@@ -189,52 +185,14 @@ func addDocActorRelationshipACP(
for index, node := range nodes {
nodeID := nodeIDs[index]
- var collectionName string
- if action.CollectionID == -1 {
- collectionName = ""
- } else {
- collection := s.collections[nodeID][action.CollectionID]
- if !collection.Description().Name.HasValue() {
- require.Fail(s.t, "Expected non-empty collection name, but it was empty.", s.testCase.Description)
- }
- collectionName = collection.Description().Name.Value()
- }
-
- var docID string
- if action.DocID == -1 || action.CollectionID == -1 {
- docID = ""
- } else {
- docID = s.docIDs[action.CollectionID][action.DocID].String()
- }
-
- var targetIdentity string
- if action.TargetIdentity == -1 {
- targetIdentity = ""
- } else {
- optionalTargetIdentity := getIdentity(s, nodeID, immutable.Some(action.TargetIdentity))
- if !optionalTargetIdentity.HasValue() {
- require.Fail(s.t, "Expected non-empty target identity, but it was empty.", s.testCase.Description)
- }
- targetIdentity = optionalTargetIdentity.Value().DID
- }
-
- var requestorIdentity immutable.Option[acpIdentity.Identity]
- if action.RequestorIdentity == -1 {
- requestorIdentity = acpIdentity.None
- } else {
- requestorIdentity = getIdentity(s, nodeID, immutable.Some(action.RequestorIdentity))
- if !requestorIdentity.HasValue() {
- require.Fail(s.t, "Expected non-empty requestor identity, but it was empty.", s.testCase.Description)
- }
- }
- ctx := db.SetContextIdentity(s.ctx, requestorIdentity)
+ collectionName, docID := getCollectionAndDocInfo(s, action.CollectionID, action.DocID, nodeID)
exists, err := node.AddDocActorRelationship(
- ctx,
+ getContextWithIdentity(s.ctx, s, action.RequestorIdentity, nodeID),
collectionName,
docID,
action.Relation,
- targetIdentity,
+ getIdentityDID(s, action.TargetIdentity),
)
expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
@@ -280,14 +238,14 @@ type DeleteDocActorRelationship struct {
// The target public identity, i.e. the identity of the actor with whom the relationship is with.
//
- // This is a required field. To test the invalid usage of not having this arg, use -1 index.
- TargetIdentity int
+ // This is a required field. To test the invalid usage of not having this arg, use NoIdentity() or leave default.
+ TargetIdentity immutable.Option[identityRef]
// The requestor identity, i.e. identity of the actor deleting the relationship.
// Note: This identity must either own or have managing access defined in the policy.
//
- // This is a required field. To test the invalid usage of not having this arg, use -1 index.
- RequestorIdentity int
+ // This is a required field. To test the invalid usage of not having this arg, use NoIdentity() or leave default.
+ RequestorIdentity immutable.Option[identityRef]
// Result returns true if the relationship record was expected to be found and deleted,
// and returns false if no matching relationship record was found (no-op).
@@ -308,52 +266,14 @@ func deleteDocActorRelationshipACP(
for index, node := range nodes {
nodeID := nodeIDs[index]
- var collectionName string
- if action.CollectionID == -1 {
- collectionName = ""
- } else {
- collection := s.collections[nodeID][action.CollectionID]
- if !collection.Description().Name.HasValue() {
- require.Fail(s.t, "Expected non-empty collection name, but it was empty.", s.testCase.Description)
- }
- collectionName = collection.Description().Name.Value()
- }
-
- var docID string
- if action.DocID == -1 || action.CollectionID == -1 {
- docID = ""
- } else {
- docID = s.docIDs[action.CollectionID][action.DocID].String()
- }
-
- var targetIdentity string
- if action.TargetIdentity == -1 {
- targetIdentity = ""
- } else {
- optionalTargetIdentity := getIdentity(s, nodeID, immutable.Some(action.TargetIdentity))
- if !optionalTargetIdentity.HasValue() {
- require.Fail(s.t, "Expected non-empty target identity, but it was empty.", s.testCase.Description)
- }
- targetIdentity = optionalTargetIdentity.Value().DID
- }
-
- var requestorIdentity immutable.Option[acpIdentity.Identity]
- if action.RequestorIdentity == -1 {
- requestorIdentity = acpIdentity.None
- } else {
- requestorIdentity = getIdentity(s, nodeID, immutable.Some(action.RequestorIdentity))
- if !requestorIdentity.HasValue() {
- require.Fail(s.t, "Expected non-empty requestor identity, but it was empty.", s.testCase.Description)
- }
- }
- ctx := db.SetContextIdentity(s.ctx, requestorIdentity)
+ collectionName, docID := getCollectionAndDocInfo(s, action.CollectionID, action.DocID, nodeID)
deleteDocActorRelationshipResult, err := node.DeleteDocActorRelationship(
- ctx,
+ getContextWithIdentity(s.ctx, s, action.RequestorIdentity, nodeID),
collectionName,
docID,
action.Relation,
- targetIdentity,
+ getIdentityDID(s, action.TargetIdentity),
)
expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
@@ -372,6 +292,23 @@ func deleteDocActorRelationshipACP(
}
}
+func getCollectionAndDocInfo(s *state, collectionID, docInd, nodeID int) (string, string) {
+ collectionName := ""
+ docID := ""
+ if collectionID != -1 {
+ collection := s.collections[nodeID][collectionID]
+ if !collection.Description().Name.HasValue() {
+ require.Fail(s.t, "Expected non-empty collection name, but it was empty.", s.testCase.Description)
+ }
+ collectionName = collection.Description().Name.Value()
+
+ if docInd != -1 {
+ docID = s.docIDs[collectionID][docInd].String()
+ }
+ }
+ return collectionName, docID
+}
+
func setupSourceHub(s *state) ([]node.ACPOpt, error) {
var isACPTest bool
for _, a := range s.testCase.Actions {
@@ -473,7 +410,7 @@ func setupSourceHub(s *state) ([]node.ACPOpt, error) {
return nil, err
}
- // The result is suffexed with a newline char so we must trim the whitespace
+ // The result is suffixed with a newline char so we must trim the whitespace
validatorAddress := strings.TrimSpace(string(out))
s.sourcehubAddress = validatorAddress
@@ -508,7 +445,7 @@ func setupSourceHub(s *state) ([]node.ACPOpt, error) {
// process involves finding free ports, dropping them, and then assigning them to the source hub node.
//
// We have to do this because source hub (cosmos) annoyingly does not support automatic port assignment
- // (appart from the p2p port which we just manage here for consistency).
+ // (apart from the p2p port which we just manage here for consistency).
//
// We need to lock before getting the ports, otherwise they may try and use the port we use for locking.
// We can only unlock after the source hub node has started and begun listening on the assigned ports.
@@ -581,7 +518,7 @@ cmdReaderLoop:
// can safely unlock here.
unlock()
}
- // This is guarenteed to be logged after the gRPC server has been spun up
+ // This is guaranteed to be logged after the gRPC server has been spun up
// so we can be sure that the lock has been unlocked.
if strings.Contains(line, "committed state") {
break cmdReaderLoop
@@ -648,66 +585,18 @@ func crossLock(port uint16) (func(), error) {
nil
}
-// Generate the keys using the index as the seed so that multiple
-// runs yield the same private key. This is important for stuff like
-// the change detector.
-func generateIdentity(s *state, seedIndex int, nodeIndex int) (acpIdentity.Identity, error) {
- var audience immutable.Option[string]
+func getNodeAudience(s *state, nodeIndex int) immutable.Option[string] {
+ if nodeIndex >= len(s.nodes) {
+ return immutable.None[string]()
+ }
switch client := s.nodes[nodeIndex].(type) {
case *http.Wrapper:
- audience = immutable.Some(strings.TrimPrefix(client.Host(), "http://"))
+ return immutable.Some(strings.TrimPrefix(client.Host(), "http://"))
case *cli.Wrapper:
- audience = immutable.Some(strings.TrimPrefix(client.Host(), "http://"))
- }
-
- source := rand.NewSource(int64(seedIndex))
- r := rand.New(source)
-
- privateKey, err := secp256k1.GeneratePrivateKeyFromRand(r)
- require.NoError(s.t, err)
-
- identity, err := acpIdentity.FromPrivateKey(
- privateKey,
- authTokenExpiration,
- audience,
- immutable.Some(s.sourcehubAddress),
- // Creating and signing the bearer token is slow, so we skip it if it not
- // required.
- !(acpType == SourceHubACPType || audience.HasValue()),
- )
-
- return identity, err
-}
-
-func getIdentity(s *state, nodeIndex int, index immutable.Option[int]) immutable.Option[acpIdentity.Identity] {
- if !index.HasValue() {
- return immutable.None[acpIdentity.Identity]()
+ return immutable.Some(strings.TrimPrefix(client.Host(), "http://"))
}
- if len(s.identities) <= nodeIndex {
- identities := make([][]acpIdentity.Identity, nodeIndex+1)
- copy(identities, s.identities)
- s.identities = identities
- }
- nodeIdentities := s.identities[nodeIndex]
-
- if len(nodeIdentities) <= index.Value() {
- identities := make([]acpIdentity.Identity, index.Value()+1)
- // Fill any empty identities up to the index.
- for i := range identities {
- if i < len(nodeIdentities) && nodeIdentities[i] != (acpIdentity.Identity{}) {
- identities[i] = nodeIdentities[i]
- continue
- }
- newIdentity, err := generateIdentity(s, i, nodeIndex)
- require.NoError(s.t, err)
- identities[i] = newIdentity
- }
- s.identities[nodeIndex] = identities
- return immutable.Some(identities[index.Value()])
- } else {
- return immutable.Some(nodeIdentities[index.Value()])
- }
+ return immutable.None[string]()
}
// testBuffer is a very simple, thread-safe (--race flag friendly), io.Writer
diff --git a/tests/integration/acp/add_policy/basic_test.go b/tests/integration/acp/add_policy/basic_test.go
index a96a073e5c..48aa649f3b 100644
--- a/tests/integration/acp/add_policy/basic_test.go
+++ b/tests/integration/acp/add_policy/basic_test.go
@@ -13,8 +13,6 @@ package test_acp_add_policy
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -25,7 +23,7 @@ func TestACP_AddPolicy_BasicYAML_ValidPolicyID(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -64,7 +62,7 @@ func TestACP_AddPolicy_BasicJSON_ValidPolicyID(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
{
diff --git a/tests/integration/acp/add_policy/with_empty_args_test.go b/tests/integration/acp/add_policy/with_empty_args_test.go
index 1af4a5c1f3..3c392117c7 100644
--- a/tests/integration/acp/add_policy/with_empty_args_test.go
+++ b/tests/integration/acp/add_policy/with_empty_args_test.go
@@ -13,8 +13,6 @@ package test_acp_add_policy
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -25,7 +23,7 @@ func TestACP_AddPolicy_EmptyPolicyData_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: "",
@@ -44,7 +42,7 @@ func TestACP_AddPolicy_EmptyPolicyCreator_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.None[int](),
+ Identity: testUtils.NoIdentity(),
Policy: `
name: test
@@ -83,7 +81,7 @@ func TestACP_AddPolicy_EmptyCreatorAndPolicyArgs_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.None[int](),
+ Identity: testUtils.NoIdentity(),
Policy: "",
diff --git a/tests/integration/acp/add_policy/with_extra_perms_and_relations_test.go b/tests/integration/acp/add_policy/with_extra_perms_and_relations_test.go
index 1db26e639e..9bf36b88dd 100644
--- a/tests/integration/acp/add_policy/with_extra_perms_and_relations_test.go
+++ b/tests/integration/acp/add_policy/with_extra_perms_and_relations_test.go
@@ -13,8 +13,6 @@ package test_acp_add_policy
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -25,7 +23,7 @@ func TestACP_AddPolicy_ExtraPermissionsAndExtraRelations_ValidPolicyID(t *testin
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/add_policy/with_extra_perms_test.go b/tests/integration/acp/add_policy/with_extra_perms_test.go
index 963101356f..2de8d0da6a 100644
--- a/tests/integration/acp/add_policy/with_extra_perms_test.go
+++ b/tests/integration/acp/add_policy/with_extra_perms_test.go
@@ -13,8 +13,6 @@ package test_acp_add_policy
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -25,7 +23,7 @@ func TestACP_AddPolicy_ExtraPermissions_ValidPolicyID(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: a policy
@@ -65,7 +63,7 @@ func TestACP_AddPolicy_ExtraDuplicatePermissions_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: a policy
diff --git a/tests/integration/acp/add_policy/with_extra_relations_test.go b/tests/integration/acp/add_policy/with_extra_relations_test.go
index f4bec6479c..36f7694e52 100644
--- a/tests/integration/acp/add_policy/with_extra_relations_test.go
+++ b/tests/integration/acp/add_policy/with_extra_relations_test.go
@@ -13,8 +13,6 @@ package test_acp_add_policy
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -25,7 +23,7 @@ func TestACP_AddPolicy_ExtraRelations_ValidPolicyID(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: a policy
@@ -69,7 +67,7 @@ func TestACP_AddPolicy_ExtraDuplicateRelations_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: a policy
diff --git a/tests/integration/acp/add_policy/with_invalid_relations_test.go b/tests/integration/acp/add_policy/with_invalid_relations_test.go
index 1cf3d8315b..09a26a4685 100644
--- a/tests/integration/acp/add_policy/with_invalid_relations_test.go
+++ b/tests/integration/acp/add_policy/with_invalid_relations_test.go
@@ -13,8 +13,6 @@ package test_acp_add_policy
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -25,7 +23,7 @@ func TestACP_AddPolicy_NoRelations_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: a policy
@@ -60,7 +58,7 @@ func TestACP_AddPolicy_NoRelationsLabel_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: a policy
diff --git a/tests/integration/acp/add_policy/with_invalid_required_relation_test.go b/tests/integration/acp/add_policy/with_invalid_required_relation_test.go
index 122d1011a5..12242298d4 100644
--- a/tests/integration/acp/add_policy/with_invalid_required_relation_test.go
+++ b/tests/integration/acp/add_policy/with_invalid_required_relation_test.go
@@ -13,8 +13,6 @@ package test_acp_add_policy
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -25,7 +23,7 @@ func TestACP_AddPolicy_MissingRequiredOwnerRelation_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: a policy
@@ -63,7 +61,7 @@ func TestACP_AddPolicy_DuplicateOwnerRelation_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: a policy
diff --git a/tests/integration/acp/add_policy/with_invalid_resource_test.go b/tests/integration/acp/add_policy/with_invalid_resource_test.go
index 79e627e888..1acf9cf8ca 100644
--- a/tests/integration/acp/add_policy/with_invalid_resource_test.go
+++ b/tests/integration/acp/add_policy/with_invalid_resource_test.go
@@ -13,8 +13,6 @@ package test_acp_add_policy
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -25,7 +23,7 @@ func TestACP_AddPolicy_OneResourceThatIsEmpty_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: a policy
diff --git a/tests/integration/acp/add_policy/with_managed_relation_test.go b/tests/integration/acp/add_policy/with_managed_relation_test.go
index bff8f86fb2..a41a8713a3 100644
--- a/tests/integration/acp/add_policy/with_managed_relation_test.go
+++ b/tests/integration/acp/add_policy/with_managed_relation_test.go
@@ -13,8 +13,6 @@ package test_acp_add_policy
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -24,7 +22,7 @@ func TestACP_AddPolicy_WithRelationManagingOtherRelation_ValidPolicyID(t *testin
Description: "Test acp, where a relation is managing another relation, valid policy id",
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: a policy
diff --git a/tests/integration/acp/add_policy/with_multi_policies_test.go b/tests/integration/acp/add_policy/with_multi_policies_test.go
index e413a5872a..4ca02aeb64 100644
--- a/tests/integration/acp/add_policy/with_multi_policies_test.go
+++ b/tests/integration/acp/add_policy/with_multi_policies_test.go
@@ -13,8 +13,6 @@ package test_acp_add_policy
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -25,7 +23,7 @@ func TestACP_AddPolicy_AddMultipleDifferentPolicies_ValidPolicyIDs(t *testing.T)
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: a policy
@@ -53,7 +51,7 @@ func TestACP_AddPolicy_AddMultipleDifferentPolicies_ValidPolicyIDs(t *testing.T)
},
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: a policy
@@ -99,7 +97,7 @@ func TestACP_AddPolicy_AddMultipleDifferentPoliciesInDifferentFmts_ValidPolicyID
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
{
@@ -134,7 +132,7 @@ func TestACP_AddPolicy_AddMultipleDifferentPoliciesInDifferentFmts_ValidPolicyID
},
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test2
@@ -201,7 +199,7 @@ func TestACP_AddPolicy_AddDuplicatePolicyByOtherCreator_ValidPolicyIDs(t *testin
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: policyUsedByBoth,
@@ -209,7 +207,7 @@ func TestACP_AddPolicy_AddDuplicatePolicyByOtherCreator_ValidPolicyIDs(t *testin
},
testUtils.AddPolicy{
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Policy: policyUsedByBoth,
@@ -228,7 +226,7 @@ func TestACP_AddPolicy_AddMultipleDuplicatePolicies_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -256,7 +254,7 @@ func TestACP_AddPolicy_AddMultipleDuplicatePolicies_Error(t *testing.T) {
},
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -295,7 +293,7 @@ func TestACP_AddPolicy_AddMultipleDuplicatePoliciesDifferentFmts_ProducesDiffere
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -322,7 +320,7 @@ func TestACP_AddPolicy_AddMultipleDuplicatePoliciesDifferentFmts_ProducesDiffere
},
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
{
diff --git a/tests/integration/acp/add_policy/with_multiple_resources_test.go b/tests/integration/acp/add_policy/with_multiple_resources_test.go
index fed7ac9888..161e2d27fb 100644
--- a/tests/integration/acp/add_policy/with_multiple_resources_test.go
+++ b/tests/integration/acp/add_policy/with_multiple_resources_test.go
@@ -13,8 +13,6 @@ package test_acp_add_policy
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -25,7 +23,7 @@ func TestACP_AddPolicy_MultipleResources_ValidID(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -80,7 +78,7 @@ func TestACP_AddPolicy_MultipleResourcesUsingRelationDefinedInOther_Error(t *tes
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -132,7 +130,7 @@ func TestACP_AddPolicy_SecondResourcesMissingRequiredOwner_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/add_policy/with_no_perms_test.go b/tests/integration/acp/add_policy/with_no_perms_test.go
index 7bd55bd9d0..57cec65ee7 100644
--- a/tests/integration/acp/add_policy/with_no_perms_test.go
+++ b/tests/integration/acp/add_policy/with_no_perms_test.go
@@ -13,8 +13,6 @@ package test_acp_add_policy
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -33,7 +31,7 @@ func TestACP_AddPolicy_NoPermissionsOnlyOwner_ValidID(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -68,7 +66,7 @@ func TestACP_AddPolicy_NoPermissionsMultiRelations_ValidID(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -106,7 +104,7 @@ func TestACP_AddPolicy_NoPermissionsLabelOnlyOwner_ValidID(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -139,7 +137,7 @@ func TestACP_AddPolicy_NoPermissionsLabelMultiRelations_ValidID(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/add_policy/with_no_resources_test.go b/tests/integration/acp/add_policy/with_no_resources_test.go
index 5bcf3f141b..26e09a76b6 100644
--- a/tests/integration/acp/add_policy/with_no_resources_test.go
+++ b/tests/integration/acp/add_policy/with_no_resources_test.go
@@ -27,7 +27,7 @@ func TestACP_AddPolicy_NoResource_ValidID(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -56,7 +56,7 @@ func TestACP_AddPolicy_NoResourceLabel_ValidID(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -85,7 +85,7 @@ func TestACP_AddPolicy_PolicyWithOnlySpace_NameIsRequired(t *testing.T) {
}),
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: " ",
diff --git a/tests/integration/acp/add_policy/with_perm_expr_test.go b/tests/integration/acp/add_policy/with_perm_expr_test.go
index 2329fadfe9..b058a2e652 100644
--- a/tests/integration/acp/add_policy/with_perm_expr_test.go
+++ b/tests/integration/acp/add_policy/with_perm_expr_test.go
@@ -13,8 +13,6 @@ package test_acp_add_policy
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -25,7 +23,7 @@ func TestACP_AddPolicy_PermissionExprWithOwnerInTheEndWithMinus_ValidID(t *testi
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -67,7 +65,7 @@ func TestACP_AddPolicy_PermissionExprWithOwnerInTheEndWithMinusNoSpace_ValidID(t
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/add_policy/with_perm_invalid_expr_test.go b/tests/integration/acp/add_policy/with_perm_invalid_expr_test.go
index 592c14e56d..2f3d20ee51 100644
--- a/tests/integration/acp/add_policy/with_perm_invalid_expr_test.go
+++ b/tests/integration/acp/add_policy/with_perm_invalid_expr_test.go
@@ -13,8 +13,6 @@ package test_acp_add_policy
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -25,7 +23,7 @@ func TestACP_AddPolicy_EmptyExpressionInPermission_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -66,7 +64,7 @@ func TestACP_AddPolicy_PermissionExprWithOwnerInTheEndWithInocorrectSymbol_Error
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -107,7 +105,7 @@ func TestACP_AddPolicy_PermissionExprWithOwnerInTheEndWithInocorrectSymbolNoSpac
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/add_policy/with_permissionless_owner_test.go b/tests/integration/acp/add_policy/with_permissionless_owner_test.go
index 84e76736be..822c56907e 100644
--- a/tests/integration/acp/add_policy/with_permissionless_owner_test.go
+++ b/tests/integration/acp/add_policy/with_permissionless_owner_test.go
@@ -13,8 +13,6 @@ package test_acp_add_policy
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -32,7 +30,7 @@ func TestACP_AddPolicy_PermissionlessOwnerWrite_ValidID(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -73,7 +71,7 @@ func TestACP_AddPolicy_PermissionlessOwnerRead_ValidID(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -114,7 +112,7 @@ func TestACP_AddPolicy_PermissionlessOwnerReadWrite_ValidID(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/add_policy/with_unused_relations_test.go b/tests/integration/acp/add_policy/with_unused_relations_test.go
index dd610150ee..2189048ab0 100644
--- a/tests/integration/acp/add_policy/with_unused_relations_test.go
+++ b/tests/integration/acp/add_policy/with_unused_relations_test.go
@@ -13,8 +13,6 @@ package test_acp_add_policy
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -25,7 +23,7 @@ func TestACP_AddPolicy_UnusedRelation_ValidID(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/index/create_test.go b/tests/integration/acp/index/create_test.go
index da9e9b96b9..8a3388878c 100644
--- a/tests/integration/acp/index/create_test.go
+++ b/tests/integration/acp/index/create_test.go
@@ -13,8 +13,6 @@ package test_acp_index
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -24,7 +22,7 @@ func TestACP_IndexCreateWithSeparateRequest_OnCollectionWithPolicy_NoError(t *te
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: userPolicy,
ExpectedPolicyID: "94eb195c0e459aa79e02a1986c7e731c5015721c18a373f2b2a0ed140a04b454",
},
@@ -72,7 +70,7 @@ func TestACP_IndexCreateWithDirective_OnCollectionWithPolicy_NoError(t *testing.
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: userPolicy,
ExpectedPolicyID: "94eb195c0e459aa79e02a1986c7e731c5015721c18a373f2b2a0ed140a04b454",
},
diff --git a/tests/integration/acp/index/query_test.go b/tests/integration/acp/index/query_test.go
index 3fafeb4b10..06edc45065 100644
--- a/tests/integration/acp/index/query_test.go
+++ b/tests/integration/acp/index/query_test.go
@@ -13,8 +13,6 @@ package test_acp_index
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -23,7 +21,7 @@ func TestACPWithIndex_UponQueryingPrivateDocWithoutIdentity_ShouldNotFetch(t *te
Description: "Test acp, querying private doc without identity should not fetch",
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: userPolicy,
ExpectedPolicyID: "94eb195c0e459aa79e02a1986c7e731c5015721c18a373f2b2a0ed140a04b454",
},
@@ -46,7 +44,7 @@ func TestACPWithIndex_UponQueryingPrivateDocWithoutIdentity_ShouldNotFetch(t *te
`,
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Doc: `
{
"name": "Islam"
@@ -77,7 +75,7 @@ func TestACPWithIndex_UponQueryingPrivateDocWithIdentity_ShouldFetch(t *testing.
Description: "Test acp, querying private doc with identity should fetch",
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: userPolicy,
ExpectedPolicyID: "94eb195c0e459aa79e02a1986c7e731c5015721c18a373f2b2a0ed140a04b454",
},
@@ -100,7 +98,7 @@ func TestACPWithIndex_UponQueryingPrivateDocWithIdentity_ShouldFetch(t *testing.
`,
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Doc: `
{
"name": "Islam"
@@ -108,7 +106,7 @@ func TestACPWithIndex_UponQueryingPrivateDocWithIdentity_ShouldFetch(t *testing.
`,
},
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
Users {
@@ -137,7 +135,7 @@ func TestACPWithIndex_UponQueryingPrivateDocWithWrongIdentity_ShouldNotFetch(t *
Description: "Test acp, querying private doc with wrong identity should not fetch",
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: userPolicy,
ExpectedPolicyID: "94eb195c0e459aa79e02a1986c7e731c5015721c18a373f2b2a0ed140a04b454",
},
@@ -160,7 +158,7 @@ func TestACPWithIndex_UponQueryingPrivateDocWithWrongIdentity_ShouldNotFetch(t *
`,
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Doc: `
{
"name": "Islam"
@@ -168,7 +166,7 @@ func TestACPWithIndex_UponQueryingPrivateDocWithWrongIdentity_ShouldNotFetch(t *
`,
},
testUtils.Request{
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Request: `
query {
Users {
diff --git a/tests/integration/acp/index/query_with_relation_test.go b/tests/integration/acp/index/query_with_relation_test.go
index 5a406e6575..01a09db435 100644
--- a/tests/integration/acp/index/query_with_relation_test.go
+++ b/tests/integration/acp/index/query_with_relation_test.go
@@ -13,15 +13,13 @@ package test_acp_index
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
func createAuthorBooksSchemaWithPolicyAndCreateDocs() []any {
return []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: bookAuthorPolicy,
ExpectedPolicyID: "f6927e8861f91122a5e3e333249297e4315b672298b5cb93ee3f49facc1e0d11",
},
@@ -56,7 +54,7 @@ func createAuthorBooksSchemaWithPolicyAndCreateDocs() []any {
}`,
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
// bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04
Doc: `{
@@ -74,7 +72,7 @@ func createAuthorBooksSchemaWithPolicyAndCreateDocs() []any {
},
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 1,
DocMap: map[string]any{
"name": "A Time for Mercy",
@@ -83,7 +81,7 @@ func createAuthorBooksSchemaWithPolicyAndCreateDocs() []any {
},
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 1,
DocMap: map[string]any{
"name": "Theif Lord",
@@ -136,7 +134,7 @@ func TestACPWithIndex_UponQueryingPrivateOneToManyRelatedDocWithIdentity_ShouldF
Actions: []any{
createAuthorBooksSchemaWithPolicyAndCreateDocs(),
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
Author(filter: {
@@ -184,7 +182,7 @@ func TestACPWithIndex_UponQueryingPrivateOneToManyRelatedDocWithWrongIdentity_Sh
Actions: []any{
createAuthorBooksSchemaWithPolicyAndCreateDocs(),
testUtils.Request{
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Request: `
query {
Author(filter: {
@@ -255,7 +253,7 @@ func TestACPWithIndex_UponQueryingPrivateManyToOneRelatedDocWithIdentity_ShouldF
Actions: []any{
createAuthorBooksSchemaWithPolicyAndCreateDocs(),
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
Book(filter: {
@@ -302,7 +300,7 @@ func TestACPWithIndex_UponQueryingPrivateManyToOneRelatedDocWithWrongIdentity_Sh
Actions: []any{
createAuthorBooksSchemaWithPolicyAndCreateDocs(),
testUtils.Request{
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Request: `
query {
Book(filter: {
diff --git a/tests/integration/acp/p2p/create_test.go b/tests/integration/acp/p2p/create_test.go
index 8775a553d7..db3d5a4508 100644
--- a/tests/integration/acp/p2p/create_test.go
+++ b/tests/integration/acp/p2p/create_test.go
@@ -39,7 +39,7 @@ func TestACP_P2PCreatePrivateDocumentsOnDifferentNodes_SourceHubACP(t *testing.T
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -103,7 +103,7 @@ func TestACP_P2PCreatePrivateDocumentsOnDifferentNodes_SourceHubACP(t *testing.T
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
NodeID: immutable.Some(0),
@@ -115,7 +115,7 @@ func TestACP_P2PCreatePrivateDocumentsOnDifferentNodes_SourceHubACP(t *testing.T
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
NodeID: immutable.Some(1),
diff --git a/tests/integration/acp/p2p/delete_test.go b/tests/integration/acp/p2p/delete_test.go
index 59cae4cde9..2f45fbcf43 100644
--- a/tests/integration/acp/p2p/delete_test.go
+++ b/tests/integration/acp/p2p/delete_test.go
@@ -39,7 +39,7 @@ func TestACP_P2PDeletePrivateDocumentsOnDifferentNodes_SourceHubACP(t *testing.T
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -108,7 +108,7 @@ func TestACP_P2PDeletePrivateDocumentsOnDifferentNodes_SourceHubACP(t *testing.T
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
NodeID: immutable.Some(0),
@@ -120,7 +120,7 @@ func TestACP_P2PDeletePrivateDocumentsOnDifferentNodes_SourceHubACP(t *testing.T
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
NodeID: immutable.Some(1),
@@ -134,7 +134,7 @@ func TestACP_P2PDeletePrivateDocumentsOnDifferentNodes_SourceHubACP(t *testing.T
testUtils.WaitForSync{},
testUtils.DeleteDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
NodeID: immutable.Some(0),
@@ -144,7 +144,7 @@ func TestACP_P2PDeletePrivateDocumentsOnDifferentNodes_SourceHubACP(t *testing.T
},
testUtils.DeleteDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
NodeID: immutable.Some(1),
diff --git a/tests/integration/acp/p2p/replicator_test.go b/tests/integration/acp/p2p/replicator_test.go
index 8afaafebc0..2f7f69ceca 100644
--- a/tests/integration/acp/p2p/replicator_test.go
+++ b/tests/integration/acp/p2p/replicator_test.go
@@ -29,7 +29,7 @@ func TestACP_P2POneToOneReplicatorWithPermissionedCollection_LocalACP(t *testing
testUtils.RandomNetworkingConfig(),
testUtils.RandomNetworkingConfig(),
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
description: a test policy which marks a collection in a database as a resource
@@ -93,7 +93,7 @@ func TestACP_P2POneToOneReplicatorWithPermissionedCollection_SourceHubACP(t *tes
testUtils.RandomNetworkingConfig(),
testUtils.RandomNetworkingConfig(),
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
description: a test policy which marks a collection in a database as a resource
@@ -141,7 +141,7 @@ func TestACP_P2POneToOneReplicatorWithPermissionedCollection_SourceHubACP(t *tes
},
testUtils.CreateDoc{
NodeID: immutable.Some(0),
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
DocMap: map[string]any{
"name": "John",
},
@@ -149,7 +149,7 @@ func TestACP_P2POneToOneReplicatorWithPermissionedCollection_SourceHubACP(t *tes
testUtils.WaitForSync{},
testUtils.Request{
// Ensure that the document is accessible on all nodes to authorized actors
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
Users {
@@ -180,7 +180,7 @@ func TestACP_P2POneToOneReplicatorWithPermissionedCollection_SourceHubACP(t *tes
},
testUtils.Request{
// Ensure that the document is hidden on all nodes to unauthorized actors
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Request: `
query {
Users {
diff --git a/tests/integration/acp/p2p/replicator_with_doc_actor_relationship_test.go b/tests/integration/acp/p2p/replicator_with_doc_actor_relationship_test.go
index cdefe70a46..46d01c8616 100644
--- a/tests/integration/acp/p2p/replicator_with_doc_actor_relationship_test.go
+++ b/tests/integration/acp/p2p/replicator_with_doc_actor_relationship_test.go
@@ -39,7 +39,7 @@ func TestACP_P2PReplicatorWithPermissionedCollectionCreateDocActorRelationship_S
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -109,7 +109,7 @@ func TestACP_P2PReplicatorWithPermissionedCollectionCreateDocActorRelationship_S
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
NodeID: immutable.Some(0),
@@ -124,7 +124,7 @@ func TestACP_P2PReplicatorWithPermissionedCollectionCreateDocActorRelationship_S
testUtils.Request{
// Ensure that the document is hidden on all nodes to an unauthorized actor
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Request: `
query {
@@ -142,9 +142,9 @@ func TestACP_P2PReplicatorWithPermissionedCollectionCreateDocActorRelationship_S
testUtils.AddDocActorRelationship{
NodeID: immutable.Some(0),
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -158,9 +158,9 @@ func TestACP_P2PReplicatorWithPermissionedCollectionCreateDocActorRelationship_S
testUtils.AddDocActorRelationship{
NodeID: immutable.Some(1), // Note: Different node than the previous
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -173,7 +173,7 @@ func TestACP_P2PReplicatorWithPermissionedCollectionCreateDocActorRelationship_S
testUtils.Request{
// Ensure that the document is now accessible on all nodes to the newly authorized actor.
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Request: `
query {
@@ -194,7 +194,7 @@ func TestACP_P2PReplicatorWithPermissionedCollectionCreateDocActorRelationship_S
testUtils.Request{
// Ensure that the document is still accessible on all nodes to the owner.
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
@@ -216,9 +216,9 @@ func TestACP_P2PReplicatorWithPermissionedCollectionCreateDocActorRelationship_S
testUtils.DeleteDocActorRelationship{
NodeID: immutable.Some(1),
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -232,9 +232,9 @@ func TestACP_P2PReplicatorWithPermissionedCollectionCreateDocActorRelationship_S
testUtils.DeleteDocActorRelationship{
NodeID: immutable.Some(0), // Note: Different node than the previous
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -247,7 +247,7 @@ func TestACP_P2PReplicatorWithPermissionedCollectionCreateDocActorRelationship_S
testUtils.Request{
// Ensure that the document is now inaccessible on all nodes to the actor we revoked access from.
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Request: `
query {
@@ -264,7 +264,7 @@ func TestACP_P2PReplicatorWithPermissionedCollectionCreateDocActorRelationship_S
testUtils.Request{
// Ensure that the document is still accessible on all nodes to the owner.
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
diff --git a/tests/integration/acp/p2p/subscribe_test.go b/tests/integration/acp/p2p/subscribe_test.go
index e95fbaca97..e776ae4fb2 100644
--- a/tests/integration/acp/p2p/subscribe_test.go
+++ b/tests/integration/acp/p2p/subscribe_test.go
@@ -32,7 +32,7 @@ func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollection_LocalACP(t *test
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -110,7 +110,7 @@ func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollection_SourceHubACP(t *
testUtils.RandomNetworkingConfig(),
testUtils.RandomNetworkingConfig(),
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
description: a test policy which marks a collection in a database as a resource
@@ -163,7 +163,7 @@ func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollection_SourceHubACP(t *
},
testUtils.CreateDoc{
NodeID: immutable.Some(0),
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
DocMap: map[string]any{
"name": "John",
},
@@ -171,7 +171,7 @@ func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollection_SourceHubACP(t *
testUtils.WaitForSync{},
testUtils.Request{
// Ensure that the document is accessible on all nodes to authorized actors
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
Users {
@@ -202,7 +202,7 @@ func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollection_SourceHubACP(t *
},
testUtils.Request{
// Ensure that the document is hidden on all nodes to unauthorized actors
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Request: `
query {
Users {
diff --git a/tests/integration/acp/p2p/subscribe_with_doc_actor_relationship_test.go b/tests/integration/acp/p2p/subscribe_with_doc_actor_relationship_test.go
index b9f3f8edd3..52038b8d5b 100644
--- a/tests/integration/acp/p2p/subscribe_with_doc_actor_relationship_test.go
+++ b/tests/integration/acp/p2p/subscribe_with_doc_actor_relationship_test.go
@@ -39,7 +39,7 @@ func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollectionCreateDocActorRel
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -115,7 +115,7 @@ func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollectionCreateDocActorRel
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
NodeID: immutable.Some(0),
@@ -130,7 +130,7 @@ func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollectionCreateDocActorRel
testUtils.Request{
// Ensure that the document is hidden on all nodes to an unauthorized actor
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Request: `
query {
@@ -148,9 +148,9 @@ func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollectionCreateDocActorRel
testUtils.AddDocActorRelationship{
NodeID: immutable.Some(0),
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -164,9 +164,9 @@ func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollectionCreateDocActorRel
testUtils.AddDocActorRelationship{
NodeID: immutable.Some(1), // Note: Different node than the previous
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -179,7 +179,7 @@ func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollectionCreateDocActorRel
testUtils.Request{
// Ensure that the document is now accessible on all nodes to the newly authorized actor.
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Request: `
query {
@@ -200,7 +200,7 @@ func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollectionCreateDocActorRel
testUtils.Request{
// Ensure that the document is still accessible on all nodes to the owner.
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
@@ -222,9 +222,9 @@ func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollectionCreateDocActorRel
testUtils.DeleteDocActorRelationship{
NodeID: immutable.Some(1),
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -238,9 +238,9 @@ func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollectionCreateDocActorRel
testUtils.DeleteDocActorRelationship{
NodeID: immutable.Some(0), // Note: Different node than the previous
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -253,7 +253,7 @@ func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollectionCreateDocActorRel
testUtils.Request{
// Ensure that the document is now inaccessible on all nodes to the actor we revoked access from.
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Request: `
query {
@@ -270,7 +270,7 @@ func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollectionCreateDocActorRel
testUtils.Request{
// Ensure that the document is still accessible on all nodes to the owner.
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
diff --git a/tests/integration/acp/p2p/update_test.go b/tests/integration/acp/p2p/update_test.go
index 339babee10..df26fe4e9a 100644
--- a/tests/integration/acp/p2p/update_test.go
+++ b/tests/integration/acp/p2p/update_test.go
@@ -39,7 +39,7 @@ func TestACP_P2PUpdatePrivateDocumentsOnDifferentNodes_SourceHubACP(t *testing.T
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -108,7 +108,7 @@ func TestACP_P2PUpdatePrivateDocumentsOnDifferentNodes_SourceHubACP(t *testing.T
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
NodeID: immutable.Some(0),
@@ -120,7 +120,7 @@ func TestACP_P2PUpdatePrivateDocumentsOnDifferentNodes_SourceHubACP(t *testing.T
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
NodeID: immutable.Some(1),
@@ -134,7 +134,7 @@ func TestACP_P2PUpdatePrivateDocumentsOnDifferentNodes_SourceHubACP(t *testing.T
testUtils.WaitForSync{},
testUtils.UpdateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
NodeID: immutable.Some(0),
@@ -150,7 +150,7 @@ func TestACP_P2PUpdatePrivateDocumentsOnDifferentNodes_SourceHubACP(t *testing.T
},
testUtils.UpdateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
NodeID: immutable.Some(1),
diff --git a/tests/integration/acp/query/avg_test.go b/tests/integration/acp/query/avg_test.go
index 34b03de6ea..f7804a3474 100644
--- a/tests/integration/acp/query/avg_test.go
+++ b/tests/integration/acp/query/avg_test.go
@@ -13,8 +13,6 @@ package test_acp
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -50,7 +48,7 @@ func TestACP_QueryAverageWithIdentity(t *testing.T) {
getSetupEmployeeCompanyActions(),
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
_avg(Employee: {field: salary})
@@ -75,7 +73,7 @@ func TestACP_QueryAverageWithWrongIdentity(t *testing.T) {
getSetupEmployeeCompanyActions(),
testUtils.Request{
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Request: `
query {
_avg(Employee: {field: salary})
diff --git a/tests/integration/acp/query/count_test.go b/tests/integration/acp/query/count_test.go
index e5f867b3d5..1b0e450148 100644
--- a/tests/integration/acp/query/count_test.go
+++ b/tests/integration/acp/query/count_test.go
@@ -13,8 +13,6 @@ package test_acp
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -79,7 +77,7 @@ func TestACP_QueryCountDocumentsWithIdentity(t *testing.T) {
getSetupEmployeeCompanyActions(),
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
_count(Employee: {})
@@ -103,7 +101,7 @@ func TestACP_QueryCountRelatedObjectsWithIdentity(t *testing.T) {
getSetupEmployeeCompanyActions(),
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
Company {
@@ -136,7 +134,7 @@ func TestACP_QueryCountDocumentsWithWrongIdentity(t *testing.T) {
getSetupEmployeeCompanyActions(),
testUtils.Request{
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Request: `
query {
_count(Employee: {})
@@ -160,7 +158,7 @@ func TestACP_QueryCountRelatedObjectsWithWrongIdentity(t *testing.T) {
getSetupEmployeeCompanyActions(),
testUtils.Request{
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Request: `
query {
Company {
diff --git a/tests/integration/acp/query/fixture.go b/tests/integration/acp/query/fixture.go
index d526a218d3..7b9394cd27 100644
--- a/tests/integration/acp/query/fixture.go
+++ b/tests/integration/acp/query/fixture.go
@@ -11,8 +11,6 @@
package test_acp
import (
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -58,7 +56,7 @@ resources:
func getSetupEmployeeCompanyActions() []any {
return []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: employeeCompanyPolicy,
ExpectedPolicyID: "9d6c19007a894746c3f45f7fe45513a88a20ad77637948228869546197bb1b05",
},
@@ -96,7 +94,7 @@ func getSetupEmployeeCompanyActions() []any {
},
testUtils.CreateDoc{
CollectionID: 1,
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Doc: `
{
"name": "Private Company",
@@ -122,7 +120,7 @@ func getSetupEmployeeCompanyActions() []any {
},
testUtils.CreateDoc{
CollectionID: 0,
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
DocMap: map[string]any{
"name": "PrivateEmp in PubCompany",
"salary": 30000,
@@ -131,7 +129,7 @@ func getSetupEmployeeCompanyActions() []any {
},
testUtils.CreateDoc{
CollectionID: 0,
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
DocMap: map[string]any{
"name": "PrivateEmp in PrivateCompany",
"salary": 40000,
diff --git a/tests/integration/acp/query/relation_objects_test.go b/tests/integration/acp/query/relation_objects_test.go
index afbc014c08..eed0ff7351 100644
--- a/tests/integration/acp/query/relation_objects_test.go
+++ b/tests/integration/acp/query/relation_objects_test.go
@@ -13,8 +13,6 @@ package test_acp
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -98,7 +96,7 @@ func TestACP_QueryManyToOneRelationObjectsWithIdentity(t *testing.T) {
getSetupEmployeeCompanyActions(),
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
Employee {
@@ -144,7 +142,7 @@ func TestACP_QueryOneToManyRelationObjectsWithIdentity(t *testing.T) {
getSetupEmployeeCompanyActions(),
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
Company {
@@ -188,7 +186,7 @@ func TestACP_QueryManyToOneRelationObjectsWithWrongIdentity(t *testing.T) {
getSetupEmployeeCompanyActions(),
testUtils.Request{
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Request: `
query {
Employee {
@@ -226,7 +224,7 @@ func TestACP_QueryOneToManyRelationObjectsWithWrongIdentity(t *testing.T) {
getSetupEmployeeCompanyActions(),
testUtils.Request{
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Request: `
query {
Company {
diff --git a/tests/integration/acp/register_and_delete_test.go b/tests/integration/acp/register_and_delete_test.go
index e30388bd76..4c4dead596 100644
--- a/tests/integration/acp/register_and_delete_test.go
+++ b/tests/integration/acp/register_and_delete_test.go
@@ -13,8 +13,6 @@ package test_acp
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -30,7 +28,7 @@ func TestACP_CreateWithoutIdentityAndDeleteWithoutIdentity_CanDelete(t *testing.
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -126,7 +124,7 @@ func TestACP_CreateWithoutIdentityAndDeleteWithIdentity_CanDelete(t *testing.T)
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -186,7 +184,7 @@ func TestACP_CreateWithoutIdentityAndDeleteWithIdentity_CanDelete(t *testing.T)
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
DocID: 0,
},
@@ -219,7 +217,7 @@ func TestACP_CreateWithIdentityAndDeleteWithIdentity_CanDelete(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -268,7 +266,7 @@ func TestACP_CreateWithIdentityAndDeleteWithIdentity_CanDelete(t *testing.T) {
testUtils.CreateDoc{
CollectionID: 0,
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Doc: `
{
@@ -281,13 +279,13 @@ func TestACP_CreateWithIdentityAndDeleteWithIdentity_CanDelete(t *testing.T) {
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
DocID: 0,
},
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
@@ -316,7 +314,7 @@ func TestACP_CreateWithIdentityAndDeleteWithoutIdentity_CanNotDelete(t *testing.
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -365,7 +363,7 @@ func TestACP_CreateWithIdentityAndDeleteWithoutIdentity_CanNotDelete(t *testing.
testUtils.CreateDoc{
CollectionID: 0,
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Doc: `
{
@@ -384,7 +382,7 @@ func TestACP_CreateWithIdentityAndDeleteWithoutIdentity_CanNotDelete(t *testing.
},
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
@@ -419,7 +417,7 @@ func TestACP_CreateWithIdentityAndDeleteWithWrongIdentity_CanNotDelete(t *testin
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -468,7 +466,7 @@ func TestACP_CreateWithIdentityAndDeleteWithWrongIdentity_CanNotDelete(t *testin
testUtils.CreateDoc{
CollectionID: 0,
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Doc: `
{
@@ -481,7 +479,7 @@ func TestACP_CreateWithIdentityAndDeleteWithWrongIdentity_CanNotDelete(t *testin
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
DocID: 0,
@@ -489,7 +487,7 @@ func TestACP_CreateWithIdentityAndDeleteWithWrongIdentity_CanNotDelete(t *testin
},
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
diff --git a/tests/integration/acp/register_and_read_test.go b/tests/integration/acp/register_and_read_test.go
index 83d0819f54..62d041d061 100644
--- a/tests/integration/acp/register_and_read_test.go
+++ b/tests/integration/acp/register_and_read_test.go
@@ -13,8 +13,6 @@ package test_acp
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -26,7 +24,7 @@ func TestACP_CreateWithoutIdentityAndReadWithoutIdentity_CanRead(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -117,7 +115,7 @@ func TestACP_CreateWithoutIdentityAndReadWithIdentity_CanRead(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -175,7 +173,7 @@ func TestACP_CreateWithoutIdentityAndReadWithIdentity_CanRead(t *testing.T) {
},
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
@@ -210,7 +208,7 @@ func TestACP_CreateWithIdentityAndReadWithIdentity_CanRead(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -259,7 +257,7 @@ func TestACP_CreateWithIdentityAndReadWithIdentity_CanRead(t *testing.T) {
testUtils.CreateDoc{
CollectionID: 0,
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Doc: `
{
@@ -270,7 +268,7 @@ func TestACP_CreateWithIdentityAndReadWithIdentity_CanRead(t *testing.T) {
},
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
@@ -305,7 +303,7 @@ func TestACP_CreateWithIdentityAndReadWithoutIdentity_CanNotRead(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -354,7 +352,7 @@ func TestACP_CreateWithIdentityAndReadWithoutIdentity_CanNotRead(t *testing.T) {
testUtils.CreateDoc{
CollectionID: 0,
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Doc: `
{
@@ -392,7 +390,7 @@ func TestACP_CreateWithIdentityAndReadWithWrongIdentity_CanNotRead(t *testing.T)
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -441,7 +439,7 @@ func TestACP_CreateWithIdentityAndReadWithWrongIdentity_CanNotRead(t *testing.T)
testUtils.CreateDoc{
CollectionID: 0,
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Doc: `
{
@@ -452,7 +450,7 @@ func TestACP_CreateWithIdentityAndReadWithWrongIdentity_CanNotRead(t *testing.T)
},
testUtils.Request{
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Request: `
query {
diff --git a/tests/integration/acp/register_and_update_test.go b/tests/integration/acp/register_and_update_test.go
index 4bf39a0508..4cbb186e19 100644
--- a/tests/integration/acp/register_and_update_test.go
+++ b/tests/integration/acp/register_and_update_test.go
@@ -30,7 +30,7 @@ func TestACP_CreateWithoutIdentityAndUpdateWithoutIdentity_CanUpdate(t *testing.
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -138,7 +138,7 @@ func TestACP_CreateWithoutIdentityAndUpdateWithIdentity_CanUpdate(t *testing.T)
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -198,7 +198,7 @@ func TestACP_CreateWithoutIdentityAndUpdateWithIdentity_CanUpdate(t *testing.T)
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
DocID: 0,
@@ -243,7 +243,7 @@ func TestACP_CreateWithIdentityAndUpdateWithIdentity_CanUpdate(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -292,7 +292,7 @@ func TestACP_CreateWithIdentityAndUpdateWithIdentity_CanUpdate(t *testing.T) {
testUtils.CreateDoc{
CollectionID: 0,
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Doc: `
{
@@ -305,7 +305,7 @@ func TestACP_CreateWithIdentityAndUpdateWithIdentity_CanUpdate(t *testing.T) {
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
DocID: 0,
@@ -317,7 +317,7 @@ func TestACP_CreateWithIdentityAndUpdateWithIdentity_CanUpdate(t *testing.T) {
},
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
@@ -358,7 +358,7 @@ func TestACP_CreateWithIdentityAndUpdateWithoutIdentity_CanNotUpdate(t *testing.
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -407,7 +407,7 @@ func TestACP_CreateWithIdentityAndUpdateWithoutIdentity_CanNotUpdate(t *testing.
testUtils.CreateDoc{
CollectionID: 0,
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Doc: `
{
@@ -432,7 +432,7 @@ func TestACP_CreateWithIdentityAndUpdateWithoutIdentity_CanNotUpdate(t *testing.
},
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
@@ -473,7 +473,7 @@ func TestACP_CreateWithIdentityAndUpdateWithWrongIdentity_CanNotUpdate(t *testin
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -522,7 +522,7 @@ func TestACP_CreateWithIdentityAndUpdateWithWrongIdentity_CanNotUpdate(t *testin
testUtils.CreateDoc{
CollectionID: 0,
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Doc: `
{
@@ -535,7 +535,7 @@ func TestACP_CreateWithIdentityAndUpdateWithWrongIdentity_CanNotUpdate(t *testin
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
DocID: 0,
@@ -549,7 +549,7 @@ func TestACP_CreateWithIdentityAndUpdateWithWrongIdentity_CanNotUpdate(t *testin
},
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
@@ -591,7 +591,7 @@ func TestACP_CreateWithIdentityAndUpdateWithoutIdentityGQL_CanNotUpdate(t *testi
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -640,7 +640,7 @@ func TestACP_CreateWithIdentityAndUpdateWithoutIdentityGQL_CanNotUpdate(t *testi
testUtils.CreateDoc{
CollectionID: 0,
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Doc: `
{
@@ -665,7 +665,7 @@ func TestACP_CreateWithIdentityAndUpdateWithoutIdentityGQL_CanNotUpdate(t *testi
},
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
@@ -707,7 +707,7 @@ func TestACP_CreateWithIdentityAndUpdateWithWrongIdentityGQL_CanNotUpdate(t *tes
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -756,7 +756,7 @@ func TestACP_CreateWithIdentityAndUpdateWithWrongIdentityGQL_CanNotUpdate(t *tes
testUtils.CreateDoc{
CollectionID: 0,
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Doc: `
{
@@ -769,7 +769,7 @@ func TestACP_CreateWithIdentityAndUpdateWithWrongIdentityGQL_CanNotUpdate(t *tes
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
DocID: 0,
@@ -783,7 +783,7 @@ func TestACP_CreateWithIdentityAndUpdateWithWrongIdentityGQL_CanNotUpdate(t *tes
},
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
diff --git a/tests/integration/acp/relationship/doc_actor/add/invalid_test.go b/tests/integration/acp/relationship/doc_actor/add/invalid_test.go
index d9f96d9c21..0742151493 100644
--- a/tests/integration/acp/relationship/doc_actor/add/invalid_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/invalid_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -29,7 +27,7 @@ func TestACP_AddDocActorRelationshipMissingDocID_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -93,7 +91,7 @@ func TestACP_AddDocActorRelationshipMissingDocID_Error(t *testing.T) {
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -106,9 +104,9 @@ func TestACP_AddDocActorRelationshipMissingDocID_Error(t *testing.T) {
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -134,7 +132,7 @@ func TestACP_AddDocActorRelationshipMissingCollection_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -198,7 +196,7 @@ func TestACP_AddDocActorRelationshipMissingCollection_Error(t *testing.T) {
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -211,9 +209,9 @@ func TestACP_AddDocActorRelationshipMissingCollection_Error(t *testing.T) {
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: -1,
@@ -239,7 +237,7 @@ func TestACP_AddDocActorRelationshipMissingRelationName_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -303,7 +301,7 @@ func TestACP_AddDocActorRelationshipMissingRelationName_Error(t *testing.T) {
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -316,9 +314,9 @@ func TestACP_AddDocActorRelationshipMissingRelationName_Error(t *testing.T) {
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -344,7 +342,7 @@ func TestACP_AddDocActorRelationshipMissingTargetActorName_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -408,7 +406,7 @@ func TestACP_AddDocActorRelationshipMissingTargetActorName_Error(t *testing.T) {
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -421,9 +419,9 @@ func TestACP_AddDocActorRelationshipMissingTargetActorName_Error(t *testing.T) {
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: -1,
+ TargetIdentity: testUtils.NoIdentity(),
CollectionID: 0,
@@ -449,7 +447,7 @@ func TestACP_AddDocActorRelationshipMissingReqestingIdentityName_Error(t *testin
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -513,7 +511,7 @@ func TestACP_AddDocActorRelationshipMissingReqestingIdentityName_Error(t *testin
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -526,9 +524,9 @@ func TestACP_AddDocActorRelationshipMissingReqestingIdentityName_Error(t *testin
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: -1,
+ RequestorIdentity: testUtils.NoIdentity(),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_delete_test.go b/tests/integration/acp/relationship/doc_actor/add/with_delete_test.go
index c87c3c0a8f..b75fb41ef3 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_delete_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_delete_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -29,7 +27,7 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActorTwice_ShowThatTheRelations
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -93,7 +91,7 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActorTwice_ShowThatTheRelations
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -106,7 +104,7 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActorTwice_ShowThatTheRelations
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not read yet.
Request: `
query {
@@ -126,7 +124,7 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActorTwice_ShowThatTheRelations
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // This identity can not delete yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not delete yet.
DocID: 0,
@@ -134,9 +132,9 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActorTwice_ShowThatTheRelations
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -148,9 +146,9 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActorTwice_ShowThatTheRelations
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -176,7 +174,7 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActor_OtherActorCanDelete(t *te
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -240,7 +238,7 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActor_OtherActorCanDelete(t *te
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -253,7 +251,7 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActor_OtherActorCanDelete(t *te
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not read yet.
Request: `
query {
@@ -273,7 +271,7 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActor_OtherActorCanDelete(t *te
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // This identity can not delete yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not delete yet.
DocID: 0,
@@ -281,9 +279,9 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActor_OtherActorCanDelete(t *te
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -295,7 +293,7 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActor_OtherActorCanDelete(t *te
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can now read.
+ Identity: testUtils.ClientIdentity(2), // This identity can now read.
Request: `
query {
@@ -321,13 +319,13 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActor_OtherActorCanDelete(t *te
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // This identity can now delete.
+ Identity: testUtils.ClientIdentity(2), // This identity can now delete.
DocID: 0,
},
testUtils.Request{
- Identity: immutable.Some(2), // Check if actually deleted.
+ Identity: testUtils.ClientIdentity(2), // Check if actually deleted.
Request: `
query {
@@ -359,7 +357,7 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActor_OtherActorCanDeleteSoCanT
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -423,7 +421,7 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActor_OtherActorCanDeleteSoCanT
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -436,9 +434,9 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActor_OtherActorCanDeleteSoCanT
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -450,7 +448,7 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActor_OtherActorCanDeleteSoCanT
},
testUtils.Request{
- Identity: immutable.Some(1), // Owner can still also delete (ownership not transferred)
+ Identity: testUtils.ClientIdentity(1), // Owner can still also delete (ownership not transferred)
Request: `
query {
@@ -476,13 +474,13 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActor_OtherActorCanDeleteSoCanT
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(1), // Owner can still also delete.
+ Identity: testUtils.ClientIdentity(1), // Owner can still also delete.
DocID: 0,
},
testUtils.Request{
- Identity: immutable.Some(1), // Check if actually deleted.
+ Identity: testUtils.ClientIdentity(1), // Check if actually deleted.
Request: `
query {
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_dummy_relation_test.go b/tests/integration/acp/relationship/doc_actor/add/with_dummy_relation_test.go
index 79cc4639e2..7bfe6c2ff2 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_dummy_relation_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_dummy_relation_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -29,7 +27,7 @@ func TestACP_AddDocActorRelationshipWithDummyRelationDefinedOnPolicy_NothingChan
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -93,7 +91,7 @@ func TestACP_AddDocActorRelationshipWithDummyRelationDefinedOnPolicy_NothingChan
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -106,7 +104,7 @@ func TestACP_AddDocActorRelationshipWithDummyRelationDefinedOnPolicy_NothingChan
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not read yet.
Request: `
query {
@@ -124,9 +122,9 @@ func TestACP_AddDocActorRelationshipWithDummyRelationDefinedOnPolicy_NothingChan
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -138,7 +136,7 @@ func TestACP_AddDocActorRelationshipWithDummyRelationDefinedOnPolicy_NothingChan
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can still not read.
+ Identity: testUtils.ClientIdentity(2), // This identity can still not read.
Request: `
query {
@@ -170,7 +168,7 @@ func TestACP_AddDocActorRelationshipWithDummyRelationNotDefinedOnPolicy_Error(t
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -234,7 +232,7 @@ func TestACP_AddDocActorRelationshipWithDummyRelationNotDefinedOnPolicy_Error(t
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -247,7 +245,7 @@ func TestACP_AddDocActorRelationshipWithDummyRelationNotDefinedOnPolicy_Error(t
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not read yet.
Request: `
query {
@@ -265,9 +263,9 @@ func TestACP_AddDocActorRelationshipWithDummyRelationNotDefinedOnPolicy_Error(t
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -279,7 +277,7 @@ func TestACP_AddDocActorRelationshipWithDummyRelationNotDefinedOnPolicy_Error(t
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can still not read.
+ Identity: testUtils.ClientIdentity(2), // This identity can still not read.
Request: `
query {
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_manager_gql_test.go b/tests/integration/acp/relationship/doc_actor/add/with_manager_gql_test.go
index 1881979c32..757053f365 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_manager_gql_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_manager_gql_test.go
@@ -34,7 +34,7 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_GQL_ManagerCanR
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -99,7 +99,7 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_GQL_ManagerCanR
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -112,7 +112,7 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_GQL_ManagerCanR
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity (to be manager) can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity (to be manager) can not read yet.
Request: `
query {
@@ -132,7 +132,7 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_GQL_ManagerCanR
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // Manager can't update yet.
+ Identity: testUtils.ClientIdentity(2), // Manager can't update yet.
DocID: 0,
@@ -148,7 +148,7 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_GQL_ManagerCanR
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // Manager can't delete yet.
+ Identity: testUtils.ClientIdentity(2), // Manager can't delete yet.
DocID: 0,
@@ -156,9 +156,9 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_GQL_ManagerCanR
},
testUtils.AddDocActorRelationship{ // Make admin / manager
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -170,9 +170,9 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_GQL_ManagerCanR
},
testUtils.AddDocActorRelationship{ // Manager makes itself a writer
- RequestorIdentity: 2,
+ RequestorIdentity: testUtils.ClientIdentity(2),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -185,9 +185,9 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_GQL_ManagerCanR
// Note: It is not neccesary to make itself a reader, as becoming a writer allows reading.
testUtils.AddDocActorRelationship{ // Manager makes itself a reader
- RequestorIdentity: 2,
+ RequestorIdentity: testUtils.ClientIdentity(2),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -201,7 +201,7 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_GQL_ManagerCanR
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // Manager can now update.
+ Identity: testUtils.ClientIdentity(2), // Manager can now update.
DocID: 0,
@@ -213,7 +213,7 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_GQL_ManagerCanR
},
testUtils.Request{
- Identity: immutable.Some(2), // Manager can read now
+ Identity: testUtils.ClientIdentity(2), // Manager can read now
Request: `
query {
@@ -239,13 +239,13 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_GQL_ManagerCanR
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // Manager can now delete.
+ Identity: testUtils.ClientIdentity(2), // Manager can now delete.
DocID: 0,
},
testUtils.Request{
- Identity: immutable.Some(2), // Make sure manager was able to delete the document.
+ Identity: testUtils.ClientIdentity(2), // Make sure manager was able to delete the document.
Request: `
query {
@@ -282,7 +282,7 @@ func TestACP_OwnerMakesManagerButManagerCanNotPerformOperations_GQL_ManagerCantR
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -346,7 +346,7 @@ func TestACP_OwnerMakesManagerButManagerCanNotPerformOperations_GQL_ManagerCantR
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -359,9 +359,9 @@ func TestACP_OwnerMakesManagerButManagerCanNotPerformOperations_GQL_ManagerCantR
},
testUtils.AddDocActorRelationship{ // Make admin / manager
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -373,7 +373,7 @@ func TestACP_OwnerMakesManagerButManagerCanNotPerformOperations_GQL_ManagerCantR
},
testUtils.Request{
- Identity: immutable.Some(2), // Manager can not read
+ Identity: testUtils.ClientIdentity(2), // Manager can not read
Request: `
query {
@@ -393,7 +393,7 @@ func TestACP_OwnerMakesManagerButManagerCanNotPerformOperations_GQL_ManagerCantR
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // Manager can not update.
+ Identity: testUtils.ClientIdentity(2), // Manager can not update.
DocID: 0,
@@ -409,7 +409,7 @@ func TestACP_OwnerMakesManagerButManagerCanNotPerformOperations_GQL_ManagerCantR
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // Manager can not delete.
+ Identity: testUtils.ClientIdentity(2), // Manager can not delete.
DocID: 0,
@@ -417,9 +417,9 @@ func TestACP_OwnerMakesManagerButManagerCanNotPerformOperations_GQL_ManagerCantR
},
testUtils.AddDocActorRelationship{ // Manager can manage only.
- RequestorIdentity: 2,
+ RequestorIdentity: testUtils.ClientIdentity(2),
- TargetIdentity: 3,
+ TargetIdentity: testUtils.ClientIdentity(3),
CollectionID: 0,
@@ -450,7 +450,7 @@ func TestACP_ManagerAddsRelationshipWithRelationItDoesNotManageAccordingToPolicy
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -514,7 +514,7 @@ func TestACP_ManagerAddsRelationshipWithRelationItDoesNotManageAccordingToPolicy
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -527,9 +527,9 @@ func TestACP_ManagerAddsRelationshipWithRelationItDoesNotManageAccordingToPolicy
},
testUtils.AddDocActorRelationship{ // Make admin / manager
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -541,9 +541,9 @@ func TestACP_ManagerAddsRelationshipWithRelationItDoesNotManageAccordingToPolicy
},
testUtils.AddDocActorRelationship{ // Admin tries to make another actor a writer
- RequestorIdentity: 2,
+ RequestorIdentity: testUtils.ClientIdentity(2),
- TargetIdentity: 3,
+ TargetIdentity: testUtils.ClientIdentity(3),
CollectionID: 0,
@@ -555,7 +555,7 @@ func TestACP_ManagerAddsRelationshipWithRelationItDoesNotManageAccordingToPolicy
},
testUtils.Request{
- Identity: immutable.Some(3), // The other actor can't read
+ Identity: testUtils.ClientIdentity(3), // The other actor can't read
Request: `
query {
@@ -575,7 +575,7 @@ func TestACP_ManagerAddsRelationshipWithRelationItDoesNotManageAccordingToPolicy
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(3), // The other actor can not update
+ Identity: testUtils.ClientIdentity(3), // The other actor can not update
DocID: 0,
@@ -591,7 +591,7 @@ func TestACP_ManagerAddsRelationshipWithRelationItDoesNotManageAccordingToPolicy
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(3), // The other actor can not delete
+ Identity: testUtils.ClientIdentity(3), // The other actor can not delete
DocID: 0,
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_manager_test.go b/tests/integration/acp/relationship/doc_actor/add/with_manager_test.go
index f07971589c..485c130805 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_manager_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_manager_test.go
@@ -29,7 +29,7 @@ func TestACP_ManagerGivesReadAccessToAnotherActor_OtherActorCanRead(t *testing.T
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -93,7 +93,7 @@ func TestACP_ManagerGivesReadAccessToAnotherActor_OtherActorCanRead(t *testing.T
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -106,7 +106,7 @@ func TestACP_ManagerGivesReadAccessToAnotherActor_OtherActorCanRead(t *testing.T
},
testUtils.Request{
- Identity: immutable.Some(3), // This identity can not read yet.
+ Identity: testUtils.ClientIdentity(3), // This identity can not read yet.
Request: `
query {
@@ -124,9 +124,9 @@ func TestACP_ManagerGivesReadAccessToAnotherActor_OtherActorCanRead(t *testing.T
},
testUtils.AddDocActorRelationship{ // Make admin / manager
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -138,9 +138,9 @@ func TestACP_ManagerGivesReadAccessToAnotherActor_OtherActorCanRead(t *testing.T
},
testUtils.AddDocActorRelationship{ // Admin makes another actor a reader
- RequestorIdentity: 2,
+ RequestorIdentity: testUtils.ClientIdentity(2),
- TargetIdentity: 3,
+ TargetIdentity: testUtils.ClientIdentity(3),
CollectionID: 0,
@@ -152,7 +152,7 @@ func TestACP_ManagerGivesReadAccessToAnotherActor_OtherActorCanRead(t *testing.T
},
testUtils.Request{
- Identity: immutable.Some(3), // The other actor can read
+ Identity: testUtils.ClientIdentity(3), // The other actor can read
Request: `
query {
@@ -178,7 +178,7 @@ func TestACP_ManagerGivesReadAccessToAnotherActor_OtherActorCanRead(t *testing.T
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(3), // The other actor can not update
+ Identity: testUtils.ClientIdentity(3), // The other actor can not update
DocID: 0,
@@ -194,7 +194,7 @@ func TestACP_ManagerGivesReadAccessToAnotherActor_OtherActorCanRead(t *testing.T
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(3), // The other actor can not delete
+ Identity: testUtils.ClientIdentity(3), // The other actor can not delete
DocID: 0,
@@ -216,7 +216,7 @@ func TestACP_ManagerGivesWriteAccessToAnotherActor_OtherActorCanWrite(t *testing
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -280,7 +280,7 @@ func TestACP_ManagerGivesWriteAccessToAnotherActor_OtherActorCanWrite(t *testing
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -293,7 +293,7 @@ func TestACP_ManagerGivesWriteAccessToAnotherActor_OtherActorCanWrite(t *testing
},
testUtils.Request{
- Identity: immutable.Some(3), // This identity can not read yet.
+ Identity: testUtils.ClientIdentity(3), // This identity can not read yet.
Request: `
query {
@@ -311,9 +311,9 @@ func TestACP_ManagerGivesWriteAccessToAnotherActor_OtherActorCanWrite(t *testing
},
testUtils.AddDocActorRelationship{ // Make admin / manager
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -325,9 +325,9 @@ func TestACP_ManagerGivesWriteAccessToAnotherActor_OtherActorCanWrite(t *testing
},
testUtils.AddDocActorRelationship{ // Admin makes another actor a writer
- RequestorIdentity: 2,
+ RequestorIdentity: testUtils.ClientIdentity(2),
- TargetIdentity: 3,
+ TargetIdentity: testUtils.ClientIdentity(3),
CollectionID: 0,
@@ -341,7 +341,7 @@ func TestACP_ManagerGivesWriteAccessToAnotherActor_OtherActorCanWrite(t *testing
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(3), // The other actor can update
+ Identity: testUtils.ClientIdentity(3), // The other actor can update
DocID: 0,
@@ -353,7 +353,7 @@ func TestACP_ManagerGivesWriteAccessToAnotherActor_OtherActorCanWrite(t *testing
},
testUtils.Request{
- Identity: immutable.Some(3), // The other actor can read
+ Identity: testUtils.ClientIdentity(3), // The other actor can read
Request: `
query {
@@ -379,13 +379,13 @@ func TestACP_ManagerGivesWriteAccessToAnotherActor_OtherActorCanWrite(t *testing
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(3), // The other actor can delete
+ Identity: testUtils.ClientIdentity(3), // The other actor can delete
DocID: 0,
},
testUtils.Request{
- Identity: immutable.Some(3),
+ Identity: testUtils.ClientIdentity(3),
Request: `
query {
@@ -417,7 +417,7 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAccess_ManagerCanRead(t *testi
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -481,7 +481,7 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAccess_ManagerCanRead(t *testi
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -494,7 +494,7 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAccess_ManagerCanRead(t *testi
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity (to be manager) can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity (to be manager) can not read yet.
Request: `
query {
@@ -512,9 +512,9 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAccess_ManagerCanRead(t *testi
},
testUtils.AddDocActorRelationship{ // Make admin / manager
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -526,9 +526,9 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAccess_ManagerCanRead(t *testi
},
testUtils.AddDocActorRelationship{ // Manager makes itself a reader
- RequestorIdentity: 2,
+ RequestorIdentity: testUtils.ClientIdentity(2),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -540,7 +540,7 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAccess_ManagerCanRead(t *testi
},
testUtils.Request{
- Identity: immutable.Some(2), // Manager can read now
+ Identity: testUtils.ClientIdentity(2), // Manager can read now
Request: `
query {
@@ -566,7 +566,7 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAccess_ManagerCanRead(t *testi
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // Manager still can't update
+ Identity: testUtils.ClientIdentity(2), // Manager still can't update
DocID: 0,
@@ -582,7 +582,7 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAccess_ManagerCanRead(t *testi
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // Manager still can't delete
+ Identity: testUtils.ClientIdentity(2), // Manager still can't delete
DocID: 0,
@@ -609,7 +609,7 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_ManagerCanReadA
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -674,7 +674,7 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_ManagerCanReadA
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -687,7 +687,7 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_ManagerCanReadA
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity (to be manager) can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity (to be manager) can not read yet.
Request: `
query {
@@ -707,7 +707,7 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_ManagerCanReadA
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // Manager can't update yet.
+ Identity: testUtils.ClientIdentity(2), // Manager can't update yet.
DocID: 0,
@@ -723,7 +723,7 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_ManagerCanReadA
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // Manager can't delete yet.
+ Identity: testUtils.ClientIdentity(2), // Manager can't delete yet.
DocID: 0,
@@ -731,9 +731,9 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_ManagerCanReadA
},
testUtils.AddDocActorRelationship{ // Make admin / manager
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -745,9 +745,9 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_ManagerCanReadA
},
testUtils.AddDocActorRelationship{ // Manager makes itself a writer
- RequestorIdentity: 2,
+ RequestorIdentity: testUtils.ClientIdentity(2),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -760,9 +760,9 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_ManagerCanReadA
// Note: It is not neccesary to make itself a reader, as becoming a writer allows reading.
testUtils.AddDocActorRelationship{ // Manager makes itself a reader
- RequestorIdentity: 2,
+ RequestorIdentity: testUtils.ClientIdentity(2),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -776,7 +776,7 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_ManagerCanReadA
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // Manager can now update.
+ Identity: testUtils.ClientIdentity(2), // Manager can now update.
DocID: 0,
@@ -788,7 +788,7 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_ManagerCanReadA
},
testUtils.Request{
- Identity: immutable.Some(2), // Manager can read now
+ Identity: testUtils.ClientIdentity(2), // Manager can read now
Request: `
query {
@@ -814,13 +814,13 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_ManagerCanReadA
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // Manager can now delete.
+ Identity: testUtils.ClientIdentity(2), // Manager can now delete.
DocID: 0,
},
testUtils.Request{
- Identity: immutable.Some(2), // Make sure manager was able to delete the document.
+ Identity: testUtils.ClientIdentity(2), // Make sure manager was able to delete the document.
Request: `
query {
@@ -857,7 +857,7 @@ func TestACP_ManagerAddsRelationshipWithRelationItDoesNotManageAccordingToPolicy
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -921,7 +921,7 @@ func TestACP_ManagerAddsRelationshipWithRelationItDoesNotManageAccordingToPolicy
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -934,9 +934,9 @@ func TestACP_ManagerAddsRelationshipWithRelationItDoesNotManageAccordingToPolicy
},
testUtils.AddDocActorRelationship{ // Make admin / manager
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -948,9 +948,9 @@ func TestACP_ManagerAddsRelationshipWithRelationItDoesNotManageAccordingToPolicy
},
testUtils.AddDocActorRelationship{ // Admin tries to make another actor a writer
- RequestorIdentity: 2,
+ RequestorIdentity: testUtils.ClientIdentity(2),
- TargetIdentity: 3,
+ TargetIdentity: testUtils.ClientIdentity(3),
CollectionID: 0,
@@ -962,7 +962,7 @@ func TestACP_ManagerAddsRelationshipWithRelationItDoesNotManageAccordingToPolicy
},
testUtils.Request{
- Identity: immutable.Some(3), // The other actor can't read
+ Identity: testUtils.ClientIdentity(3), // The other actor can't read
Request: `
query {
@@ -982,7 +982,7 @@ func TestACP_ManagerAddsRelationshipWithRelationItDoesNotManageAccordingToPolicy
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(3), // The other actor can not update
+ Identity: testUtils.ClientIdentity(3), // The other actor can not update
DocID: 0,
@@ -998,7 +998,7 @@ func TestACP_ManagerAddsRelationshipWithRelationItDoesNotManageAccordingToPolicy
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(3), // The other actor can not delete
+ Identity: testUtils.ClientIdentity(3), // The other actor can not delete
DocID: 0,
@@ -1025,7 +1025,7 @@ func TestACP_OwnerMakesManagerButManagerCanNotPerformOperations_ManagerCantReadO
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -1089,7 +1089,7 @@ func TestACP_OwnerMakesManagerButManagerCanNotPerformOperations_ManagerCantReadO
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -1102,9 +1102,9 @@ func TestACP_OwnerMakesManagerButManagerCanNotPerformOperations_ManagerCantReadO
},
testUtils.AddDocActorRelationship{ // Make admin / manager
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -1116,7 +1116,7 @@ func TestACP_OwnerMakesManagerButManagerCanNotPerformOperations_ManagerCantReadO
},
testUtils.Request{
- Identity: immutable.Some(2), // Manager can not read
+ Identity: testUtils.ClientIdentity(2), // Manager can not read
Request: `
query {
@@ -1136,7 +1136,7 @@ func TestACP_OwnerMakesManagerButManagerCanNotPerformOperations_ManagerCantReadO
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // Manager can not update.
+ Identity: testUtils.ClientIdentity(2), // Manager can not update.
DocID: 0,
@@ -1152,7 +1152,7 @@ func TestACP_OwnerMakesManagerButManagerCanNotPerformOperations_ManagerCantReadO
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // Manager can not delete.
+ Identity: testUtils.ClientIdentity(2), // Manager can not delete.
DocID: 0,
@@ -1160,9 +1160,9 @@ func TestACP_OwnerMakesManagerButManagerCanNotPerformOperations_ManagerCantReadO
},
testUtils.AddDocActorRelationship{ // Manager can manage only.
- RequestorIdentity: 2,
+ RequestorIdentity: testUtils.ClientIdentity(2),
- TargetIdentity: 3,
+ TargetIdentity: testUtils.ClientIdentity(3),
CollectionID: 0,
@@ -1188,7 +1188,7 @@ func TestACP_CantMakeRelationshipIfNotOwnerOrManager_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -1252,7 +1252,7 @@ func TestACP_CantMakeRelationshipIfNotOwnerOrManager_Error(t *testing.T) {
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -1265,9 +1265,9 @@ func TestACP_CantMakeRelationshipIfNotOwnerOrManager_Error(t *testing.T) {
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 2, // This identity can not manage as not an admin yet
+ RequestorIdentity: testUtils.ClientIdentity(2), // This identity can not manage as not an admin yet
- TargetIdentity: 3,
+ TargetIdentity: testUtils.ClientIdentity(3),
CollectionID: 0,
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_no_policy_on_collection_test.go b/tests/integration/acp/relationship/doc_actor/add/with_no_policy_on_collection_test.go
index a7ad53db41..1f2d7eb1da 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_no_policy_on_collection_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_no_policy_on_collection_test.go
@@ -13,8 +13,6 @@ package test_acp_relationship_doc_actor_add
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -34,7 +32,7 @@ func TestACP_AddDocActorRelationshipWithCollectionThatHasNoPolicy_NotAllowedErro
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -47,9 +45,9 @@ func TestACP_AddDocActorRelationshipWithCollectionThatHasNoPolicy_NotAllowedErro
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_only_write_gql_test.go b/tests/integration/acp/relationship/doc_actor/add/with_only_write_gql_test.go
index 9c6649c2c1..6a3f02f4ba 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_only_write_gql_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_only_write_gql_test.go
@@ -34,7 +34,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_GQ
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -98,7 +98,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_GQ
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -111,7 +111,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_GQ
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not read yet.
Request: `
query {
@@ -131,7 +131,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_GQ
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // This identity can not update yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not update yet.
DocID: 0,
@@ -145,9 +145,9 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_GQ
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -161,7 +161,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_GQ
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // This identity can now update.
+ Identity: testUtils.ClientIdentity(2), // This identity can now update.
DocID: 0,
@@ -173,7 +173,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_GQ
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can now also read.
+ Identity: testUtils.ClientIdentity(2), // This identity can now also read.
Request: `
query {
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_only_write_test.go b/tests/integration/acp/relationship/doc_actor/add/with_only_write_test.go
index 8333790f3d..ccac9cd232 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_only_write_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_only_write_test.go
@@ -34,7 +34,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -98,7 +98,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -111,7 +111,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not read yet.
Request: `
query {
@@ -131,7 +131,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // This identity can not update yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not update yet.
DocID: 0,
@@ -145,9 +145,9 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -161,7 +161,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // This identity can now update.
+ Identity: testUtils.ClientIdentity(2), // This identity can now update.
DocID: 0,
@@ -173,7 +173,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can now also read.
+ Identity: testUtils.ClientIdentity(2), // This identity can now also read.
Request: `
query {
@@ -211,7 +211,7 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -275,7 +275,7 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -288,7 +288,7 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not read yet.
Request: `
query {
@@ -308,7 +308,7 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // This identity can not delete yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not delete yet.
DocID: 0,
@@ -316,9 +316,9 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -330,7 +330,7 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can now read.
+ Identity: testUtils.ClientIdentity(2), // This identity can now read.
Request: `
query {
@@ -356,13 +356,13 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // This identity can now delete.
+ Identity: testUtils.ClientIdentity(2), // This identity can now delete.
DocID: 0,
},
testUtils.Request{
- Identity: immutable.Some(2), // Check if actually deleted.
+ Identity: testUtils.ClientIdentity(2), // Check if actually deleted.
Request: `
query {
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_public_document_test.go b/tests/integration/acp/relationship/doc_actor/add/with_public_document_test.go
index 30c299e222..3a8a11087c 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_public_document_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_public_document_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -29,7 +27,7 @@ func TestACP_AddDocActorRelationshipWithPublicDocument_CanAlreadyAccess_Error(t
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -104,7 +102,7 @@ func TestACP_AddDocActorRelationshipWithPublicDocument_CanAlreadyAccess_Error(t
},
testUtils.Request{
- Identity: immutable.Some(2), // Can read as it is a public document
+ Identity: testUtils.ClientIdentity(2), // Can read as it is a public document
Request: `
query {
@@ -128,9 +126,9 @@ func TestACP_AddDocActorRelationshipWithPublicDocument_CanAlreadyAccess_Error(t
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_reader_gql_test.go b/tests/integration/acp/relationship/doc_actor/add/with_reader_gql_test.go
index e40661cede..f51861ec5c 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_reader_gql_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_reader_gql_test.go
@@ -34,7 +34,7 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_GQL_OtherActorCanReadButNotU
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -98,7 +98,7 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_GQL_OtherActorCanReadButNotU
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -111,7 +111,7 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_GQL_OtherActorCanReadButNotU
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not read yet.
Request: `
query {
@@ -131,7 +131,7 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_GQL_OtherActorCanReadButNotU
testUtils.UpdateDoc{ // Since it can't read, it can't update either.
CollectionID: 0,
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
DocID: 0,
@@ -145,9 +145,9 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_GQL_OtherActorCanReadButNotU
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -159,7 +159,7 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_GQL_OtherActorCanReadButNotU
},
testUtils.Request{
- Identity: immutable.Some(2), // Now this identity can read.
+ Identity: testUtils.ClientIdentity(2), // Now this identity can read.
Request: `
query {
@@ -185,7 +185,7 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_GQL_OtherActorCanReadButNotU
testUtils.UpdateDoc{ // But this actor still can't update.
CollectionID: 0,
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
DocID: 0,
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_reader_test.go b/tests/integration/acp/relationship/doc_actor/add/with_reader_test.go
index bac553d553..fd452c2d7d 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_reader_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_reader_test.go
@@ -29,7 +29,7 @@ func TestACP_OwnerGivesReadAccessToAnotherActorTwice_ShowThatTheRelationshipAlre
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -93,7 +93,7 @@ func TestACP_OwnerGivesReadAccessToAnotherActorTwice_ShowThatTheRelationshipAlre
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -106,7 +106,7 @@ func TestACP_OwnerGivesReadAccessToAnotherActorTwice_ShowThatTheRelationshipAlre
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not read yet.
Request: `
query {
@@ -124,9 +124,9 @@ func TestACP_OwnerGivesReadAccessToAnotherActorTwice_ShowThatTheRelationshipAlre
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -138,9 +138,9 @@ func TestACP_OwnerGivesReadAccessToAnotherActorTwice_ShowThatTheRelationshipAlre
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -166,7 +166,7 @@ func TestACP_OwnerGivesReadAccessToAnotherActor_OtherActorCanRead(t *testing.T)
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -230,7 +230,7 @@ func TestACP_OwnerGivesReadAccessToAnotherActor_OtherActorCanRead(t *testing.T)
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -243,7 +243,7 @@ func TestACP_OwnerGivesReadAccessToAnotherActor_OtherActorCanRead(t *testing.T)
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not read yet.
Request: `
query {
@@ -261,9 +261,9 @@ func TestACP_OwnerGivesReadAccessToAnotherActor_OtherActorCanRead(t *testing.T)
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -275,7 +275,7 @@ func TestACP_OwnerGivesReadAccessToAnotherActor_OtherActorCanRead(t *testing.T)
},
testUtils.Request{
- Identity: immutable.Some(2), // Now this identity can read.
+ Identity: testUtils.ClientIdentity(2), // Now this identity can read.
Request: `
query {
@@ -315,7 +315,7 @@ func TestACP_OwnerGivesReadAccessToAnotherActor_OtherActorCanReadSoCanTheOwner(t
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -379,7 +379,7 @@ func TestACP_OwnerGivesReadAccessToAnotherActor_OtherActorCanReadSoCanTheOwner(t
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -392,9 +392,9 @@ func TestACP_OwnerGivesReadAccessToAnotherActor_OtherActorCanReadSoCanTheOwner(t
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -406,7 +406,7 @@ func TestACP_OwnerGivesReadAccessToAnotherActor_OtherActorCanReadSoCanTheOwner(t
},
testUtils.Request{
- Identity: immutable.Some(2), // Now this identity can read.
+ Identity: testUtils.ClientIdentity(2), // Now this identity can read.
Request: `
query {
@@ -430,7 +430,7 @@ func TestACP_OwnerGivesReadAccessToAnotherActor_OtherActorCanReadSoCanTheOwner(t
},
testUtils.Request{
- Identity: immutable.Some(1), // And so can the owner (ownership not transferred).
+ Identity: testUtils.ClientIdentity(1), // And so can the owner (ownership not transferred).
Request: `
query {
@@ -473,7 +473,7 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_OtherActorCanReadButNotUpdat
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -537,7 +537,7 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_OtherActorCanReadButNotUpdat
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -550,7 +550,7 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_OtherActorCanReadButNotUpdat
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not read yet.
Request: `
query {
@@ -570,7 +570,7 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_OtherActorCanReadButNotUpdat
testUtils.UpdateDoc{ // Since it can't read, it can't update either.
CollectionID: 0,
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
DocID: 0,
@@ -584,9 +584,9 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_OtherActorCanReadButNotUpdat
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -598,7 +598,7 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_OtherActorCanReadButNotUpdat
},
testUtils.Request{
- Identity: immutable.Some(2), // Now this identity can read.
+ Identity: testUtils.ClientIdentity(2), // Now this identity can read.
Request: `
query {
@@ -624,7 +624,7 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_OtherActorCanReadButNotUpdat
testUtils.UpdateDoc{ // But this actor still can't update.
CollectionID: 0,
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
DocID: 0,
@@ -652,7 +652,7 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_OtherActorCanReadButNotDelet
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -716,7 +716,7 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_OtherActorCanReadButNotDelet
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -729,7 +729,7 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_OtherActorCanReadButNotDelet
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not read yet.
Request: `
query {
@@ -749,7 +749,7 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_OtherActorCanReadButNotDelet
testUtils.DeleteDoc{ // Since it can't read, it can't delete either.
CollectionID: 0,
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
DocID: 0,
@@ -757,9 +757,9 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_OtherActorCanReadButNotDelet
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -771,7 +771,7 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_OtherActorCanReadButNotDelet
},
testUtils.Request{
- Identity: immutable.Some(2), // Now this identity can read.
+ Identity: testUtils.ClientIdentity(2), // Now this identity can read.
Request: `
query {
@@ -797,7 +797,7 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_OtherActorCanReadButNotDelet
testUtils.DeleteDoc{ // But this actor still can't delete.
CollectionID: 0,
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
DocID: 0,
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_update_gql_test.go b/tests/integration/acp/relationship/doc_actor/add/with_update_gql_test.go
index d265b448c3..eff2be0f7d 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_update_gql_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_update_gql_test.go
@@ -34,7 +34,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorTwice_GQL_ShowThatTheRelat
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -98,7 +98,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorTwice_GQL_ShowThatTheRelat
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -111,7 +111,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorTwice_GQL_ShowThatTheRelat
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not read yet.
Request: `
query {
@@ -131,7 +131,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorTwice_GQL_ShowThatTheRelat
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // This identity can not update yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not update yet.
DocID: 0,
@@ -145,9 +145,9 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorTwice_GQL_ShowThatTheRelat
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -159,9 +159,9 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorTwice_GQL_ShowThatTheRelat
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -192,7 +192,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_GQL_OtherActorCanUpdate(t
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -256,7 +256,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_GQL_OtherActorCanUpdate(t
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -269,7 +269,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_GQL_OtherActorCanUpdate(t
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not read yet.
Request: `
query {
@@ -289,7 +289,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_GQL_OtherActorCanUpdate(t
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // This identity can not update yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not update yet.
DocID: 0,
@@ -303,9 +303,9 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_GQL_OtherActorCanUpdate(t
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -319,7 +319,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_GQL_OtherActorCanUpdate(t
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // This identity can now update.
+ Identity: testUtils.ClientIdentity(2), // This identity can now update.
DocID: 0,
@@ -331,7 +331,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_GQL_OtherActorCanUpdate(t
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can now also read.
+ Identity: testUtils.ClientIdentity(2), // This identity can now also read.
Request: `
query {
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_update_test.go b/tests/integration/acp/relationship/doc_actor/add/with_update_test.go
index de98f32b53..f6bf553356 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_update_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_update_test.go
@@ -34,7 +34,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorTwice_ShowThatTheRelations
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -98,7 +98,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorTwice_ShowThatTheRelations
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -111,7 +111,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorTwice_ShowThatTheRelations
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not read yet.
Request: `
query {
@@ -131,7 +131,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorTwice_ShowThatTheRelations
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // This identity can not update yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not update yet.
DocID: 0,
@@ -145,9 +145,9 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorTwice_ShowThatTheRelations
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -159,9 +159,9 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorTwice_ShowThatTheRelations
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -192,7 +192,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_OtherActorCanUpdate(t *te
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -256,7 +256,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_OtherActorCanUpdate(t *te
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -269,7 +269,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_OtherActorCanUpdate(t *te
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not read yet.
Request: `
query {
@@ -289,7 +289,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_OtherActorCanUpdate(t *te
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // This identity can not update yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not update yet.
DocID: 0,
@@ -303,9 +303,9 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_OtherActorCanUpdate(t *te
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -319,7 +319,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_OtherActorCanUpdate(t *te
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // This identity can now update.
+ Identity: testUtils.ClientIdentity(2), // This identity can now update.
DocID: 0,
@@ -331,7 +331,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_OtherActorCanUpdate(t *te
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can now also read.
+ Identity: testUtils.ClientIdentity(2), // This identity can now also read.
Request: `
query {
@@ -369,7 +369,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_OtherActorCanUpdateSoCanT
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -433,7 +433,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_OtherActorCanUpdateSoCanT
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -446,9 +446,9 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_OtherActorCanUpdateSoCanT
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -462,7 +462,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_OtherActorCanUpdateSoCanT
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // This identity can now update.
+ Identity: testUtils.ClientIdentity(2), // This identity can now update.
DocID: 0,
@@ -474,7 +474,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_OtherActorCanUpdateSoCanT
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can now also read.
+ Identity: testUtils.ClientIdentity(2), // This identity can now also read.
Request: `
query {
@@ -500,7 +500,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_OtherActorCanUpdateSoCanT
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(1), // Owner can still also update (ownership not transferred)
+ Identity: testUtils.ClientIdentity(1), // Owner can still also update (ownership not transferred)
DocID: 0,
@@ -512,7 +512,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_OtherActorCanUpdateSoCanT
},
testUtils.Request{
- Identity: immutable.Some(2), // Owner can still also read (ownership not transferred)
+ Identity: testUtils.ClientIdentity(2), // Owner can still also read (ownership not transferred)
Request: `
query {
diff --git a/tests/integration/acp/relationship/doc_actor/delete/invalid_test.go b/tests/integration/acp/relationship/doc_actor/delete/invalid_test.go
index 41cb6e4921..71bdcc9094 100644
--- a/tests/integration/acp/relationship/doc_actor/delete/invalid_test.go
+++ b/tests/integration/acp/relationship/doc_actor/delete/invalid_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -29,7 +27,7 @@ func TestACP_DeleteDocActorRelationshipMissingDocID_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -93,7 +91,7 @@ func TestACP_DeleteDocActorRelationshipMissingDocID_Error(t *testing.T) {
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -106,9 +104,9 @@ func TestACP_DeleteDocActorRelationshipMissingDocID_Error(t *testing.T) {
},
testUtils.DeleteDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -134,7 +132,7 @@ func TestACP_DeleteDocActorRelationshipMissingCollection_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -198,7 +196,7 @@ func TestACP_DeleteDocActorRelationshipMissingCollection_Error(t *testing.T) {
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -211,9 +209,9 @@ func TestACP_DeleteDocActorRelationshipMissingCollection_Error(t *testing.T) {
},
testUtils.DeleteDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: -1,
@@ -239,7 +237,7 @@ func TestACP_DeleteDocActorRelationshipMissingRelationName_Error(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -303,7 +301,7 @@ func TestACP_DeleteDocActorRelationshipMissingRelationName_Error(t *testing.T) {
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -316,9 +314,9 @@ func TestACP_DeleteDocActorRelationshipMissingRelationName_Error(t *testing.T) {
},
testUtils.DeleteDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -344,7 +342,7 @@ func TestACP_DeleteDocActorRelationshipMissingTargetActorName_Error(t *testing.T
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -408,7 +406,7 @@ func TestACP_DeleteDocActorRelationshipMissingTargetActorName_Error(t *testing.T
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -421,9 +419,9 @@ func TestACP_DeleteDocActorRelationshipMissingTargetActorName_Error(t *testing.T
},
testUtils.DeleteDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: -1,
+ TargetIdentity: testUtils.NoIdentity(),
CollectionID: 0,
@@ -449,7 +447,7 @@ func TestACP_DeleteDocActorRelationshipMissingReqestingIdentityName_Error(t *tes
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -513,7 +511,7 @@ func TestACP_DeleteDocActorRelationshipMissingReqestingIdentityName_Error(t *tes
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -526,9 +524,9 @@ func TestACP_DeleteDocActorRelationshipMissingReqestingIdentityName_Error(t *tes
},
testUtils.DeleteDocActorRelationship{
- RequestorIdentity: -1,
+ RequestorIdentity: testUtils.NoIdentity(),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
diff --git a/tests/integration/acp/relationship/doc_actor/delete/with_delete_test.go b/tests/integration/acp/relationship/doc_actor/delete/with_delete_test.go
index d931a7049b..6857f4de16 100644
--- a/tests/integration/acp/relationship/doc_actor/delete/with_delete_test.go
+++ b/tests/integration/acp/relationship/doc_actor/delete/with_delete_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -29,7 +27,7 @@ func TestACP_OwnerRevokesDeleteWriteAccess_OtherActorCanNoLongerDelete(t *testin
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -94,7 +92,7 @@ func TestACP_OwnerRevokesDeleteWriteAccess_OtherActorCanNoLongerDelete(t *testin
// Creating two documents because need one to do the test on after one is deleted.
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -106,7 +104,7 @@ func TestACP_OwnerRevokesDeleteWriteAccess_OtherActorCanNoLongerDelete(t *testin
`,
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -120,9 +118,9 @@ func TestACP_OwnerRevokesDeleteWriteAccess_OtherActorCanNoLongerDelete(t *testin
// Give access to the other actor to delete and read both documents.
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -133,9 +131,9 @@ func TestACP_OwnerRevokesDeleteWriteAccess_OtherActorCanNoLongerDelete(t *testin
ExpectedExistence: false,
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -148,7 +146,7 @@ func TestACP_OwnerRevokesDeleteWriteAccess_OtherActorCanNoLongerDelete(t *testin
// Now the other identity can read both and delete both of those documents
testUtils.Request{
- Identity: immutable.Some(2), // This identity can read.
+ Identity: testUtils.ClientIdentity(2), // This identity can read.
Request: `
query {
@@ -176,15 +174,15 @@ func TestACP_OwnerRevokesDeleteWriteAccess_OtherActorCanNoLongerDelete(t *testin
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // This identity can also delete.
+ Identity: testUtils.ClientIdentity(2), // This identity can also delete.
DocID: 1,
},
testUtils.DeleteDocActorRelationship{ // Revoke access from being able to delete (and read) the document.
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -197,7 +195,7 @@ func TestACP_OwnerRevokesDeleteWriteAccess_OtherActorCanNoLongerDelete(t *testin
// The other identity can neither delete nor read the other document anymore.
testUtils.Request{
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Request: `
query {
@@ -216,7 +214,7 @@ func TestACP_OwnerRevokesDeleteWriteAccess_OtherActorCanNoLongerDelete(t *testin
testUtils.DeleteDoc{
CollectionID: 0,
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
DocID: 0,
@@ -225,7 +223,7 @@ func TestACP_OwnerRevokesDeleteWriteAccess_OtherActorCanNoLongerDelete(t *testin
// Ensure document was not accidentally deleted using owner identity.
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
diff --git a/tests/integration/acp/relationship/doc_actor/delete/with_dummy_relation_test.go b/tests/integration/acp/relationship/doc_actor/delete/with_dummy_relation_test.go
index 190850dfdd..e9e42b9f42 100644
--- a/tests/integration/acp/relationship/doc_actor/delete/with_dummy_relation_test.go
+++ b/tests/integration/acp/relationship/doc_actor/delete/with_dummy_relation_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -29,7 +27,7 @@ func TestACP_DeleteDocActorRelationshipWithDummyRelationDefinedOnPolicy_NothingC
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -93,7 +91,7 @@ func TestACP_DeleteDocActorRelationshipWithDummyRelationDefinedOnPolicy_NothingC
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -106,7 +104,7 @@ func TestACP_DeleteDocActorRelationshipWithDummyRelationDefinedOnPolicy_NothingC
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not read yet.
Request: `
query {
@@ -124,9 +122,9 @@ func TestACP_DeleteDocActorRelationshipWithDummyRelationDefinedOnPolicy_NothingC
},
testUtils.DeleteDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -138,7 +136,7 @@ func TestACP_DeleteDocActorRelationshipWithDummyRelationDefinedOnPolicy_NothingC
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can still not read.
+ Identity: testUtils.ClientIdentity(2), // This identity can still not read.
Request: `
query {
@@ -170,7 +168,7 @@ func TestACP_DeleteDocActorRelationshipWithDummyRelationNotDefinedOnPolicy_Error
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -234,7 +232,7 @@ func TestACP_DeleteDocActorRelationshipWithDummyRelationNotDefinedOnPolicy_Error
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -247,7 +245,7 @@ func TestACP_DeleteDocActorRelationshipWithDummyRelationNotDefinedOnPolicy_Error
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can not read yet.
+ Identity: testUtils.ClientIdentity(2), // This identity can not read yet.
Request: `
query {
@@ -265,9 +263,9 @@ func TestACP_DeleteDocActorRelationshipWithDummyRelationNotDefinedOnPolicy_Error
},
testUtils.DeleteDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -279,7 +277,7 @@ func TestACP_DeleteDocActorRelationshipWithDummyRelationNotDefinedOnPolicy_Error
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can still not read.
+ Identity: testUtils.ClientIdentity(2), // This identity can still not read.
Request: `
query {
diff --git a/tests/integration/acp/relationship/doc_actor/delete/with_manager_test.go b/tests/integration/acp/relationship/doc_actor/delete/with_manager_test.go
index fd841c562a..779e2d6e62 100644
--- a/tests/integration/acp/relationship/doc_actor/delete/with_manager_test.go
+++ b/tests/integration/acp/relationship/doc_actor/delete/with_manager_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -29,7 +27,7 @@ func TestACP_ManagerRevokesReadAccess_OtherActorCanNoLongerRead(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -93,7 +91,7 @@ func TestACP_ManagerRevokesReadAccess_OtherActorCanNoLongerRead(t *testing.T) {
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -106,9 +104,9 @@ func TestACP_ManagerRevokesReadAccess_OtherActorCanNoLongerRead(t *testing.T) {
},
testUtils.AddDocActorRelationship{ // Owner makes admin / manager
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -120,9 +118,9 @@ func TestACP_ManagerRevokesReadAccess_OtherActorCanNoLongerRead(t *testing.T) {
},
testUtils.AddDocActorRelationship{ // Owner gives an actor read access
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 3,
+ TargetIdentity: testUtils.ClientIdentity(3),
CollectionID: 0,
@@ -134,7 +132,7 @@ func TestACP_ManagerRevokesReadAccess_OtherActorCanNoLongerRead(t *testing.T) {
},
testUtils.Request{
- Identity: immutable.Some(3), // The other actor can read
+ Identity: testUtils.ClientIdentity(3), // The other actor can read
Request: `
query {
@@ -156,9 +154,9 @@ func TestACP_ManagerRevokesReadAccess_OtherActorCanNoLongerRead(t *testing.T) {
},
testUtils.DeleteDocActorRelationship{ // Admin revokes access of the other actor that could read.
- RequestorIdentity: 2,
+ RequestorIdentity: testUtils.ClientIdentity(2),
- TargetIdentity: 3,
+ TargetIdentity: testUtils.ClientIdentity(3),
CollectionID: 0,
@@ -171,7 +169,7 @@ func TestACP_ManagerRevokesReadAccess_OtherActorCanNoLongerRead(t *testing.T) {
// The other actor can no longer read.
testUtils.Request{
- Identity: immutable.Some(3),
+ Identity: testUtils.ClientIdentity(3),
Request: `
query {
@@ -202,7 +200,7 @@ func TestACP_OwnerRevokesManagersAccess_ManagerCanNoLongerManageOthers(t *testin
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -266,7 +264,7 @@ func TestACP_OwnerRevokesManagersAccess_ManagerCanNoLongerManageOthers(t *testin
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -279,9 +277,9 @@ func TestACP_OwnerRevokesManagersAccess_ManagerCanNoLongerManageOthers(t *testin
},
testUtils.AddDocActorRelationship{ // Owner makes admin / manager
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -293,9 +291,9 @@ func TestACP_OwnerRevokesManagersAccess_ManagerCanNoLongerManageOthers(t *testin
},
testUtils.AddDocActorRelationship{ // Manager gives an actor read access
- RequestorIdentity: 2,
+ RequestorIdentity: testUtils.ClientIdentity(2),
- TargetIdentity: 3,
+ TargetIdentity: testUtils.ClientIdentity(3),
CollectionID: 0,
@@ -307,7 +305,7 @@ func TestACP_OwnerRevokesManagersAccess_ManagerCanNoLongerManageOthers(t *testin
},
testUtils.Request{
- Identity: immutable.Some(3), // The other actor can read
+ Identity: testUtils.ClientIdentity(3), // The other actor can read
Request: `
query {
@@ -329,9 +327,9 @@ func TestACP_OwnerRevokesManagersAccess_ManagerCanNoLongerManageOthers(t *testin
},
testUtils.DeleteDocActorRelationship{ // Admin revokes access of the admin.
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -343,9 +341,9 @@ func TestACP_OwnerRevokesManagersAccess_ManagerCanNoLongerManageOthers(t *testin
},
testUtils.AddDocActorRelationship{ // Manager can no longer grant read access.
- RequestorIdentity: 2,
+ RequestorIdentity: testUtils.ClientIdentity(2),
- TargetIdentity: 4, // This identity has no access previously.
+ TargetIdentity: testUtils.ClientIdentity(4), // This identity has no access previously.
CollectionID: 0,
@@ -357,7 +355,7 @@ func TestACP_OwnerRevokesManagersAccess_ManagerCanNoLongerManageOthers(t *testin
},
testUtils.Request{
- Identity: immutable.Some(4), // The other actor can ofcourse still not read.
+ Identity: testUtils.ClientIdentity(4), // The other actor can ofcourse still not read.
Request: `
query {
@@ -388,7 +386,7 @@ func TestACP_AdminTriesToRevokeOwnersAccess_NotAllowedError(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -452,7 +450,7 @@ func TestACP_AdminTriesToRevokeOwnersAccess_NotAllowedError(t *testing.T) {
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -465,9 +463,9 @@ func TestACP_AdminTriesToRevokeOwnersAccess_NotAllowedError(t *testing.T) {
},
testUtils.AddDocActorRelationship{ // Owner makes admin / manager
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -479,9 +477,9 @@ func TestACP_AdminTriesToRevokeOwnersAccess_NotAllowedError(t *testing.T) {
},
testUtils.DeleteDocActorRelationship{ // Admin tries to revoke owners `owner` relation.
- RequestorIdentity: 2,
+ RequestorIdentity: testUtils.ClientIdentity(2),
- TargetIdentity: 1,
+ TargetIdentity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -493,9 +491,9 @@ func TestACP_AdminTriesToRevokeOwnersAccess_NotAllowedError(t *testing.T) {
},
testUtils.DeleteDocActorRelationship{ // Owner can still perform owner operations, like restrict admin.
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -507,7 +505,7 @@ func TestACP_AdminTriesToRevokeOwnersAccess_NotAllowedError(t *testing.T) {
},
testUtils.Request{
- Identity: immutable.Some(1), // The owner can still read
+ Identity: testUtils.ClientIdentity(1), // The owner can still read
Request: `
query {
diff --git a/tests/integration/acp/relationship/doc_actor/delete/with_no_policy_on_collection_test.go b/tests/integration/acp/relationship/doc_actor/delete/with_no_policy_on_collection_test.go
index 3039d32e5f..467759f4fd 100644
--- a/tests/integration/acp/relationship/doc_actor/delete/with_no_policy_on_collection_test.go
+++ b/tests/integration/acp/relationship/doc_actor/delete/with_no_policy_on_collection_test.go
@@ -13,8 +13,6 @@ package test_acp_relationship_doc_actor_delete
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -34,7 +32,7 @@ func TestACP_DeleteDocActorRelationshipWithCollectionThatHasNoPolicy_NotAllowedE
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -47,9 +45,9 @@ func TestACP_DeleteDocActorRelationshipWithCollectionThatHasNoPolicy_NotAllowedE
},
testUtils.DeleteDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
diff --git a/tests/integration/acp/relationship/doc_actor/delete/with_public_document_test.go b/tests/integration/acp/relationship/doc_actor/delete/with_public_document_test.go
index fa071c6806..906055c89b 100644
--- a/tests/integration/acp/relationship/doc_actor/delete/with_public_document_test.go
+++ b/tests/integration/acp/relationship/doc_actor/delete/with_public_document_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -29,7 +27,7 @@ func TestACP_DeleteDocActorRelationshipWithPublicDocument_CanAlreadyAccess_Error
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -104,7 +102,7 @@ func TestACP_DeleteDocActorRelationshipWithPublicDocument_CanAlreadyAccess_Error
},
testUtils.Request{
- Identity: immutable.Some(2), // Can read as it is a public document
+ Identity: testUtils.ClientIdentity(2), // Can read as it is a public document
Request: `
query {
@@ -128,9 +126,9 @@ func TestACP_DeleteDocActorRelationshipWithPublicDocument_CanAlreadyAccess_Error
},
testUtils.DeleteDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
diff --git a/tests/integration/acp/relationship/doc_actor/delete/with_reader_test.go b/tests/integration/acp/relationship/doc_actor/delete/with_reader_test.go
index 58b74e4dc1..52472ea897 100644
--- a/tests/integration/acp/relationship/doc_actor/delete/with_reader_test.go
+++ b/tests/integration/acp/relationship/doc_actor/delete/with_reader_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -29,7 +27,7 @@ func TestACP_OwnerRevokesReadAccessTwice_ShowThatTheRecordWasNotFoundSecondTime(
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -93,7 +91,7 @@ func TestACP_OwnerRevokesReadAccessTwice_ShowThatTheRecordWasNotFoundSecondTime(
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -106,9 +104,9 @@ func TestACP_OwnerRevokesReadAccessTwice_ShowThatTheRecordWasNotFoundSecondTime(
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -120,9 +118,9 @@ func TestACP_OwnerRevokesReadAccessTwice_ShowThatTheRecordWasNotFoundSecondTime(
},
testUtils.DeleteDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -134,9 +132,9 @@ func TestACP_OwnerRevokesReadAccessTwice_ShowThatTheRecordWasNotFoundSecondTime(
},
testUtils.DeleteDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -162,7 +160,7 @@ func TestACP_OwnerRevokesGivenReadAccess_OtherActorCanNoLongerRead(t *testing.T)
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -226,7 +224,7 @@ func TestACP_OwnerRevokesGivenReadAccess_OtherActorCanNoLongerRead(t *testing.T)
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -239,9 +237,9 @@ func TestACP_OwnerRevokesGivenReadAccess_OtherActorCanNoLongerRead(t *testing.T)
},
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -253,7 +251,7 @@ func TestACP_OwnerRevokesGivenReadAccess_OtherActorCanNoLongerRead(t *testing.T)
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can read.
+ Identity: testUtils.ClientIdentity(2), // This identity can read.
Request: `
query {
@@ -277,9 +275,9 @@ func TestACP_OwnerRevokesGivenReadAccess_OtherActorCanNoLongerRead(t *testing.T)
},
testUtils.DeleteDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -291,7 +289,7 @@ func TestACP_OwnerRevokesGivenReadAccess_OtherActorCanNoLongerRead(t *testing.T)
},
testUtils.Request{
- Identity: immutable.Some(2), // This identity can not read anymore.
+ Identity: testUtils.ClientIdentity(2), // This identity can not read anymore.
Request: `
query {
diff --git a/tests/integration/acp/relationship/doc_actor/delete/with_self_test.go b/tests/integration/acp/relationship/doc_actor/delete/with_self_test.go
index 563359fcd4..a732284a0c 100644
--- a/tests/integration/acp/relationship/doc_actor/delete/with_self_test.go
+++ b/tests/integration/acp/relationship/doc_actor/delete/with_self_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -29,7 +27,7 @@ func TestACP_AdminTriesToRevokeItsOwnAccess_NotAllowedError(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -93,7 +91,7 @@ func TestACP_AdminTriesToRevokeItsOwnAccess_NotAllowedError(t *testing.T) {
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -106,9 +104,9 @@ func TestACP_AdminTriesToRevokeItsOwnAccess_NotAllowedError(t *testing.T) {
},
testUtils.AddDocActorRelationship{ // Owner makes admin / manager
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -120,9 +118,9 @@ func TestACP_AdminTriesToRevokeItsOwnAccess_NotAllowedError(t *testing.T) {
},
testUtils.DeleteDocActorRelationship{ // Admin tries to revoke it's own relation.
- RequestorIdentity: 2,
+ RequestorIdentity: testUtils.ClientIdentity(2),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -134,9 +132,9 @@ func TestACP_AdminTriesToRevokeItsOwnAccess_NotAllowedError(t *testing.T) {
},
testUtils.AddDocActorRelationship{ // Admin can still perform admin operations.
- RequestorIdentity: 2,
+ RequestorIdentity: testUtils.ClientIdentity(2),
- TargetIdentity: 3,
+ TargetIdentity: testUtils.ClientIdentity(3),
CollectionID: 0,
@@ -162,7 +160,7 @@ func TestACP_OwnerTriesToRevokeItsOwnAccess_NotAllowedError(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -226,7 +224,7 @@ func TestACP_OwnerTriesToRevokeItsOwnAccess_NotAllowedError(t *testing.T) {
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -239,9 +237,9 @@ func TestACP_OwnerTriesToRevokeItsOwnAccess_NotAllowedError(t *testing.T) {
},
testUtils.DeleteDocActorRelationship{ // Owner tries to revoke it's own relation.
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 1,
+ TargetIdentity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -253,9 +251,9 @@ func TestACP_OwnerTriesToRevokeItsOwnAccess_NotAllowedError(t *testing.T) {
},
testUtils.AddDocActorRelationship{ // Owner can still perform admin operations.
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
diff --git a/tests/integration/acp/relationship/doc_actor/delete/with_update_test.go b/tests/integration/acp/relationship/doc_actor/delete/with_update_test.go
index e51edc22ca..22777e240d 100644
--- a/tests/integration/acp/relationship/doc_actor/delete/with_update_test.go
+++ b/tests/integration/acp/relationship/doc_actor/delete/with_update_test.go
@@ -34,7 +34,7 @@ func TestACP_OwnerRevokesUpdateWriteAccess_OtherActorCanNoLongerUpdate(t *testin
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -98,7 +98,7 @@ func TestACP_OwnerRevokesUpdateWriteAccess_OtherActorCanNoLongerUpdate(t *testin
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -112,9 +112,9 @@ func TestACP_OwnerRevokesUpdateWriteAccess_OtherActorCanNoLongerUpdate(t *testin
// Give access to the other actor to update and read the document.
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -128,7 +128,7 @@ func TestACP_OwnerRevokesUpdateWriteAccess_OtherActorCanNoLongerUpdate(t *testin
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // This identity can update.
+ Identity: testUtils.ClientIdentity(2), // This identity can update.
DocID: 0,
@@ -141,7 +141,7 @@ func TestACP_OwnerRevokesUpdateWriteAccess_OtherActorCanNoLongerUpdate(t *testin
// Ensure the other identity can read and update the document.
testUtils.Request{
- Identity: immutable.Some(2), // This identity can also read.
+ Identity: testUtils.ClientIdentity(2), // This identity can also read.
Request: `
query {
@@ -163,9 +163,9 @@ func TestACP_OwnerRevokesUpdateWriteAccess_OtherActorCanNoLongerUpdate(t *testin
},
testUtils.DeleteDocActorRelationship{ // Revoke access from being able to update (and read) the document.
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -178,7 +178,7 @@ func TestACP_OwnerRevokesUpdateWriteAccess_OtherActorCanNoLongerUpdate(t *testin
// The other identity can neither update nor read the other document anymore.
testUtils.Request{
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Request: `
query {
@@ -197,7 +197,7 @@ func TestACP_OwnerRevokesUpdateWriteAccess_OtherActorCanNoLongerUpdate(t *testin
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
DocID: 0,
@@ -212,7 +212,7 @@ func TestACP_OwnerRevokesUpdateWriteAccess_OtherActorCanNoLongerUpdate(t *testin
// Ensure document was not accidentally updated using owner identity.
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
@@ -253,7 +253,7 @@ func TestACP_OwnerRevokesUpdateWriteAccess_GQL_OtherActorCanNoLongerUpdate(t *te
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: Test Policy
@@ -317,7 +317,7 @@ func TestACP_OwnerRevokesUpdateWriteAccess_GQL_OtherActorCanNoLongerUpdate(t *te
},
testUtils.CreateDoc{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
CollectionID: 0,
@@ -331,9 +331,9 @@ func TestACP_OwnerRevokesUpdateWriteAccess_GQL_OtherActorCanNoLongerUpdate(t *te
// Give access to the other actor to update and read the document.
testUtils.AddDocActorRelationship{
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -347,7 +347,7 @@ func TestACP_OwnerRevokesUpdateWriteAccess_GQL_OtherActorCanNoLongerUpdate(t *te
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2), // This identity can update.
+ Identity: testUtils.ClientIdentity(2), // This identity can update.
DocID: 0,
@@ -360,7 +360,7 @@ func TestACP_OwnerRevokesUpdateWriteAccess_GQL_OtherActorCanNoLongerUpdate(t *te
// Ensure the other identity can read and update the document.
testUtils.Request{
- Identity: immutable.Some(2), // This identity can also read.
+ Identity: testUtils.ClientIdentity(2), // This identity can also read.
Request: `
query {
@@ -382,9 +382,9 @@ func TestACP_OwnerRevokesUpdateWriteAccess_GQL_OtherActorCanNoLongerUpdate(t *te
},
testUtils.DeleteDocActorRelationship{ // Revoke access from being able to update (and read) the document.
- RequestorIdentity: 1,
+ RequestorIdentity: testUtils.ClientIdentity(1),
- TargetIdentity: 2,
+ TargetIdentity: testUtils.ClientIdentity(2),
CollectionID: 0,
@@ -397,7 +397,7 @@ func TestACP_OwnerRevokesUpdateWriteAccess_GQL_OtherActorCanNoLongerUpdate(t *te
// The other identity can neither update nor read the other document anymore.
testUtils.Request{
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Request: `
query {
@@ -416,7 +416,7 @@ func TestACP_OwnerRevokesUpdateWriteAccess_GQL_OtherActorCanNoLongerUpdate(t *te
testUtils.UpdateDoc{
CollectionID: 0,
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
DocID: 0,
@@ -431,7 +431,7 @@ func TestACP_OwnerRevokesUpdateWriteAccess_GQL_OtherActorCanNoLongerUpdate(t *te
// Ensure document was not accidentally updated using owner identity.
testUtils.Request{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Request: `
query {
diff --git a/tests/integration/acp/schema/add_dpi/accept_basic_dpi_fmts_test.go b/tests/integration/acp/schema/add_dpi/accept_basic_dpi_fmts_test.go
index 5104309f22..6249dbb77b 100644
--- a/tests/integration/acp/schema/add_dpi/accept_basic_dpi_fmts_test.go
+++ b/tests/integration/acp/schema/add_dpi/accept_basic_dpi_fmts_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema"
)
@@ -30,7 +28,7 @@ func TestACP_AddDPISchema_BasicYAML_SchemaAccepted(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -125,7 +123,7 @@ func TestACP_AddDPISchema_BasicJSON_SchemaAccepted(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
{
diff --git a/tests/integration/acp/schema/add_dpi/accept_extra_permissions_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/accept_extra_permissions_on_dpi_test.go
index 4d6409c026..3b4c7d711c 100644
--- a/tests/integration/acp/schema/add_dpi/accept_extra_permissions_on_dpi_test.go
+++ b/tests/integration/acp/schema/add_dpi/accept_extra_permissions_on_dpi_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema"
)
@@ -31,7 +29,7 @@ func TestACP_AddDPISchema_WithExtraPermsHavingRequiredRelation_AcceptSchema(t *t
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -131,7 +129,7 @@ func TestACP_AddDPISchema_WithExtraPermsHavingRequiredRelationInTheEnd_AcceptSch
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -231,7 +229,7 @@ func TestACP_AddDPISchema_WithExtraPermsHavingNoRequiredRelation_AcceptSchema(t
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/schema/add_dpi/accept_managed_relation_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/accept_managed_relation_on_dpi_test.go
index 42eed6b876..83b92a8721 100644
--- a/tests/integration/acp/schema/add_dpi/accept_managed_relation_on_dpi_test.go
+++ b/tests/integration/acp/schema/add_dpi/accept_managed_relation_on_dpi_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema"
)
@@ -31,7 +29,7 @@ func TestACP_AddDPISchema_WithManagedRelation_AcceptSchemas(t *testing.T) {
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/schema/add_dpi/accept_mixed_resources_on_partial_dpi_test.go b/tests/integration/acp/schema/add_dpi/accept_mixed_resources_on_partial_dpi_test.go
index 288e3ecfa3..40b62afef9 100644
--- a/tests/integration/acp/schema/add_dpi/accept_mixed_resources_on_partial_dpi_test.go
+++ b/tests/integration/acp/schema/add_dpi/accept_mixed_resources_on_partial_dpi_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema"
)
@@ -31,7 +29,7 @@ func TestACP_AddDPISchema_PartialValidDPIButUseOnlyValidDPIResource_AcceptSchema
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/schema/add_dpi/accept_multi_dpis_test.go b/tests/integration/acp/schema/add_dpi/accept_multi_dpis_test.go
index db64e70e8d..b7aa43d22c 100644
--- a/tests/integration/acp/schema/add_dpi/accept_multi_dpis_test.go
+++ b/tests/integration/acp/schema/add_dpi/accept_multi_dpis_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema"
)
@@ -56,7 +54,7 @@ func TestACP_AddDPISchema_AddDuplicateDPIsByOtherCreatorsUseBoth_AcceptSchema(t
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: validDPIUsedByBoth,
@@ -65,7 +63,7 @@ func TestACP_AddDPISchema_AddDuplicateDPIsByOtherCreatorsUseBoth_AcceptSchema(t
testUtils.AddPolicy{
- Identity: immutable.Some(2),
+ Identity: testUtils.ClientIdentity(2),
Policy: validDPIUsedByBoth,
diff --git a/tests/integration/acp/schema/add_dpi/accept_multi_resources_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/accept_multi_resources_on_dpi_test.go
index a8da38040e..2e9a74a32e 100644
--- a/tests/integration/acp/schema/add_dpi/accept_multi_resources_on_dpi_test.go
+++ b/tests/integration/acp/schema/add_dpi/accept_multi_resources_on_dpi_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema"
)
@@ -31,7 +29,7 @@ func TestACP_AddDPISchema_WithMultipleResources_AcceptSchema(t *testing.T) {
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -141,7 +139,7 @@ func TestACP_AddDPISchema_WithMultipleResourcesBothBeingUsed_AcceptSchema(t *tes
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/schema/add_dpi/accept_same_resource_on_diff_schemas_test.go b/tests/integration/acp/schema/add_dpi/accept_same_resource_on_diff_schemas_test.go
index 812b5ba154..f296d98eee 100644
--- a/tests/integration/acp/schema/add_dpi/accept_same_resource_on_diff_schemas_test.go
+++ b/tests/integration/acp/schema/add_dpi/accept_same_resource_on_diff_schemas_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema"
)
@@ -32,7 +30,7 @@ func TestACP_AddDPISchema_UseSameResourceOnDifferentSchemas_AcceptSchemas(t *tes
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/schema/add_dpi/reject_empty_arg_on_schema_test.go b/tests/integration/acp/schema/add_dpi/reject_empty_arg_on_schema_test.go
index bde886d7de..7a40c69bc5 100644
--- a/tests/integration/acp/schema/add_dpi/reject_empty_arg_on_schema_test.go
+++ b/tests/integration/acp/schema/add_dpi/reject_empty_arg_on_schema_test.go
@@ -13,8 +13,6 @@ package test_acp_schema_add_dpi
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -29,7 +27,7 @@ func TestACP_AddDPISchema_NoArgWasSpecifiedOnSchema_SchemaRejected(t *testing.T)
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -104,7 +102,7 @@ func TestACP_AddDPISchema_SpecifiedArgsAreEmptyOnSchema_SchemaRejected(t *testin
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_arg_type_on_schema_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_arg_type_on_schema_test.go
index efb05fca7b..0d8b8c8e60 100644
--- a/tests/integration/acp/schema/add_dpi/reject_invalid_arg_type_on_schema_test.go
+++ b/tests/integration/acp/schema/add_dpi/reject_invalid_arg_type_on_schema_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -30,7 +28,7 @@ func TestACP_AddDPISchema_InvalidPolicyIDArgTypeWasSpecifiedOnSchema_SchemaRejec
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -105,7 +103,7 @@ func TestACP_AddDPISchema_InvalidResourceArgTypeWasSpecifiedOnSchema_SchemaRejec
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_on_dpi_test.go
index 74747b9fb3..c56f6a8f8a 100644
--- a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_on_dpi_test.go
+++ b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_on_dpi_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -30,7 +28,7 @@ func TestACP_AddDPISchema_OwnerMissingRequiredReadPermissionOnDPI_SchemaRejected
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -115,7 +113,7 @@ func TestACP_AddDPISchema_OwnerMissingRequiredReadPermissionLabelOnDPI_SchemaRej
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -199,7 +197,7 @@ func TestACP_AddDPISchema_OwnerSpecifiedIncorrectlyOnReadPermissionExprOnDPI_Sch
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -284,7 +282,7 @@ func TestACP_AddDPISchema_OwnerSpecifiedIncorrectlyOnReadPermissionNoSpaceExprOn
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -369,7 +367,7 @@ func TestACP_AddDPISchema_MaliciousOwnerSpecifiedOnReadPermissionExprOnDPI_Schem
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_symbol_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_symbol_on_dpi_test.go
index 5c52c37aeb..a540b98ddd 100644
--- a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_symbol_on_dpi_test.go
+++ b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_symbol_on_dpi_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -30,7 +28,7 @@ func TestACP_AddDPISchema_OwnerRelationWithDifferenceSetOpOnReadPermissionExprOn
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -116,7 +114,7 @@ func TestACP_AddDPISchema_OwnerRelationWithIntersectionSetOpOnReadPermissionExpr
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -202,7 +200,7 @@ func TestACP_AddDPISchema_OwnerRelationWithInvalidSetOpOnReadPermissionExprOnDPI
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_on_dpi_test.go
index a2cf05fc27..c203ed6eb0 100644
--- a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_on_dpi_test.go
+++ b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_on_dpi_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -30,7 +28,7 @@ func TestACP_AddDPISchema_OwnerMissingRequiredWritePermissionOnDPI_SchemaRejecte
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -115,7 +113,7 @@ func TestACP_AddDPISchema_OwnerMissingRequiredWritePermissionLabelOnDPI_SchemaRe
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -199,7 +197,7 @@ func TestACP_AddDPISchema_OwnerSpecifiedIncorrectlyOnWritePermissionExprOnDPI_Sc
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -284,7 +282,7 @@ func TestACP_AddDPISchema_OwnerSpecifiedIncorrectlyOnWritePermissionNoSpaceExprO
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -369,7 +367,7 @@ func TestACP_AddDPISchema_MaliciousOwnerSpecifiedOnWritePermissionExprOnDPI_Sche
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_symbol_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_symbol_on_dpi_test.go
index 1c523eeb68..cb5c898c75 100644
--- a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_symbol_on_dpi_test.go
+++ b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_symbol_on_dpi_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -30,7 +28,7 @@ func TestACP_AddDPISchema_OwnerRelationWithDifferenceSetOpOnWritePermissionExprO
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -116,7 +114,7 @@ func TestACP_AddDPISchema_OwnerRelationWithIntersectionSetOpOnWritePermissionExp
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -202,7 +200,7 @@ func TestACP_AddDPISchema_OwnerRelationWithInvalidSetOpOnWritePermissionExprOnDP
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/schema/add_dpi/reject_missing_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_missing_dpi_test.go
index df40f3b202..7c2bf74406 100644
--- a/tests/integration/acp/schema/add_dpi/reject_missing_dpi_test.go
+++ b/tests/integration/acp/schema/add_dpi/reject_missing_dpi_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -81,7 +79,7 @@ func TestACP_AddDPISchema_WhereAPolicyWasAddedButLinkedPolicyWasNotAdded_SchemaR
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/schema/add_dpi/reject_missing_id_arg_on_schema_test.go b/tests/integration/acp/schema/add_dpi/reject_missing_id_arg_on_schema_test.go
index 7a5dc39f3a..2dd775a84f 100644
--- a/tests/integration/acp/schema/add_dpi/reject_missing_id_arg_on_schema_test.go
+++ b/tests/integration/acp/schema/add_dpi/reject_missing_id_arg_on_schema_test.go
@@ -13,8 +13,6 @@ package test_acp_schema_add_dpi
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -29,7 +27,7 @@ func TestACP_AddDPISchema_NoPolicyIDWasSpecifiedOnSchema_SchemaRejected(t *testi
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -104,7 +102,7 @@ func TestACP_AddDPISchema_SpecifiedPolicyIDArgIsEmptyOnSchema_SchemaRejected(t *
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/schema/add_dpi/reject_missing_perms_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_missing_perms_on_dpi_test.go
index 0b93b6df16..8afbced697 100644
--- a/tests/integration/acp/schema/add_dpi/reject_missing_perms_on_dpi_test.go
+++ b/tests/integration/acp/schema/add_dpi/reject_missing_perms_on_dpi_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -30,7 +28,7 @@ func TestACP_AddDPISchema_MissingRequiredReadPermissionOnDPI_SchemaRejected(t *t
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/schema/add_dpi/reject_missing_resource_arg_on_schema_test.go b/tests/integration/acp/schema/add_dpi/reject_missing_resource_arg_on_schema_test.go
index 2013b93225..1a88260ec5 100644
--- a/tests/integration/acp/schema/add_dpi/reject_missing_resource_arg_on_schema_test.go
+++ b/tests/integration/acp/schema/add_dpi/reject_missing_resource_arg_on_schema_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -30,7 +28,7 @@ func TestACP_AddDPISchema_NoResourceWasSpecifiedOnSchema_SchemaRejected(t *testi
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
@@ -107,7 +105,7 @@ func TestACP_AddDPISchema_SpecifiedResourceArgIsEmptyOnSchema_SchemaRejected(t *
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/schema/add_dpi/reject_missing_resource_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_missing_resource_on_dpi_test.go
index 0c8354a625..1be8d6bd97 100644
--- a/tests/integration/acp/schema/add_dpi/reject_missing_resource_on_dpi_test.go
+++ b/tests/integration/acp/schema/add_dpi/reject_missing_resource_on_dpi_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -30,7 +28,7 @@ func TestACP_AddDPISchema_SpecifiedResourceDoesNotExistOnDPI_SchemaRejected(t *t
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/acp/schema/add_dpi/reject_mixed_resources_on_partial_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_mixed_resources_on_partial_dpi_test.go
index e346da3536..97c3a6f215 100644
--- a/tests/integration/acp/schema/add_dpi/reject_mixed_resources_on_partial_dpi_test.go
+++ b/tests/integration/acp/schema/add_dpi/reject_mixed_resources_on_partial_dpi_test.go
@@ -14,8 +14,6 @@ import (
"fmt"
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -30,7 +28,7 @@ func TestACP_AddDPISchema_PartialValidDPIButUseInValidDPIResource_RejectSchema(t
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/collection_description/updates/remove/policy_test.go b/tests/integration/collection_description/updates/remove/policy_test.go
index 0c498016a4..d0341e0958 100644
--- a/tests/integration/collection_description/updates/remove/policy_test.go
+++ b/tests/integration/collection_description/updates/remove/policy_test.go
@@ -13,8 +13,6 @@ package remove
import (
"testing"
- "github.com/sourcenetwork/immutable"
-
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -23,7 +21,7 @@ func TestColDescrUpdateRemovePolicy_Errors(t *testing.T) {
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
diff --git a/tests/integration/collection_description/updates/replace/view_policy_test.go b/tests/integration/collection_description/updates/replace/view_policy_test.go
index db24ff2148..20d61b664a 100644
--- a/tests/integration/collection_description/updates/replace/view_policy_test.go
+++ b/tests/integration/collection_description/updates/replace/view_policy_test.go
@@ -27,7 +27,7 @@ func TestColDescrUpdateReplaceIsMaterialized_GivenPolicyOnNonMAterializedView_Er
}),
Actions: []any{
testUtils.AddPolicy{
- Identity: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
Policy: `
name: test
description: a test policy which marks a collection in a database as a resource
diff --git a/tests/integration/identity.go b/tests/integration/identity.go
new file mode 100644
index 0000000000..7c56d81375
--- /dev/null
+++ b/tests/integration/identity.go
@@ -0,0 +1,147 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package tests
+
+import (
+ "context"
+ "math/rand"
+
+ "github.com/decred/dcrd/dcrec/secp256k1/v4"
+ "github.com/sourcenetwork/immutable"
+ "github.com/stretchr/testify/require"
+
+ acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
+)
+
+// identityRef is a type that refers to a specific identity of a certain type.
+type identityRef struct {
+ isClient bool
+ index int
+}
+
+// NoIdentity returns a reference to an identity that represents no identity.
+func NoIdentity() immutable.Option[identityRef] {
+ return immutable.None[identityRef]()
+}
+
+// ClientIdentity returns a reference to a user identity with a given index.
+func ClientIdentity(index int) immutable.Option[identityRef] {
+ return immutable.Some(identityRef{
+ isClient: true,
+ index: index,
+ })
+}
+
+// NodeIdentity returns a reference to a node identity with a given index.
+func NodeIdentity(index int) immutable.Option[identityRef] {
+ return immutable.Some(identityRef{
+ isClient: false,
+ index: index,
+ })
+}
+
+// identityHolder holds an identity and the generated tokens for each target node.
+// This is used to cache the generated tokens for each node.
+type identityHolder struct {
+ // Identity is the identity.
+ Identity acpIdentity.Identity
+ // NodeTokens is a map of node index to the generated token for that node.
+ NodeTokens map[int]string
+}
+
+func newIdentityHolder(ident acpIdentity.Identity) *identityHolder {
+ return &identityHolder{
+ Identity: ident,
+ NodeTokens: make(map[int]string),
+ }
+}
+
+// getIdentity returns the identity for the given reference.
+// If the identity does not exist, it will be generated.
+func getIdentity(s *state, ref immutable.Option[identityRef]) acpIdentity.Identity {
+ if !ref.HasValue() {
+ return acpIdentity.Identity{}
+ }
+ return getIdentityHolder(s, ref.Value()).Identity
+}
+
+// getIdentityHolder returns the identity holder for the given reference.
+// If the identity does not exist, it will be generated.
+func getIdentityHolder(s *state, ref identityRef) *identityHolder {
+ ident, ok := s.identities[ref]
+ if ok {
+ return ident
+ }
+
+ s.identities[ref] = newIdentityHolder(generateIdentity(s))
+ return s.identities[ref]
+}
+
+// getIdentityForRequest returns the identity for the given reference and node index.
+// It prepares the identity for a request by generating a token if needed, i.e. it will
+// return an identity with [Identity.BearerToken] set.
+func getIdentityForRequest(s *state, ref identityRef, nodeIndex int) acpIdentity.Identity {
+ identHolder := getIdentityHolder(s, ref)
+ ident := identHolder.Identity
+
+ token, ok := identHolder.NodeTokens[nodeIndex]
+ if ok {
+ ident.BearerToken = token
+ } else {
+ audience := getNodeAudience(s, nodeIndex)
+ if acpType == SourceHubACPType || audience.HasValue() {
+ err := ident.UpdateToken(authTokenExpiration, audience, immutable.Some(s.sourcehubAddress))
+ require.NoError(s.t, err)
+ identHolder.NodeTokens[nodeIndex] = ident.BearerToken
+ }
+ }
+ return ident
+}
+
+// Generate the keys using predefined seed so that multiple runs yield the same private key.
+// This is important for stuff like the change detector.
+func generateIdentity(s *state) acpIdentity.Identity {
+ source := rand.NewSource(int64(s.nextIdentityGenSeed))
+ r := rand.New(source)
+
+ privateKey, err := secp256k1.GeneratePrivateKeyFromRand(r)
+ require.NoError(s.t, err)
+
+ s.nextIdentityGenSeed++
+
+ identity, err := acpIdentity.FromPrivateKey(privateKey)
+ require.NoError(s.t, err)
+
+ return identity
+}
+
+// getContextWithIdentity returns a context with the identity for the given reference and node index.
+// If the identity does not exist, it will be generated.
+// The identity added to the context is prepared for a request, i.e. its [Identity.BearerToken] is set.
+func getContextWithIdentity(
+ ctx context.Context,
+ s *state,
+ ref immutable.Option[identityRef],
+ nodeIndex int,
+) context.Context {
+ if !ref.HasValue() {
+ return ctx
+ }
+ ident := getIdentityForRequest(s, ref.Value(), nodeIndex)
+ return acpIdentity.WithContext(ctx, immutable.Some(ident))
+}
+
+func getIdentityDID(s *state, ref immutable.Option[identityRef]) string {
+ if ref.HasValue() {
+ return getIdentity(s, ref).DID
+ }
+ return ""
+}
diff --git a/tests/integration/node/identity_test.go b/tests/integration/node/identity_test.go
new file mode 100644
index 0000000000..38c065e1b9
--- /dev/null
+++ b/tests/integration/node/identity_test.go
@@ -0,0 +1,36 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package node
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestNodeIdentity_NodeIdentity_Succeed(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.RandomNetworkingConfig(),
+ testUtils.RandomNetworkingConfig(),
+ testUtils.GetNodeIdentity{
+ NodeID: 0,
+ ExpectedIdentity: testUtils.NodeIdentity(0),
+ },
+ testUtils.GetNodeIdentity{
+ NodeID: 1,
+ ExpectedIdentity: testUtils.NodeIdentity(1),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/state.go b/tests/integration/state.go
index 9e65458531..77fe2e52cd 100644
--- a/tests/integration/state.go
+++ b/tests/integration/state.go
@@ -17,7 +17,6 @@ import (
"github.com/ipfs/go-cid"
"github.com/libp2p/go-libp2p/core/peer"
- identity "github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/datastore"
"github.com/sourcenetwork/defradb/event"
@@ -129,13 +128,20 @@ type state struct {
// This is order dependent and the property is accessed by index.
txns []datastore.Txn
- // Identities by node index, by identity index.
- identities [][]identity.Identity
+ // identities contains all identities created in this test.
+ // The map key is the identity reference that uniquely identifies identities of different
+	// types. See [identityRef].
+ // The map value is the identity holder that contains the identity itself and token
+ // generated for different target nodes. See [identityHolder].
+ identities map[identityRef]*identityHolder
- // Will recieve an item once all actions have finished processing.
+ // The seed for the next identity generation. We want identities to be deterministic.
+ nextIdentityGenSeed int
+
+ // Will receive an item once all actions have finished processing.
allActionsDone chan struct{}
- // These channels will recieve a function which asserts results of any subscription requests.
+ // These channels will receive a function which asserts results of any subscription requests.
subscriptionResultsChans []chan func()
// nodeEvents contains all event node subscriptions.
@@ -161,7 +167,7 @@ type state struct {
collections [][]client.Collection
// The names of the collections active in this test.
- // Indexes matches that of inital collections.
+ // Indexes matches that of initial collections.
collectionNames []string
// A map of the collection indexes by their Root, this allows easier
@@ -207,6 +213,7 @@ func newState(
clientType: clientType,
txns: []datastore.Txn{},
allActionsDone: make(chan struct{}),
+ identities: map[identityRef]*identityHolder{},
subscriptionResultsChans: []chan func(){},
nodeEvents: []*eventState{},
nodeAddresses: []peer.AddrInfo{},
diff --git a/tests/integration/test_case.go b/tests/integration/test_case.go
index 3103d674ca..e1c9b0b6f1 100644
--- a/tests/integration/test_case.go
+++ b/tests/integration/test_case.go
@@ -293,7 +293,10 @@ type CreateDoc struct {
//
// If an Identity is provided and the collection has a policy, then the
// created document(s) will be owned by this Identity.
- Identity immutable.Option[int]
+ //
+	// Use `ClientIdentity` to create a user identity and `NodeIdentity` to create a node identity.
+ // Default value is `NoIdentity()`.
+ Identity immutable.Option[identityRef]
// Specifies whether the document should be encrypted.
IsDocEncrypted bool
@@ -362,7 +365,10 @@ type DeleteDoc struct {
//
// If an Identity is provided and the collection has a policy, then
// can also delete private document(s) that are owned by this Identity.
- Identity immutable.Option[int]
+ //
+	// Use `ClientIdentity` to create a user identity and `NodeIdentity` to create a node identity.
+ // Default value is `NoIdentity()`.
+ Identity immutable.Option[identityRef]
// The collection in which this document should be deleted.
CollectionID int
@@ -392,7 +398,10 @@ type UpdateDoc struct {
//
// If an Identity is provided and the collection has a policy, then
// can also update private document(s) that are owned by this Identity.
- Identity immutable.Option[int]
+ //
+	// Use `ClientIdentity` to create a user identity and `NodeIdentity` to create a node identity.
+ // Default value is `NoIdentity()`.
+ Identity immutable.Option[identityRef]
// The collection in which this document exists.
CollectionID int
@@ -432,7 +441,10 @@ type UpdateWithFilter struct {
//
// If an Identity is provided and the collection has a policy, then
// can also update private document(s) that are owned by this Identity.
- Identity immutable.Option[int]
+ //
+	// Use `ClientIdentity` to create a user identity and `NodeIdentity` to create a node identity.
+ // Default value is `NoIdentity()`.
+ Identity immutable.Option[identityRef]
// The collection in which this document exists.
CollectionID int
@@ -586,7 +598,10 @@ type Request struct {
//
// If an Identity is provided and the collection has a policy, then can
// operate over private document(s) that are owned by this Identity.
- Identity immutable.Option[int]
+ //
+	// Use `ClientIdentity` to create a user identity and `NodeIdentity` to create a node identity.
+ // Default value is `NoIdentity()`.
+ Identity immutable.Option[identityRef]
// Used to identify the transaction for this to run against. Optional.
TransactionID immutable.Option[int]
@@ -778,3 +793,16 @@ type BackupImport struct {
// contains this string.
ExpectedError string
}
+
+// GetNodeIdentity is an action that calls the [DB.GetNodeIdentity] method and asserts the result.
+// It checks if a node at the given index has an identity matching another identity under the same index.
+type GetNodeIdentity struct {
+ // NodeID holds the ID (index) of a node to get the identity from.
+ NodeID int
+
+ // ExpectedIdentity holds the identity that is expected to be found.
+ //
+	// Use `ClientIdentity` to create a user identity and `NodeIdentity` to create a node identity.
+ // Default value is `NoIdentity()`.
+ ExpectedIdentity immutable.Option[identityRef]
+}
diff --git a/tests/integration/utils.go b/tests/integration/utils.go
index 05698e9a39..3bf34d1138 100644
--- a/tests/integration/utils.go
+++ b/tests/integration/utils.go
@@ -115,6 +115,7 @@ func init() {
// mutation type.
mutationType = CollectionSaveMutationType
}
+	// mutationType is now either the env-var-specified type or the default set above.
if value, ok := os.LookupEnv(viewTypeEnvName); ok {
viewType = ViewType(value)
@@ -141,7 +142,7 @@ func AssertPanic(t *testing.T, f assert.PanicTestFunc) bool {
}
if httpClient || cliClient {
- // The http / cli client will return an error instead of panicing at the moment.
+ // The http / cli client will return an error instead of panicking at the moment.
t.Skip("Assert panic with the http client is not currently supported.")
}
@@ -410,6 +411,9 @@ func performAction(
case CreatePredefinedDocs:
generatePredefinedDocs(s, action)
+ case GetNodeIdentity:
+ performGetNodeIdentityAction(s, action)
+
case SetupComplete:
// no-op, just continue.
@@ -543,7 +547,7 @@ func getCollectionNames(testCase TestCase) []string {
func getCollectionNamesFromSchema(result map[string]int, schema string, nextIndex int) int {
// WARNING: This will not work with schemas ending in `type`, e.g. `user_type`
splitByType := strings.Split(schema, "type ")
- // Skip the first, as that preceeds `type ` if `type ` is present,
+	// Skip the first, as that precedes `type ` if `type ` is present,
// else there are no types.
for i := 1; i < len(splitByType); i++ {
wipSplit := strings.TrimLeft(splitByType[i], " ")
@@ -688,7 +692,7 @@ ActionLoop:
} else {
// if we don't have any non-mutation actions and the change detector is enabled
// skip this test as we will not gain anything from running (change detector would
- // run an idential profile to a normal test run)
+ // run an identical profile to a normal test run)
t.Skipf("no actions to execute")
}
}
@@ -739,7 +743,7 @@ func startNodes(s *state, action Start) {
}
originalPath := databaseDir
databaseDir = s.dbPaths[nodeIndex]
- node, _, err := setupNode(s)
+ node, _, err := setupNode(s, db.WithNodeIdentity(getIdentity(s, NodeIdentity(nodeIndex))))
require.NoError(s.t, err)
databaseDir = originalPath
@@ -757,7 +761,7 @@ func startNodes(s *state, action Start) {
}
// We need to make sure the node is configured with its old address, otherwise
- // a new one may be selected and reconnnection to it will fail.
+ // a new one may be selected and reconnection to it will fail.
var addresses []string
for _, addr := range s.nodeAddresses[nodeIndex].Addrs {
addresses = append(addresses, addr.String())
@@ -817,8 +821,8 @@ func refreshCollections(
if _, ok := s.collectionIndexesByRoot[collection.Description().RootID]; !ok {
// If the root is not found here this is likely the first refreshCollections
// call of the test, we map it by root in case the collection is renamed -
- // we still wish to preserve the original index so test maintainers can refrence
- // them in a convienient manner.
+ // we still wish to preserve the original index so test maintainers can reference
+ // them in a convenient manner.
s.collectionIndexesByRoot[collection.Description().RootID] = i
}
break
@@ -858,7 +862,9 @@ func configureNode(
for _, opt := range netNodeOpts {
nodeOpts = append(nodeOpts, opt)
}
- node, path, err := setupNode(s, nodeOpts...) //disable change dector, or allow it?
+ nodeOpts = append(nodeOpts, db.WithNodeIdentity(getIdentity(s, NodeIdentity(len(s.nodes)))))
+
+ node, path, err := setupNode(s, nodeOpts...) //disable change detector, or allow it?
require.NoError(s.t, err)
s.nodeAddresses = append(s.nodeAddresses, node.Peer.PeerInfo())
@@ -1305,8 +1311,7 @@ func createDocViaColSave(
}
func makeContextForDocCreate(s *state, ctx context.Context, nodeIndex int, action *CreateDoc) context.Context {
- identity := getIdentity(s, nodeIndex, action.Identity)
- ctx = db.SetContextIdentity(ctx, identity)
+ ctx = getContextWithIdentity(ctx, s, action.Identity, nodeIndex)
ctx = encryption.SetContextConfigFromParams(ctx, action.IsDocEncrypted, action.EncryptedFields)
return ctx
}
@@ -1385,7 +1390,7 @@ func createDocViaGQL(
req := fmt.Sprintf(`mutation { %s(%s) { _docID } }`, key, params)
txn := getTransaction(s, node, immutable.None[int](), action.ExpectedError)
- ctx := db.SetContextIdentity(db.SetContextTxn(s.ctx, txn), getIdentity(s, nodeIndex, action.Identity))
+ ctx := getContextWithIdentity(db.SetContextTxn(s.ctx, txn), s, action.Identity, nodeIndex)
result := node.ExecRequest(ctx, req)
if len(result.GQL.Errors) > 0 {
@@ -1439,8 +1444,7 @@ func deleteDoc(
for index, node := range nodes {
nodeID := nodeIDs[index]
collection := s.collections[nodeID][action.CollectionID]
- identity := getIdentity(s, nodeID, action.Identity)
- ctx := db.SetContextIdentity(s.ctx, identity)
+ ctx := getContextWithIdentity(s.ctx, s, action.Identity, nodeID)
err := withRetryOnNode(
node,
func() error {
@@ -1513,8 +1517,7 @@ func updateDocViaColSave(
nodeIndex int,
collection client.Collection,
) error {
- identity := getIdentity(s, nodeIndex, action.Identity)
- ctx := db.SetContextIdentity(s.ctx, identity)
+ ctx := getContextWithIdentity(s.ctx, s, action.Identity, nodeIndex)
doc, err := collection.Get(ctx, s.docIDs[action.CollectionID][action.DocID], true)
if err != nil {
@@ -1534,8 +1537,7 @@ func updateDocViaColUpdate(
nodeIndex int,
collection client.Collection,
) error {
- identity := getIdentity(s, nodeIndex, action.Identity)
- ctx := db.SetContextIdentity(s.ctx, identity)
+ ctx := getContextWithIdentity(s.ctx, s, action.Identity, nodeIndex)
doc, err := collection.Get(ctx, s.docIDs[action.CollectionID][action.DocID], true)
if err != nil {
@@ -1571,8 +1573,7 @@ func updateDocViaGQL(
input,
)
- identity := getIdentity(s, nodeIndex, action.Identity)
- ctx := db.SetContextIdentity(s.ctx, identity)
+ ctx := getContextWithIdentity(s.ctx, s, action.Identity, nodeIndex)
result := node.ExecRequest(ctx, request)
if len(result.GQL.Errors) > 0 {
@@ -1590,8 +1591,7 @@ func updateWithFilter(s *state, action UpdateWithFilter) {
for index, node := range nodes {
nodeID := nodeIDs[index]
collection := s.collections[nodeID][action.CollectionID]
- identity := getIdentity(s, nodeID, action.Identity)
- ctx := db.SetContextIdentity(s.ctx, identity)
+ ctx := getContextWithIdentity(s.ctx, s, action.Identity, nodeID)
err := withRetryOnNode(
node,
func() error {
@@ -1832,9 +1832,7 @@ func executeRequest(
nodeID := nodeIDs[index]
txn := getTransaction(s, node, action.TransactionID, action.ExpectedError)
- ctx := db.SetContextTxn(s.ctx, txn)
- identity := getIdentity(s, nodeID, action.Identity)
- ctx = db.SetContextIdentity(ctx, identity)
+ ctx := getContextWithIdentity(db.SetContextTxn(s.ctx, txn), s, action.Identity, nodeID)
var options []client.RequestOption
if action.OperationName.HasValue() {
@@ -2316,10 +2314,10 @@ func skipIfClientTypeUnsupported(
return filteredClients
}
-func skipIfACPTypeUnsupported(t testing.TB, supporteACPTypes immutable.Option[[]ACPType]) {
- if supporteACPTypes.HasValue() {
+func skipIfACPTypeUnsupported(t testing.TB, supportedACPTypes immutable.Option[[]ACPType]) {
+ if supportedACPTypes.HasValue() {
var isTypeSupported bool
- for _, supportedType := range supporteACPTypes.Value() {
+ for _, supportedType := range supportedACPTypes.Value() {
if supportedType == acpType {
isTypeSupported = true
break
@@ -2335,13 +2333,13 @@ func skipIfACPTypeUnsupported(t testing.TB, supporteACPTypes immutable.Option[[]
func skipIfDatabaseTypeUnsupported(
t testing.TB,
databases []DatabaseType,
- supporteDatabaseTypes immutable.Option[[]DatabaseType],
+ supportedDatabaseTypes immutable.Option[[]DatabaseType],
) []DatabaseType {
- if !supporteDatabaseTypes.HasValue() {
+ if !supportedDatabaseTypes.HasValue() {
return databases
}
filteredDatabases := []DatabaseType{}
- for _, supportedType := range supporteDatabaseTypes.Value() {
+ for _, supportedType := range supportedDatabaseTypes.Value() {
for _, database := range databases {
if supportedType == database {
filteredDatabases = append(filteredDatabases, database)
@@ -2425,3 +2423,16 @@ func parseCreateDocs(action CreateDoc, collection client.Collection) ([]*client.
return []*client.Document{val}, nil
}
}
+
+func performGetNodeIdentityAction(s *state, action GetNodeIdentity) {
+ if action.NodeID >= len(s.nodes) {
+ s.t.Fatalf("invalid nodeID: %v", action.NodeID)
+ }
+
+ actualIdent, err := s.nodes[action.NodeID].GetNodeIdentity(s.ctx)
+ require.NoError(s.t, err, s.testCase.Description)
+
+ expectedIdent := getIdentity(s, action.ExpectedIdentity)
+ expectedRawIdent := immutable.Some(expectedIdent.IntoRawIdentity().Public())
+ require.Equal(s.t, expectedRawIdent, actualIdent, "raw identity at %d mismatch", action.NodeID)
+}
From 12669e4855a490b2b329191f1f7b157bc0e001f7 Mon Sep 17 00:00:00 2001
From: Fred Carle
Date: Fri, 25 Oct 2024 17:25:52 -0400
Subject: [PATCH 08/47] fix: Add Authorization header to CORS allowed headers
(#3178)
## Relevant issue(s)
Resolves #3177
## Description
This PR adds the Authorization header to the CORS list of allowed
headers. This bug was flagged by a partner trying to use
authorization from a browser app.
---
http/middleware.go | 2 +-
http/server_test.go | 2 ++
2 files changed, 3 insertions(+), 1 deletion(-)
diff --git a/http/middleware.go b/http/middleware.go
index cc98473711..b57325bb15 100644
--- a/http/middleware.go
+++ b/http/middleware.go
@@ -36,7 +36,7 @@ func CorsMiddleware(allowedOrigins []string) func(http.Handler) http.Handler {
return slices.Contains(allowedOrigins, strings.ToLower(origin))
},
AllowedMethods: []string{"GET", "HEAD", "POST", "PATCH", "DELETE"},
- AllowedHeaders: []string{"Content-Type"},
+ AllowedHeaders: []string{"Content-Type", "Authorization"},
MaxAge: 300,
})
}
diff --git a/http/server_test.go b/http/server_test.go
index ec9ab8ab75..e75b1f12ca 100644
--- a/http/server_test.go
+++ b/http/server_test.go
@@ -183,6 +183,8 @@ func TestServerListenAndServeWithAllowedOrigins(t *testing.T) {
req, err := http.NewRequest(http.MethodOptions, "http://127.0.0.1:30001", nil)
require.NoError(t, err)
req.Header.Add("origin", "localhost")
+ req.Header.Add("Access-Control-Request-Method", "POST")
+ req.Header.Add("Access-Control-Request-Headers", "Authorization, Content-Type")
res, err := http.DefaultClient.Do(req)
require.NoError(t, err)
From fee0d7ee92b6bf978322fe18180a9caf4331f370 Mon Sep 17 00:00:00 2001
From: AndrewSisley
Date: Mon, 28 Oct 2024 10:20:46 -0400
Subject: [PATCH 09/47] test: Allow soft-referencing of Cids in tests (#3176)
## Relevant issue(s)
Resolves #3172
## Description
Allows soft-referencing of Cids in tests.
When encountering an unpleasant change-detector problem in
https://github.com/sourcenetwork/defradb/pull/3173 I realised we had no
need to reference Cids based on index, and we really only ever cared
that they were valid, and either not the same as another cid, or the
same.
This change allows us to soft-reference them without ever really caring
about where they came from. This allows us to give them descriptive ids,
and avoids having the test framework fetch them
independently from the DB - something that raises questions over whether
we are testing the test framework or the production code, especially in
the change detector.
The system can be extended to other types fairly easily.
## How has this been tested?
I had a fairly long play around giving it bad ids in tests to make sure
it would actually fail when appropriate.
---
.../integration/query/commits/simple_test.go | 26 +++----
.../query/commits/with_delete_test.go | 7 ++
tests/integration/results.go | 77 +++++++++++++++++--
tests/integration/state.go | 4 +
tests/integration/utils.go | 17 ++--
5 files changed, 106 insertions(+), 25 deletions(-)
diff --git a/tests/integration/query/commits/simple_test.go b/tests/integration/query/commits/simple_test.go
index ca8c1e51b5..3660efdb87 100644
--- a/tests/integration/query/commits/simple_test.go
+++ b/tests/integration/query/commits/simple_test.go
@@ -37,13 +37,13 @@ func TestQueryCommits(t *testing.T) {
Results: map[string]any{
"commits": []map[string]any{
{
- "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e",
+ "cid": testUtils.NewUniqueCid("name"),
},
{
- "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy",
+ "cid": testUtils.NewUniqueCid("age"),
},
{
- "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy",
+ "cid": testUtils.NewUniqueCid("head"),
},
},
},
@@ -364,7 +364,7 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) {
Results: map[string]any{
"commits": []map[string]any{
{
- "cid": "bafyreih5h6i6ohfsgrcjtg76iarebqcurpaft73gpobl2z2cfsvihsgdqu",
+ "cid": testUtils.NewUniqueCid("age update"),
"collectionID": int64(1),
"delta": testUtils.CBORValue(22),
"docID": "bae-c9fb0fa4-1195-589c-aa54-e68333fb90b3",
@@ -373,13 +373,13 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) {
"height": int64(2),
"links": []map[string]any{
{
- "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e",
+ "cid": testUtils.NewUniqueCid("age create"),
"name": "_head",
},
},
},
{
- "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e",
+ "cid": testUtils.NewUniqueCid("age create"),
"collectionID": int64(1),
"delta": testUtils.CBORValue(21),
"docID": "bae-c9fb0fa4-1195-589c-aa54-e68333fb90b3",
@@ -389,7 +389,7 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) {
"links": []map[string]any{},
},
{
- "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy",
+ "cid": testUtils.NewUniqueCid("name create"),
"collectionID": int64(1),
"delta": testUtils.CBORValue("John"),
"docID": "bae-c9fb0fa4-1195-589c-aa54-e68333fb90b3",
@@ -399,7 +399,7 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) {
"links": []map[string]any{},
},
{
- "cid": "bafyreiale6qsjc7qewod3c6h2odwamfwcf7vt4zlqtw7ldcm57xdkgxja4",
+ "cid": testUtils.NewUniqueCid("update composite"),
"collectionID": int64(1),
"delta": nil,
"docID": "bae-c9fb0fa4-1195-589c-aa54-e68333fb90b3",
@@ -408,17 +408,17 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) {
"height": int64(2),
"links": []map[string]any{
{
- "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy",
+ "cid": testUtils.NewUniqueCid("create composite"),
"name": "_head",
},
{
- "cid": "bafyreih5h6i6ohfsgrcjtg76iarebqcurpaft73gpobl2z2cfsvihsgdqu",
+ "cid": testUtils.NewUniqueCid("age update"),
"name": "age",
},
},
},
{
- "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy",
+ "cid": testUtils.NewUniqueCid("create composite"),
"collectionID": int64(1),
"delta": nil,
"docID": "bae-c9fb0fa4-1195-589c-aa54-e68333fb90b3",
@@ -427,11 +427,11 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) {
"height": int64(1),
"links": []map[string]any{
{
- "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e",
+ "cid": testUtils.NewUniqueCid("age create"),
"name": "age",
},
{
- "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy",
+ "cid": testUtils.NewUniqueCid("name create"),
"name": "name",
},
},
diff --git a/tests/integration/query/commits/with_delete_test.go b/tests/integration/query/commits/with_delete_test.go
index cd52f5d861..91b1125d6c 100644
--- a/tests/integration/query/commits/with_delete_test.go
+++ b/tests/integration/query/commits/with_delete_test.go
@@ -40,8 +40,10 @@ func TestQueryCommits_AfterDocDeletion_ShouldStillFetch(t *testing.T) {
Request: `
query {
commits(fieldId: "C") {
+ cid
fieldName
links {
+ cid
name
}
}
@@ -50,20 +52,25 @@ func TestQueryCommits_AfterDocDeletion_ShouldStillFetch(t *testing.T) {
Results: map[string]any{
"commits": []map[string]any{
{
+ "cid": testUtils.NewUniqueCid("delete"),
"fieldName": nil,
"links": []map[string]any{
{
+ "cid": testUtils.NewUniqueCid("create composite"),
"name": "_head",
},
},
},
{
+ "cid": testUtils.NewUniqueCid("create composite"),
"fieldName": nil,
"links": []map[string]any{
{
+ "cid": testUtils.NewUniqueCid("create age"),
"name": "age",
},
{
+ "cid": testUtils.NewUniqueCid("create name"),
"name": "name",
},
},
diff --git a/tests/integration/results.go b/tests/integration/results.go
index cc54565fe8..23435a3807 100644
--- a/tests/integration/results.go
+++ b/tests/integration/results.go
@@ -16,6 +16,7 @@ import (
"testing"
"time"
+ "github.com/ipfs/go-cid"
"github.com/sourcenetwork/immutable"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -23,22 +24,37 @@ import (
"github.com/sourcenetwork/defradb/client"
)
+// Validator instances can be substituted in place of concrete values
+// and will be asserted on using their [Validate] function instead of
+// asserting direct equality.
+//
+// They may mutate test state.
+//
+// Todo: This does not currently support chaining/nesting of Validators,
+// although we would like that long term:
+// https://github.com/sourcenetwork/defradb/issues/3189
+type Validator interface {
+ Validate(s *state, actualValue any, msgAndArgs ...any)
+}
+
// AnyOf may be used as `Results` field where the value may
// be one of several values, yet the value of that field must be the same
// across all nodes due to strong eventual consistency.
type AnyOf []any
-// assertResultsAnyOf asserts that actual result is equal to at least one of the expected results.
+var _ Validator = (AnyOf)(nil)
+
+// Validate asserts that actual result is equal to at least one of the expected results.
//
// The comparison is relaxed when using client types other than goClientType.
-func assertResultsAnyOf(t testing.TB, client ClientType, expected AnyOf, actual any, msgAndArgs ...any) {
- switch client {
+func (a AnyOf) Validate(s *state, actualValue any, msgAndArgs ...any) {
+ switch s.clientType {
case HTTPClientType, CLIClientType:
- if !areResultsAnyOf(expected, actual) {
- assert.Contains(t, expected, actual, msgAndArgs...)
+ if !areResultsAnyOf(a, actualValue) {
+ assert.Contains(s.t, a, actualValue, msgAndArgs...)
}
default:
- assert.Contains(t, expected, actual, msgAndArgs...)
+ assert.Contains(s.t, a, actualValue, msgAndArgs...)
}
}
@@ -68,6 +84,55 @@ func areResultsAnyOf(expected AnyOf, actual any) bool {
return false
}
+// UniqueCid allows the referencing of Cids by an arbitrary test-defined ID.
+//
+// Instead of asserting on a specific Cid value, this type will assert that
+// no other [UniqueCid]s with different [ID]s has the first Cid value that this instance
+// describes.
+//
+// It will also ensure that all Cids described by this [UniqueCid] have the same
+// valid, Cid value.
+type UniqueCid struct {
+ // ID is the arbitrary, but hopefully descriptive, id of this [UniqueCid].
+ ID any
+}
+
+var _ Validator = (*UniqueCid)(nil)
+
+// NewUniqueCid creates a new [UniqueCid] of the given arbitrary, but hopefully descriptive,
+// id.
+//
+// All results described by [UniqueCid]s with the given id must have the same valid Cid value.
+// No other [UniqueCid] ids may describe the same Cid value.
+func NewUniqueCid(id any) *UniqueCid {
+ return &UniqueCid{
+ ID: id,
+ }
+}
+
+func (ucid *UniqueCid) Validate(s *state, actualValue any, msgAndArgs ...any) {
+ isNew := true
+ for id, value := range s.cids {
+ if id == ucid.ID {
+ require.Equal(s.t, value, actualValue)
+ isNew = false
+ } else {
+ require.NotEqual(s.t, value, actualValue, "UniqueCid must be unique!", msgAndArgs)
+ }
+ }
+
+ if isNew {
+ require.IsType(s.t, "", actualValue)
+
+ cid, err := cid.Decode(actualValue.(string))
+ if err != nil {
+ require.NoError(s.t, err)
+ }
+
+ s.cids[ucid.ID] = cid.String()
+ }
+}
+
// areResultsEqual returns true if the expected and actual results are of equal value.
//
// Values of type json.Number and immutable.Option will be reduced to their underlying types.
diff --git a/tests/integration/state.go b/tests/integration/state.go
index 77fe2e52cd..b4a3777d03 100644
--- a/tests/integration/state.go
+++ b/tests/integration/state.go
@@ -181,6 +181,9 @@ type state struct {
// nodes.
docIDs [][]client.DocID
+ // Valid Cid string values by [UniqueCid] ID.
+ cids map[any]string
+
// Indexes, by index, by collection index, by node index.
indexes [][][]client.IndexDescription
@@ -225,6 +228,7 @@ func newState(
collectionNames: collectionNames,
collectionIndexesByRoot: map[uint32]int{},
docIDs: [][]client.DocID{},
+ cids: map[any]string{},
indexes: [][][]client.IndexDescription{},
isBench: false,
}
diff --git a/tests/integration/utils.go b/tests/integration/utils.go
index 3bf34d1138..aff1ebecb7 100644
--- a/tests/integration/utils.go
+++ b/tests/integration/utils.go
@@ -2020,8 +2020,10 @@ func assertRequestResults(
actualDocs,
stack,
)
- case AnyOf:
- assertResultsAnyOf(s.t, s.clientType, exp, actual)
+
+ case Validator:
+ exp.Validate(s, actual)
+
default:
assertResultsEqual(
s.t,
@@ -2065,9 +2067,12 @@ func assertRequestResultDocs(
for field, actualValue := range actualDoc {
stack.pushMap(field)
+ pathInfo := fmt.Sprintf("node: %v, path: %s", nodeID, stack)
+
switch expectedValue := expectedDoc[field].(type) {
- case AnyOf:
- assertResultsAnyOf(s.t, s.clientType, expectedValue, actualValue)
+ case Validator:
+ expectedValue.Validate(s, actualValue, pathInfo)
+
case DocIndex:
expectedDocID := s.docIDs[expectedValue.CollectionIndex][expectedValue.Index].String()
assertResultsEqual(
@@ -2075,7 +2080,7 @@ func assertRequestResultDocs(
s.clientType,
expectedDocID,
actualValue,
- fmt.Sprintf("node: %v, path: %s", nodeID, stack),
+ pathInfo,
)
case []map[string]any:
actualValueMap := ConvertToArrayOfMaps(s.t, actualValue)
@@ -2094,7 +2099,7 @@ func assertRequestResultDocs(
s.clientType,
expectedValue,
actualValue,
- fmt.Sprintf("node: %v, path: %s", nodeID, stack),
+ pathInfo,
)
}
stack.pop()
From 3a3baac8146fcf4f2076fe560540c890f47db037 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
<41898282+github-actions[bot]@users.noreply.github.com>
Date: Mon, 28 Oct 2024 12:37:21 -0400
Subject: [PATCH 10/47] bot: Update dependencies (bulk dependabot PRs)
2024-10-28 (#3188)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
✅ This PR was created by combining the following PRs:
#3187 bot: Bump vite from 5.4.9 to 5.4.10 in /playground
#3186 bot: Bump @types/react from 18.3.11 to 18.3.12 in /playground
#3185 bot: Bump eslint-plugin-react-refresh from 0.4.13 to 0.4.14 in
/playground
#3182 bot: Bump github.com/ipfs/boxo from 0.24.0 to 0.24.2
#3181 bot: Bump github.com/zalando/go-keyring from 0.2.5 to 0.2.6
#3184 bot: Bump @typescript-eslint/parser from 8.10.0 to 8.11.0 in
/playground
#3183 bot: Bump @typescript-eslint/eslint-plugin from 8.10.0 to 8.11.0
in /playground
#3180 bot: Bump github.com/lestrrat-go/jwx/v2 from 2.1.1 to 2.1.2
---------
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Fred Carle
---
Makefile | 2 +-
go.mod | 60 +++---
go.sum | 118 +++++------
playground/package-lock.json | 376 ++++++-----------------------------
playground/package.json | 10 +-
5 files changed, 161 insertions(+), 405 deletions(-)
diff --git a/Makefile b/Makefile
index 2010e3e8c7..d82b926815 100644
--- a/Makefile
+++ b/Makefile
@@ -207,7 +207,7 @@ verify:
.PHONY: tidy
tidy:
- go mod tidy -go=1.22
+ go mod tidy
.PHONY: clean
clean:
diff --git a/go.mod b/go.mod
index 805c27b80c..7b3bcff06b 100644
--- a/go.mod
+++ b/go.mod
@@ -1,8 +1,8 @@
module github.com/sourcenetwork/defradb
-go 1.22
+go 1.22.0
-toolchain go1.22.0
+toolchain go1.22.7
require (
github.com/bits-and-blooms/bitset v1.14.3
@@ -19,7 +19,7 @@ require (
github.com/go-errors/errors v1.5.1
github.com/gofrs/uuid/v5 v5.3.0
github.com/iancoleman/strcase v0.3.0
- github.com/ipfs/boxo v0.24.0
+ github.com/ipfs/boxo v0.24.2
github.com/ipfs/go-block-format v0.2.0
github.com/ipfs/go-cid v0.4.1
github.com/ipfs/go-datastore v0.6.0
@@ -31,8 +31,8 @@ require (
github.com/jbenet/goprocess v0.1.4
github.com/joho/godotenv v1.5.1
github.com/lens-vm/lens/host-go v0.0.0-20231127204031-8d858ed2926c
- github.com/lestrrat-go/jwx/v2 v2.1.1
- github.com/libp2p/go-libp2p v0.36.4
+ github.com/lestrrat-go/jwx/v2 v2.1.2
+ github.com/libp2p/go-libp2p v0.37.0
github.com/libp2p/go-libp2p-gostream v0.6.0
github.com/libp2p/go-libp2p-kad-dht v0.27.0
github.com/libp2p/go-libp2p-pubsub v0.12.0
@@ -58,16 +58,17 @@ require (
github.com/tidwall/btree v1.7.0
github.com/valyala/fastjson v1.6.4
github.com/vito/go-sse v1.1.2
- github.com/zalando/go-keyring v0.2.5
+ github.com/zalando/go-keyring v0.2.6
go.opentelemetry.io/otel/metric v1.31.0
go.opentelemetry.io/otel/sdk/metric v1.31.0
go.uber.org/zap v1.27.0
golang.org/x/crypto v0.28.0
- golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa
+ golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c
google.golang.org/grpc v1.67.1
)
require (
+ al.essio.dev/pkg/shellescape v1.5.1 // indirect
buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.31.0-20230802163732-1c33ebd9ecfa.1 // indirect
cloud.google.com/go v0.112.1 // indirect
cloud.google.com/go/compute/metadata v0.5.0 // indirect
@@ -94,7 +95,6 @@ require (
github.com/Jorropo/jsync v1.0.1 // indirect
github.com/NathanBaulch/protoc-gen-cobra v1.2.1 // indirect
github.com/TBD54566975/ssi-sdk v0.0.4-alpha // indirect
- github.com/alessio/shellescape v1.4.1 // indirect
github.com/awalterschulze/gographviz v2.0.3+incompatible // indirect
github.com/aws/aws-sdk-go v1.44.224 // indirect
github.com/benbjohnson/clock v1.3.5 // indirect
@@ -134,7 +134,7 @@ require (
github.com/cosmos/ledger-cosmos-go v0.13.3 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.4 // indirect
github.com/cskr/pubsub v1.0.2 // indirect
- github.com/danieljoos/wincred v1.2.0 // indirect
+ github.com/danieljoos/wincred v1.2.2 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/davidlazar/go-crypto v0.0.0-20200604182044-b73af7476f6c // indirect
github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f // indirect
@@ -180,7 +180,7 @@ require (
github.com/google/go-cmp v0.6.0 // indirect
github.com/google/gopacket v1.1.19 // indirect
github.com/google/orderedcode v0.0.1 // indirect
- github.com/google/pprof v0.0.0-20240727154555-813a5fbdbec8 // indirect
+ github.com/google/pprof v0.0.0-20241017200806-017d972448fc // indirect
github.com/google/s2a-go v0.1.7 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect
@@ -233,7 +233,7 @@ require (
github.com/jorrizza/ed2curve25519 v0.1.0 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/kilic/bls12-381 v0.1.1-0.20210503002446-7b7597926c69 // indirect
- github.com/klauspost/compress v1.17.9 // indirect
+ github.com/klauspost/compress v1.17.11 // indirect
github.com/klauspost/cpuid/v2 v2.2.8 // indirect
github.com/koron/go-ssdp v0.0.4 // indirect
github.com/kr/pretty v0.3.1 // indirect
@@ -247,7 +247,7 @@ require (
github.com/lib/pq v1.10.9 // indirect
github.com/libp2p/go-buffer-pool v0.1.0 // indirect
github.com/libp2p/go-cidranger v1.1.0 // indirect
- github.com/libp2p/go-flow-metrics v0.1.0 // indirect
+ github.com/libp2p/go-flow-metrics v0.2.0 // indirect
github.com/libp2p/go-libp2p-asn-util v0.4.1 // indirect
github.com/libp2p/go-libp2p-kbucket v0.6.4 // indirect
github.com/libp2p/go-libp2p-routing-helpers v0.7.4 // indirect
@@ -283,17 +283,17 @@ require (
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
github.com/oasisprotocol/curve25519-voi v0.0.0-20230904125328-1f23a7beb09a // indirect
github.com/oklog/run v1.1.0 // indirect
- github.com/onsi/ginkgo/v2 v2.20.0 // indirect
+ github.com/onsi/ginkgo/v2 v2.20.2 // indirect
github.com/opencontainers/runtime-spec v1.2.0 // indirect
github.com/opentracing/opentracing-go v1.2.0 // indirect
github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 // indirect
github.com/pelletier/go-toml/v2 v2.2.2 // indirect
github.com/perimeterx/marshmallow v1.1.5 // indirect
github.com/petermattis/goid v0.0.0-20231207134359-e60b3f734c67 // indirect
- github.com/pion/datachannel v1.5.8 // indirect
+ github.com/pion/datachannel v1.5.9 // indirect
github.com/pion/dtls/v2 v2.2.12 // indirect
- github.com/pion/ice/v2 v2.3.34 // indirect
- github.com/pion/interceptor v0.1.30 // indirect
+ github.com/pion/ice/v2 v2.3.36 // indirect
+ github.com/pion/interceptor v0.1.37 // indirect
github.com/pion/logging v0.2.2 // indirect
github.com/pion/mdns v0.0.12 // indirect
github.com/pion/randutil v0.1.0 // indirect
@@ -305,18 +305,18 @@ require (
github.com/pion/stun v0.6.1 // indirect
github.com/pion/transport/v2 v2.2.10 // indirect
github.com/pion/turn/v2 v2.1.6 // indirect
- github.com/pion/webrtc/v3 v3.3.0 // indirect
+ github.com/pion/webrtc/v3 v3.3.4 // indirect
github.com/piprate/json-gold v0.5.0 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/polydawn/refmt v0.89.0 // indirect
github.com/pquerna/cachecontrol v0.1.0 // indirect
- github.com/prometheus/client_golang v1.20.1 // indirect
+ github.com/prometheus/client_golang v1.20.5 // indirect
github.com/prometheus/client_model v0.6.1 // indirect
- github.com/prometheus/common v0.55.0 // indirect
+ github.com/prometheus/common v0.60.0 // indirect
github.com/prometheus/procfs v0.15.1 // indirect
- github.com/quic-go/qpack v0.4.0 // indirect
- github.com/quic-go/quic-go v0.46.0 // indirect
- github.com/quic-go/webtransport-go v0.8.0 // indirect
+ github.com/quic-go/qpack v0.5.1 // indirect
+ github.com/quic-go/quic-go v0.48.1 // indirect
+ github.com/quic-go/webtransport-go v0.8.1-0.20241018022711-4ac2c9250e66 // indirect
github.com/raulk/go-watchdog v1.3.0 // indirect
github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect
github.com/rogpeppe/go-internal v1.12.0 // indirect
@@ -344,7 +344,7 @@ require (
github.com/ulikunitz/xz v0.5.11 // indirect
github.com/wasmerio/wasmer-go v1.0.4 // indirect
github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1 // indirect
- github.com/wlynxg/anet v0.0.4 // indirect
+ github.com/wlynxg/anet v0.0.5 // indirect
github.com/x448/float16 v0.8.4 // indirect
github.com/zondax/hid v0.9.2 // indirect
github.com/zondax/ledger-go v0.14.3 // indirect
@@ -356,24 +356,24 @@ require (
go.opentelemetry.io/otel/sdk v1.31.0 // indirect
go.opentelemetry.io/otel/trace v1.31.0 // indirect
go.uber.org/dig v1.18.0 // indirect
- go.uber.org/fx v1.22.2 // indirect
- go.uber.org/mock v0.4.0 // indirect
+ go.uber.org/fx v1.23.0 // indirect
+ go.uber.org/mock v0.5.0 // indirect
go.uber.org/multierr v1.11.0 // indirect
- golang.org/x/mod v0.20.0 // indirect
- golang.org/x/net v0.28.0 // indirect
- golang.org/x/oauth2 v0.22.0 // indirect
+ golang.org/x/mod v0.21.0 // indirect
+ golang.org/x/net v0.30.0 // indirect
+ golang.org/x/oauth2 v0.23.0 // indirect
golang.org/x/sync v0.8.0 // indirect
golang.org/x/sys v0.26.0 // indirect
golang.org/x/term v0.25.0 // indirect
golang.org/x/text v0.19.0 // indirect
golang.org/x/time v0.5.0 // indirect
- golang.org/x/tools v0.24.0 // indirect
+ golang.org/x/tools v0.26.0 // indirect
gonum.org/v1/gonum v0.15.0 // indirect
google.golang.org/api v0.171.0 // indirect
google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20240814211410-ddb44dafa142 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240814211410-ddb44dafa142 // indirect
- google.golang.org/protobuf v1.34.2 // indirect
+ google.golang.org/protobuf v1.35.1 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
diff --git a/go.sum b/go.sum
index 99786417bc..39dbdab85d 100644
--- a/go.sum
+++ b/go.sum
@@ -1,3 +1,5 @@
+al.essio.dev/pkg/shellescape v1.5.1 h1:86HrALUujYS/h+GtqoB26SBEdkWfmMI6FubjXlsXyho=
+al.essio.dev/pkg/shellescape v1.5.1/go.mod h1:6sIqp7X2P6mThCQ7twERpZTuigpr6KbZWtls1U8I890=
buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.31.0-20230802163732-1c33ebd9ecfa.1 h1:tdpHgTbmbvEIARu+bixzmleMi14+3imnpoFXz+Qzjp4=
buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.31.0-20230802163732-1c33ebd9ecfa.1/go.mod h1:xafc+XIsTxTy76GJQ1TKgvJWsSugFBqMaN27WhUblew=
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
@@ -262,8 +264,6 @@ github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRF
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
github.com/alecthomas/units v0.0.0-20231202071711-9a357b53e9c9 h1:ez/4by2iGztzR4L0zgAOR8lTQK9VlyBVVd7G4omaOQs=
github.com/alecthomas/units v0.0.0-20231202071711-9a357b53e9c9/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE=
-github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVKJUX0=
-github.com/alessio/shellescape v1.4.1/go.mod h1:PZAiSCk0LJaZkiCSkPv8qIobYglO3FPpyFjDCtHLS30=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
@@ -427,8 +427,8 @@ github.com/cskr/pubsub v1.0.2 h1:vlOzMhl6PFn60gRlTQQsIfVwaPB/B/8MziK8FhEPt/0=
github.com/cskr/pubsub v1.0.2/go.mod h1:/8MzYXk/NJAz782G8RPkFzXTZVu63VotefPnR9TIRis=
github.com/cyware/ssi-sdk v0.0.0-20231229164914-f93f3006379f h1:72bD8UUtmnis7LACaaurCYx3UKVdMZ2vSEent7HNMg4=
github.com/cyware/ssi-sdk v0.0.0-20231229164914-f93f3006379f/go.mod h1:fXZNsGp0JHlOW4XyY3SQk1dy6D2I0HD+aiHY3Ku0el8=
-github.com/danieljoos/wincred v1.2.0 h1:ozqKHaLK0W/ii4KVbbvluM91W2H3Sh0BncbUNPS7jLE=
-github.com/danieljoos/wincred v1.2.0/go.mod h1:FzQLLMKBFdvu+osBrnFODiv32YGwCfx0SkRa/eYHgec=
+github.com/danieljoos/wincred v1.2.2 h1:774zMFJrqaeYCK2W57BgAem/MLi6mtSE47MB6BOJ0i0=
+github.com/danieljoos/wincred v1.2.2/go.mod h1:w7w4Utbrz8lqeMbDAK0lkNJUv5sAOkFi7nd/ogr0Uh8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
@@ -702,11 +702,13 @@ github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLe
github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
-github.com/google/pprof v0.0.0-20240727154555-813a5fbdbec8 h1:FKHo8hFI3A+7w0aUQuYXQ+6EN5stWmeY/AZqtM8xk9k=
-github.com/google/pprof v0.0.0-20240727154555-813a5fbdbec8/go.mod h1:K1liHPHnj73Fdn/EKuT8nrFqBihUSKXoLYU0BuatOYo=
+github.com/google/pprof v0.0.0-20241017200806-017d972448fc h1:NGyrhhFhwvRAZg02jnYVg3GBQy0qGBKmFQJwaPmpmxs=
+github.com/google/pprof v0.0.0-20241017200806-017d972448fc/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o=
github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw=
+github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4=
+github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ=
github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
@@ -854,8 +856,8 @@ github.com/invopop/yaml v0.3.1 h1:f0+ZpmhfBSS4MhG+4HYseMdJhoeeopbSKbq5Rpeelso=
github.com/invopop/yaml v0.3.1/go.mod h1:PMOp3nn4/12yEZUFfmOuNHJsZToEEOwoWsT+D81KkeA=
github.com/ipfs/bbloom v0.0.4 h1:Gi+8EGJ2y5qiD5FbsbpX/TMNcJw8gSqr7eyjHa4Fhvs=
github.com/ipfs/bbloom v0.0.4/go.mod h1:cS9YprKXpoZ9lT0n/Mw/a6/aFV6DTjTLYHeA+gyqMG0=
-github.com/ipfs/boxo v0.24.0 h1:D9gTU3QdxyjPMlJ6QfqhHTG3TIJPplKzjXLO2J30h9U=
-github.com/ipfs/boxo v0.24.0/go.mod h1:iP7xUPpHq2QAmVAjwtQvsNBTxTwLpFuy6ZpiRFwmzDA=
+github.com/ipfs/boxo v0.24.2 h1:feLM6DY6CNI0uSG3TvP/Hv4PdM/fsekjqSCqKtifF0E=
+github.com/ipfs/boxo v0.24.2/go.mod h1:Dt3TJjMZtF2QksMv2LC8pQlG9VQUiSV2DsHQzvDiroo=
github.com/ipfs/go-bitfield v1.1.0 h1:fh7FIo8bSwaJEh6DdTWbCeZ1eqOaOkKFI74SCnsWbGA=
github.com/ipfs/go-bitfield v1.1.0/go.mod h1:paqf1wjq/D2BBmzfTVFlJQ9IlFOZpg422HL0HqsGWHU=
github.com/ipfs/go-block-format v0.2.0 h1:ZqrkxBA2ICbDRbK8KJs/u0O3dlp6gmAuuXUJNiW1Ycs=
@@ -948,8 +950,8 @@ github.com/klauspost/compress v1.10.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYs
github.com/klauspost/compress v1.11.7/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs=
github.com/klauspost/compress v1.12.3/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg=
github.com/klauspost/compress v1.15.11/go.mod h1:QPwzmACJjUTFsnSHH934V6woptycfrDDJnH7hvFVbGM=
-github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA=
-github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
+github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=
+github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM=
github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
@@ -981,8 +983,8 @@ github.com/lestrrat-go/httprc v1.0.6 h1:qgmgIRhpvBqexMJjA/PmwSvhNk679oqD1RbovdCG
github.com/lestrrat-go/httprc v1.0.6/go.mod h1:mwwz3JMTPBjHUkkDv/IGJ39aALInZLrhBp0X7KGUZlo=
github.com/lestrrat-go/iter v1.0.2 h1:gMXo1q4c2pHmC3dn8LzRhJfP1ceCbgSiT9lUydIzltI=
github.com/lestrrat-go/iter v1.0.2/go.mod h1:Momfcq3AnRlRjI5b5O8/G5/BvpzrhoFTZcn06fEOPt4=
-github.com/lestrrat-go/jwx/v2 v2.1.1 h1:Y2ltVl8J6izLYFs54BVcpXLv5msSW4o8eXwnzZLI32E=
-github.com/lestrrat-go/jwx/v2 v2.1.1/go.mod h1:4LvZg7oxu6Q5VJwn7Mk/UwooNRnTHUpXBj2C4j3HNx0=
+github.com/lestrrat-go/jwx/v2 v2.1.2 h1:6poete4MPsO8+LAEVhpdrNI4Xp2xdiafgl2RD89moBc=
+github.com/lestrrat-go/jwx/v2 v2.1.2/go.mod h1:pO+Gz9whn7MPdbsqSJzG8TlEpMZCwQDXnFJ+zsUVh8Y=
github.com/lestrrat-go/option v1.0.1 h1:oAzP2fvZGQKWkvHa1/SAcFolBEca1oN+mQ7eooNBEYU=
github.com/lestrrat-go/option v1.0.1/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I=
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
@@ -991,10 +993,10 @@ github.com/libp2p/go-buffer-pool v0.1.0 h1:oK4mSFcQz7cTQIfqbe4MIj9gLW+mnanjyFtc6
github.com/libp2p/go-buffer-pool v0.1.0/go.mod h1:N+vh8gMqimBzdKkSMVuydVDq+UV5QTWy5HSiZacSbPg=
github.com/libp2p/go-cidranger v1.1.0 h1:ewPN8EZ0dd1LSnrtuwd4709PXVcITVeuwbag38yPW7c=
github.com/libp2p/go-cidranger v1.1.0/go.mod h1:KWZTfSr+r9qEo9OkI9/SIEeAtw+NNoU0dXIXt15Okic=
-github.com/libp2p/go-flow-metrics v0.1.0 h1:0iPhMI8PskQwzh57jB9WxIuIOQ0r+15PChFGkx3Q3WM=
-github.com/libp2p/go-flow-metrics v0.1.0/go.mod h1:4Xi8MX8wj5aWNDAZttg6UPmc0ZrnFNsMtpsYUClFtro=
-github.com/libp2p/go-libp2p v0.36.4 h1:ZaKyKSHBFbzs6CnAYMhaMc5QgV1UoCN+9WXrg8SEwI4=
-github.com/libp2p/go-libp2p v0.36.4/go.mod h1:4Y5vFyCUiJuluEPmpnKYf6WFx5ViKPUYs/ixe9ANFZ8=
+github.com/libp2p/go-flow-metrics v0.2.0 h1:EIZzjmeOE6c8Dav0sNv35vhZxATIXWZg6j/C08XmmDw=
+github.com/libp2p/go-flow-metrics v0.2.0/go.mod h1:st3qqfu8+pMfh+9Mzqb2GTiwrAGjIPszEjZmtksN8Jc=
+github.com/libp2p/go-libp2p v0.37.0 h1:8K3mcZgwTldydMCNOiNi/ZJrOB9BY+GlI3UxYzxBi9A=
+github.com/libp2p/go-libp2p v0.37.0/go.mod h1:GOKmSN99scDuYGTwaTbQPR8Nt6dxrK3ue7OjW2NGDg4=
github.com/libp2p/go-libp2p-asn-util v0.4.1 h1:xqL7++IKD9TBFMgnLPZR6/6iYhawHKHl950SO9L6n94=
github.com/libp2p/go-libp2p-asn-util v0.4.1/go.mod h1:d/NI6XZ9qxw67b4e+NgpQexCIiFYJjErASrYW4PFDN8=
github.com/libp2p/go-libp2p-gostream v0.6.0 h1:QfAiWeQRce6pqnYfmIVWJFXNdDyfiR/qkCnjyaZUPYU=
@@ -1157,8 +1159,8 @@ github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vv
github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE=
github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU=
github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c=
-github.com/onsi/ginkgo/v2 v2.20.0 h1:PE84V2mHqoT1sglvHc8ZdQtPcwmvvt29WLEEO3xmdZw=
-github.com/onsi/ginkgo/v2 v2.20.0/go.mod h1:lG9ey2Z29hR41WMVthyJBGUBcBhGOtoPF2VFMvBXFCI=
+github.com/onsi/ginkgo/v2 v2.20.2 h1:7NVCeyIWROIAheY21RLS+3j2bb52W0W82tkberYytp4=
+github.com/onsi/ginkgo/v2 v2.20.2/go.mod h1:K9gyxPIlb+aIvnZ8bd9Ak+YP18w3APlR+5coaZoE2ag=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
@@ -1212,15 +1214,15 @@ github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4=
github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8=
-github.com/pion/datachannel v1.5.8 h1:ph1P1NsGkazkjrvyMfhRBUAWMxugJjq2HfQifaOoSNo=
-github.com/pion/datachannel v1.5.8/go.mod h1:PgmdpoaNBLX9HNzNClmdki4DYW5JtI7Yibu8QzbL3tI=
+github.com/pion/datachannel v1.5.9 h1:LpIWAOYPyDrXtU+BW7X0Yt/vGtYxtXQ8ql7dFfYUVZA=
+github.com/pion/datachannel v1.5.9/go.mod h1:kDUuk4CU4Uxp82NH4LQZbISULkX/HtzKa4P7ldf9izE=
github.com/pion/dtls/v2 v2.2.7/go.mod h1:8WiMkebSHFD0T+dIU+UeBaoV7kDhOW5oDCzZ7WZ/F9s=
github.com/pion/dtls/v2 v2.2.12 h1:KP7H5/c1EiVAAKUmXyCzPiQe5+bCJrpOeKg/L05dunk=
github.com/pion/dtls/v2 v2.2.12/go.mod h1:d9SYc9fch0CqK90mRk1dC7AkzzpwJj6u2GU3u+9pqFE=
-github.com/pion/ice/v2 v2.3.34 h1:Ic1ppYCj4tUOcPAp76U6F3fVrlSw8A9JtRXLqw6BbUM=
-github.com/pion/ice/v2 v2.3.34/go.mod h1:mBF7lnigdqgtB+YHkaY/Y6s6tsyRyo4u4rPGRuOjUBQ=
-github.com/pion/interceptor v0.1.30 h1:au5rlVHsgmxNi+v/mjOPazbW1SHzfx7/hYOEYQnUcxA=
-github.com/pion/interceptor v0.1.30/go.mod h1:RQuKT5HTdkP2Fi0cuOS5G5WNymTjzXaGF75J4k7z2nc=
+github.com/pion/ice/v2 v2.3.36 h1:SopeXiVbbcooUg2EIR8sq4b13RQ8gzrkkldOVg+bBsc=
+github.com/pion/ice/v2 v2.3.36/go.mod h1:mBF7lnigdqgtB+YHkaY/Y6s6tsyRyo4u4rPGRuOjUBQ=
+github.com/pion/interceptor v0.1.37 h1:aRA8Zpab/wE7/c0O3fh1PqY0AJI3fCSEM5lRWJVorwI=
+github.com/pion/interceptor v0.1.37/go.mod h1:JzxbJ4umVTlZAf+/utHzNesY8tmRkM2lVmkS82TTj8Y=
github.com/pion/logging v0.2.2 h1:M9+AIj/+pxNsDfAT64+MAVgJO0rsyLnoJKCqf//DoeY=
github.com/pion/logging v0.2.2/go.mod h1:k0/tDVsRCX2Mb2ZEmTqNa7CWsQPc+YYCB7Q+5pahoms=
github.com/pion/mdns v0.0.12 h1:CiMYlY+O0azojWDmxdNr7ADGrnZ+V6Ilfner+6mSVK8=
@@ -1252,8 +1254,8 @@ github.com/pion/transport/v3 v3.0.7/go.mod h1:YleKiTZ4vqNxVwh77Z0zytYi7rXHl7j6uP
github.com/pion/turn/v2 v2.1.3/go.mod h1:huEpByKKHix2/b9kmTAM3YoX6MKP+/D//0ClgUYR2fY=
github.com/pion/turn/v2 v2.1.6 h1:Xr2niVsiPTB0FPtt+yAWKFUkU1eotQbGgpTIld4x1Gc=
github.com/pion/turn/v2 v2.1.6/go.mod h1:huEpByKKHix2/b9kmTAM3YoX6MKP+/D//0ClgUYR2fY=
-github.com/pion/webrtc/v3 v3.3.0 h1:Rf4u6n6U5t5sUxhYPQk/samzU/oDv7jk6BA5hyO2F9I=
-github.com/pion/webrtc/v3 v3.3.0/go.mod h1:hVmrDJvwhEertRWObeb1xzulzHGeVUoPlWvxdGzcfU0=
+github.com/pion/webrtc/v3 v3.3.4 h1:v2heQVnXTSqNRXcaFQVOhIOYkLMxOu1iJG8uy1djvkk=
+github.com/pion/webrtc/v3 v3.3.4/go.mod h1:liNa+E1iwyzyXqNUwvoMRNQ10x8h8FOeJKL8RkIbamE=
github.com/piprate/json-gold v0.5.0 h1:RmGh1PYboCFcchVFuh2pbSWAZy4XJaqTMU4KQYsApbM=
github.com/piprate/json-gold v0.5.0/go.mod h1:WZ501QQMbZZ+3pXFPhQKzNwS1+jls0oqov3uQ2WasLs=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
@@ -1277,8 +1279,8 @@ github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5Fsn
github.com/prometheus/client_golang v1.3.0/go.mod h1:hJaj2vgQTGQmVCsAACORcieXFeDPbaTKGT+JTgUa3og=
github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU=
github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M=
-github.com/prometheus/client_golang v1.20.1 h1:IMJXHOD6eARkQpxo8KkhgEVFlBNm+nkrFUyGlIu7Na8=
-github.com/prometheus/client_golang v1.20.1/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE=
+github.com/prometheus/client_golang v1.20.5 h1:cxppBPuYhUnsO6yo/aoRol4L7q7UFfdm+bR9r+8l63Y=
+github.com/prometheus/client_golang v1.20.5/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
@@ -1294,8 +1296,8 @@ github.com/prometheus/common v0.7.0/go.mod h1:DjGbpBbp5NYNiECxcL/VnbXCCaQpKd3tt2
github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4=
github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=
github.com/prometheus/common v0.15.0/go.mod h1:U+gB1OBLb1lF3O42bTCL+FK18tX9Oar16Clt/msog/s=
-github.com/prometheus/common v0.55.0 h1:KEi6DK7lXW/m7Ig5i47x0vRzuBsHuvJdi5ee6Y3G1dc=
-github.com/prometheus/common v0.55.0/go.mod h1:2SECS4xJG1kd8XF9IcM1gMX6510RAEL65zxzNImwdc8=
+github.com/prometheus/common v0.60.0 h1:+V9PAREWNvJMAuJ1x1BaWl9dewMW4YrHZQbx0sJNllA=
+github.com/prometheus/common v0.60.0/go.mod h1:h0LYf1R1deLSKtD4Vdg8gy4RuOvENW2J/h19V5NADQw=
github.com/prometheus/procfs v0.0.0-20180725123919-05ee40e3a273/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
@@ -1305,12 +1307,12 @@ github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4O
github.com/prometheus/procfs v0.3.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc=
github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk=
-github.com/quic-go/qpack v0.4.0 h1:Cr9BXA1sQS2SmDUWjSofMPNKmvF6IiIfDRmgU0w1ZCo=
-github.com/quic-go/qpack v0.4.0/go.mod h1:UZVnYIfi5GRk+zI9UMaCPsmZ2xKJP7XBUvVyT1Knj9A=
-github.com/quic-go/quic-go v0.46.0 h1:uuwLClEEyk1DNvchH8uCByQVjo3yKL9opKulExNDs7Y=
-github.com/quic-go/quic-go v0.46.0/go.mod h1:1dLehS7TIR64+vxGR70GDcatWTOtMX2PUtnKsjbTurI=
-github.com/quic-go/webtransport-go v0.8.0 h1:HxSrwun11U+LlmwpgM1kEqIqH90IT4N8auv/cD7QFJg=
-github.com/quic-go/webtransport-go v0.8.0/go.mod h1:N99tjprW432Ut5ONql/aUhSLT0YVSlwHohQsuac9WaM=
+github.com/quic-go/qpack v0.5.1 h1:giqksBPnT/HDtZ6VhtFKgoLOWmlyo9Ei6u9PqzIMbhI=
+github.com/quic-go/qpack v0.5.1/go.mod h1:+PC4XFrEskIVkcLzpEkbLqq1uCoxPhQuvK5rH1ZgaEg=
+github.com/quic-go/quic-go v0.48.1 h1:y/8xmfWI9qmGTc+lBr4jKRUWLGSlSigv847ULJ4hYXA=
+github.com/quic-go/quic-go v0.48.1/go.mod h1:yBgs3rWBOADpga7F+jJsb6Ybg1LSYiQvwWlLX+/6HMs=
+github.com/quic-go/webtransport-go v0.8.1-0.20241018022711-4ac2c9250e66 h1:4WFk6u3sOT6pLa1kQ50ZVdm8BQFgJNA117cepZxtLIg=
+github.com/quic-go/webtransport-go v0.8.1-0.20241018022711-4ac2c9250e66/go.mod h1:Vp72IJajgeOL6ddqrAhmp7IM9zbTcgkQxD/YdxrVwMw=
github.com/raulk/go-watchdog v1.3.0 h1:oUmdlHxdkXRJlwfG0O9omj8ukerm8MEQavSiDTEtBsk=
github.com/raulk/go-watchdog v1.3.0/go.mod h1:fIvOnLbF0b0ZwkB9YU4mOW9Did//4vPZtDqv66NfsMU=
github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
@@ -1498,8 +1500,8 @@ github.com/whyrusleeping/chunker v0.0.0-20181014151217-fe64bd25879f/go.mod h1:p9
github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1 h1:EKhdznlJHPMoKr0XTrX+IlJs1LH3lyx2nfr1dOlZ79k=
github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1/go.mod h1:8UvriyWtv5Q5EOgjHaSseUEdkQfvwFv1I/In/O2M9gc=
github.com/wlynxg/anet v0.0.3/go.mod h1:eay5PRQr7fIVAMbTbchTnO9gG65Hg/uYGdc7mguHxoA=
-github.com/wlynxg/anet v0.0.4 h1:0de1OFQxnNqAu+x2FAKKCVIrnfGKQbs7FQz++tB0+Uw=
-github.com/wlynxg/anet v0.0.4/go.mod h1:eay5PRQr7fIVAMbTbchTnO9gG65Hg/uYGdc7mguHxoA=
+github.com/wlynxg/anet v0.0.5 h1:J3VJGi1gvo0JwZ/P1/Yc/8p63SoW98B5dHkYDmpgvvU=
+github.com/wlynxg/anet v0.0.5/go.mod h1:eay5PRQr7fIVAMbTbchTnO9gG65Hg/uYGdc7mguHxoA=
github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM=
github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
@@ -1510,8 +1512,8 @@ github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
-github.com/zalando/go-keyring v0.2.5 h1:Bc2HHpjALryKD62ppdEzaFG6VxL6Bc+5v0LYpN8Lba8=
-github.com/zalando/go-keyring v0.2.5/go.mod h1:HL4k+OXQfJUWaMnqyuSOc0drfGPX2b51Du6K+MRgZMk=
+github.com/zalando/go-keyring v0.2.6 h1:r7Yc3+H+Ux0+M72zacZoItR3UDxeWfKTcabvkI8ua9s=
+github.com/zalando/go-keyring v0.2.6/go.mod h1:2TCrxYrbUNYfNS/Kgy/LSrkSQzZ5UPVH85RwfczwvcI=
github.com/zondax/hid v0.9.2 h1:WCJFnEDMiqGF64nlZz28E9qLVZ0KSJ7xpc5DLEyma2U=
github.com/zondax/hid v0.9.2/go.mod h1:l5wttcP0jwtdLjqjMMWFVEE7d1zO0jvSPA9OPZxWpEM=
github.com/zondax/ledger-go v0.14.3 h1:wEpJt2CEcBJ428md/5MgSLsXLBos98sBOyxNmCjfUCw=
@@ -1554,14 +1556,14 @@ go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ=
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/dig v1.18.0 h1:imUL1UiY0Mg4bqbFfsRQO5G4CGRBec/ZujWTvSVp3pw=
go.uber.org/dig v1.18.0/go.mod h1:Us0rSJiThwCv2GteUN0Q7OKvU7n5J4dxZ9JKUXozFdE=
-go.uber.org/fx v1.22.2 h1:iPW+OPxv0G8w75OemJ1RAnTUrF55zOJlXlo1TbJ0Buw=
-go.uber.org/fx v1.22.2/go.mod h1:o/D9n+2mLP6v1EG+qsdT1O8wKopYAsqZasju97SDFCU=
+go.uber.org/fx v1.23.0 h1:lIr/gYWQGfTwGcSXWXu4vP5Ws6iqnNEIY+F/aFzCKTg=
+go.uber.org/fx v1.23.0/go.mod h1:o/D9n+2mLP6v1EG+qsdT1O8wKopYAsqZasju97SDFCU=
go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A=
go.uber.org/goleak v1.1.11-0.20210813005559-691160354723/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
-go.uber.org/mock v0.4.0 h1:VcM4ZOtdbR4f6VXfiOpwpVJDL6lCReaZ6mw31wqh7KU=
-go.uber.org/mock v0.4.0/go.mod h1:a6FSlNadKUHUa9IP5Vyt1zh4fC7uAwxMutEAscFbkZc=
+go.uber.org/mock v0.5.0 h1:KAMbZvZPyBPWgD14IrIQ38QCyjwpvVVV6K/bHl1IwQU=
+go.uber.org/mock v0.5.0/go.mod h1:ge71pBPLYDk7QIi1LupWxdAykm7KIEFchiOqd6z7qMM=
go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4=
go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU=
@@ -1612,8 +1614,8 @@ golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u0
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw=
-golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa h1:ELnwvuAXPNtPk1TJRuGkI9fDTwym6AYBu0qzT8AcHdI=
-golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa/go.mod h1:akd2r19cwCdwSwWeIdzYQGa/EZZyqcOdwWiwj5L5eKQ=
+golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY=
+golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
@@ -1642,8 +1644,8 @@ golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
-golang.org/x/mod v0.20.0 h1:utOm6MM3R3dnawAiJgn0y+xvuYRsm1RKM/4giyfDgV0=
-golang.org/x/mod v0.20.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
+golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0=
+golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -1711,8 +1713,8 @@ golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI=
golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
-golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE=
-golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg=
+golang.org/x/net v0.30.0 h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4=
+golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
@@ -1740,8 +1742,8 @@ golang.org/x/oauth2 v0.0.0-20220822191816-0ebed06d0094/go.mod h1:h4gKUeWbJ4rQPri
golang.org/x/oauth2 v0.0.0-20220909003341-f21342109be1/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg=
golang.org/x/oauth2 v0.0.0-20221014153046-6fdb5e3db783/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg=
golang.org/x/oauth2 v0.1.0/go.mod h1:G9FE4dLTsbXUu90h/Pf85g4w1D+SSAgR+q46nJZ8M4A=
-golang.org/x/oauth2 v0.22.0 h1:BzDx2FehcG7jJwgWLELCdmLuxk2i+x9UDpSiss2u0ZA=
-golang.org/x/oauth2 v0.22.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
+golang.org/x/oauth2 v0.23.0 h1:PbgcYx2W7i4LvjJWEbf0ngHV6qJYr86PkAV3bXdLEbs=
+golang.org/x/oauth2 v0.23.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
golang.org/x/perf v0.0.0-20180704124530-6e6d33e29852/go.mod h1:JLpeXjPJfIyPr5TlbXLkXWLhP8nz10XfvxElABhCtcw=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -1967,8 +1969,8 @@ golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
-golang.org/x/tools v0.24.0 h1:J1shsA93PJUEVaUSaay7UXAyE8aimq3GW0pjlolpa24=
-golang.org/x/tools v0.24.0/go.mod h1:YhNqVBIfWHdzvTLs0d8LCuMhkKUgSUKldakyV7W/WDQ=
+golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ=
+golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -1977,8 +1979,8 @@ golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8T
golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
-golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU=
-golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
+golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY=
+golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
gonum.org/v1/gonum v0.15.0 h1:2lYxjRbTYyxkJxlhC+LvJIx3SsANPdRybu1tGj9/OrQ=
gonum.org/v1/gonum v0.15.0/go.mod h1:xzZVBJBtS+Mz4q0Yl2LJTk+OxOg4jiXZ7qBoM0uISGo=
google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
@@ -2221,8 +2223,8 @@ google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQ
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
-google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg=
-google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=
+google.golang.org/protobuf v1.35.1 h1:m3LfL6/Ca+fqnjnlqQXNpFPABW1UD7mjh8KO2mKFytA=
+google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
diff --git a/playground/package-lock.json b/playground/package-lock.json
index ffe998c5f6..d986bf13eb 100644
--- a/playground/package-lock.json
+++ b/playground/package-lock.json
@@ -15,17 +15,17 @@
"swagger-ui-react": "^5.17.14"
},
"devDependencies": {
- "@types/react": "^18.3.11",
+ "@types/react": "^18.3.12",
"@types/react-dom": "^18.3.1",
"@types/swagger-ui-react": "^4.18.3",
- "@typescript-eslint/eslint-plugin": "^8.10.0",
- "@typescript-eslint/parser": "^8.10.0",
+ "@typescript-eslint/eslint-plugin": "^8.11.0",
+ "@typescript-eslint/parser": "^8.11.0",
"@vitejs/plugin-react-swc": "^3.7.1",
"eslint": "^9.13.0",
"eslint-plugin-react-hooks": "^5.0.0",
- "eslint-plugin-react-refresh": "^0.4.13",
+ "eslint-plugin-react-refresh": "^0.4.14",
"typescript": "^5.6.3",
- "vite": "^5.4.9"
+ "vite": "^5.4.10"
}
},
"node_modules/@babel/runtime": {
@@ -2475,9 +2475,9 @@
}
},
"node_modules/@types/react": {
- "version": "18.3.11",
- "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.11.tgz",
- "integrity": "sha512-r6QZ069rFTjrEYgFdOck1gK7FLVsgJE7tTz0pQBczlBNUhBNk0MQH4UbnFSwjpQLMkLzgqvBBa+qGpLje16eTQ==",
+ "version": "18.3.12",
+ "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.12.tgz",
+ "integrity": "sha512-D2wOSq/d6Agt28q7rSI3jhU7G6aiuzljDGZ2hTZHIkrTLUI+AF3WMeKkEZ9nN2fkBAlcktT6vcZjDFiIhMYEQw==",
"devOptional": true,
"dependencies": {
"@types/prop-types": "*",
@@ -2524,16 +2524,17 @@
"license": "MIT"
},
"node_modules/@typescript-eslint/eslint-plugin": {
- "version": "8.10.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.10.0.tgz",
- "integrity": "sha512-phuB3hoP7FFKbRXxjl+DRlQDuJqhpOnm5MmtROXyWi3uS/Xg2ZXqiQfcG2BJHiN4QKyzdOJi3NEn/qTnjUlkmQ==",
+ "version": "8.11.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.11.0.tgz",
+ "integrity": "sha512-KhGn2LjW1PJT2A/GfDpiyOfS4a8xHQv2myUagTM5+zsormOmBlYsnQ6pobJ8XxJmh6hnHwa2Mbe3fPrDJoDhbA==",
"dev": true,
+ "license": "MIT",
"dependencies": {
"@eslint-community/regexpp": "^4.10.0",
- "@typescript-eslint/scope-manager": "8.10.0",
- "@typescript-eslint/type-utils": "8.10.0",
- "@typescript-eslint/utils": "8.10.0",
- "@typescript-eslint/visitor-keys": "8.10.0",
+ "@typescript-eslint/scope-manager": "8.11.0",
+ "@typescript-eslint/type-utils": "8.11.0",
+ "@typescript-eslint/utils": "8.11.0",
+ "@typescript-eslint/visitor-keys": "8.11.0",
"graphemer": "^1.4.0",
"ignore": "^5.3.1",
"natural-compare": "^1.4.0",
@@ -2556,63 +2557,17 @@
}
}
},
- "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": {
- "version": "8.10.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.10.0.tgz",
- "integrity": "sha512-AgCaEjhfql9MDKjMUxWvH7HjLeBqMCBfIaBbzzIcBbQPZE7CPh1m6FF+L75NUMJFMLYhCywJXIDEMa3//1A0dw==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.10.0",
- "@typescript-eslint/visitor-keys": "8.10.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": {
- "version": "8.10.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.10.0.tgz",
- "integrity": "sha512-k/E48uzsfJCRRbGLapdZgrX52csmWJ2rcowwPvOZ8lwPUv3xW6CcFeJAXgx4uJm+Ge4+a4tFOkdYvSpxhRhg1w==",
- "dev": true,
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": {
- "version": "8.10.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.10.0.tgz",
- "integrity": "sha512-k8nekgqwr7FadWk548Lfph6V3r9OVqjzAIVskE7orMZR23cGJjAOVazsZSJW+ElyjfTM4wx/1g88Mi70DDtG9A==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.10.0",
- "eslint-visitor-keys": "^3.4.3"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
"node_modules/@typescript-eslint/parser": {
- "version": "8.10.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.10.0.tgz",
- "integrity": "sha512-E24l90SxuJhytWJ0pTQydFT46Nk0Z+bsLKo/L8rtQSL93rQ6byd1V/QbDpHUTdLPOMsBCcYXZweADNCfOCmOAg==",
+ "version": "8.11.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.11.0.tgz",
+ "integrity": "sha512-lmt73NeHdy1Q/2ul295Qy3uninSqi6wQI18XwSpm8w0ZbQXUpjCAWP1Vlv/obudoBiIjJVjlztjQ+d/Md98Yxg==",
"dev": true,
+ "license": "BSD-2-Clause",
"dependencies": {
- "@typescript-eslint/scope-manager": "8.10.0",
- "@typescript-eslint/types": "8.10.0",
- "@typescript-eslint/typescript-estree": "8.10.0",
- "@typescript-eslint/visitor-keys": "8.10.0",
+ "@typescript-eslint/scope-manager": "8.11.0",
+ "@typescript-eslint/types": "8.11.0",
+ "@typescript-eslint/typescript-estree": "8.11.0",
+ "@typescript-eslint/visitor-keys": "8.11.0",
"debug": "^4.3.4"
},
"engines": {
@@ -2631,89 +2586,15 @@
}
}
},
- "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": {
- "version": "8.10.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.10.0.tgz",
- "integrity": "sha512-AgCaEjhfql9MDKjMUxWvH7HjLeBqMCBfIaBbzzIcBbQPZE7CPh1m6FF+L75NUMJFMLYhCywJXIDEMa3//1A0dw==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.10.0",
- "@typescript-eslint/visitor-keys": "8.10.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": {
- "version": "8.10.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.10.0.tgz",
- "integrity": "sha512-k/E48uzsfJCRRbGLapdZgrX52csmWJ2rcowwPvOZ8lwPUv3xW6CcFeJAXgx4uJm+Ge4+a4tFOkdYvSpxhRhg1w==",
- "dev": true,
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": {
- "version": "8.10.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.10.0.tgz",
- "integrity": "sha512-3OE0nlcOHaMvQ8Xu5gAfME3/tWVDpb/HxtpUZ1WeOAksZ/h/gwrBzCklaGzwZT97/lBbbxJ16dMA98JMEngW4w==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.10.0",
- "@typescript-eslint/visitor-keys": "8.10.0",
- "debug": "^4.3.4",
- "fast-glob": "^3.3.2",
- "is-glob": "^4.0.3",
- "minimatch": "^9.0.4",
- "semver": "^7.6.0",
- "ts-api-utils": "^1.3.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": {
- "version": "8.10.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.10.0.tgz",
- "integrity": "sha512-k8nekgqwr7FadWk548Lfph6V3r9OVqjzAIVskE7orMZR23cGJjAOVazsZSJW+ElyjfTM4wx/1g88Mi70DDtG9A==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.10.0",
- "eslint-visitor-keys": "^3.4.3"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
"node_modules/@typescript-eslint/scope-manager": {
- "version": "8.8.1",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.8.1.tgz",
- "integrity": "sha512-X4JdU+66Mazev/J0gfXlcC/dV6JI37h+93W9BRYXrSn0hrE64IoWgVkO9MSJgEzoWkxONgaQpICWg8vAN74wlA==",
+ "version": "8.11.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.11.0.tgz",
+ "integrity": "sha512-Uholz7tWhXmA4r6epo+vaeV7yjdKy5QFCERMjs1kMVsLRKIrSdM6o21W2He9ftp5PP6aWOVpD5zvrvuHZC0bMQ==",
"dev": true,
+ "license": "MIT",
"dependencies": {
- "@typescript-eslint/types": "8.8.1",
- "@typescript-eslint/visitor-keys": "8.8.1"
+ "@typescript-eslint/types": "8.11.0",
+ "@typescript-eslint/visitor-keys": "8.11.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2724,55 +2605,15 @@
}
},
"node_modules/@typescript-eslint/type-utils": {
- "version": "8.10.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.10.0.tgz",
- "integrity": "sha512-PCpUOpyQSpxBn230yIcK+LeCQaXuxrgCm2Zk1S+PTIRJsEfU6nJ0TtwyH8pIwPK/vJoA+7TZtzyAJSGBz+s/dg==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/typescript-estree": "8.10.0",
- "@typescript-eslint/utils": "8.10.0",
- "debug": "^4.3.4",
- "ts-api-utils": "^1.3.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": {
- "version": "8.10.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.10.0.tgz",
- "integrity": "sha512-k/E48uzsfJCRRbGLapdZgrX52csmWJ2rcowwPvOZ8lwPUv3xW6CcFeJAXgx4uJm+Ge4+a4tFOkdYvSpxhRhg1w==",
- "dev": true,
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": {
- "version": "8.10.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.10.0.tgz",
- "integrity": "sha512-3OE0nlcOHaMvQ8Xu5gAfME3/tWVDpb/HxtpUZ1WeOAksZ/h/gwrBzCklaGzwZT97/lBbbxJ16dMA98JMEngW4w==",
+ "version": "8.11.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.11.0.tgz",
+ "integrity": "sha512-ItiMfJS6pQU0NIKAaybBKkuVzo6IdnAhPFZA/2Mba/uBjuPQPet/8+zh5GtLHwmuFRShZx+8lhIs7/QeDHflOg==",
"dev": true,
+ "license": "MIT",
"dependencies": {
- "@typescript-eslint/types": "8.10.0",
- "@typescript-eslint/visitor-keys": "8.10.0",
+ "@typescript-eslint/typescript-estree": "8.11.0",
+ "@typescript-eslint/utils": "8.11.0",
"debug": "^4.3.4",
- "fast-glob": "^3.3.2",
- "is-glob": "^4.0.3",
- "minimatch": "^9.0.4",
- "semver": "^7.6.0",
"ts-api-utils": "^1.3.0"
},
"engines": {
@@ -2788,28 +2629,12 @@
}
}
},
- "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": {
- "version": "8.10.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.10.0.tgz",
- "integrity": "sha512-k8nekgqwr7FadWk548Lfph6V3r9OVqjzAIVskE7orMZR23cGJjAOVazsZSJW+ElyjfTM4wx/1g88Mi70DDtG9A==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.10.0",
- "eslint-visitor-keys": "^3.4.3"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
"node_modules/@typescript-eslint/types": {
- "version": "8.8.1",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.8.1.tgz",
- "integrity": "sha512-WCcTP4SDXzMd23N27u66zTKMuEevH4uzU8C9jf0RO4E04yVHgQgW+r+TeVTNnO1KIfrL8ebgVVYYMMO3+jC55Q==",
+ "version": "8.11.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.11.0.tgz",
+ "integrity": "sha512-tn6sNMHf6EBAYMvmPUaKaVeYvhUsrE6x+bXQTxjQRp360h1giATU0WvgeEys1spbvb5R+VpNOZ+XJmjD8wOUHw==",
"dev": true,
+ "license": "MIT",
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
@@ -2819,13 +2644,14 @@
}
},
"node_modules/@typescript-eslint/typescript-estree": {
- "version": "8.8.1",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.8.1.tgz",
- "integrity": "sha512-A5d1R9p+X+1js4JogdNilDuuq+EHZdsH9MjTVxXOdVFfTJXunKJR/v+fNNyO4TnoOn5HqobzfRlc70NC6HTcdg==",
+ "version": "8.11.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.11.0.tgz",
+ "integrity": "sha512-yHC3s1z1RCHoCz5t06gf7jH24rr3vns08XXhfEqzYpd6Hll3z/3g23JRi0jM8A47UFKNc3u/y5KIMx8Ynbjohg==",
"dev": true,
+ "license": "BSD-2-Clause",
"dependencies": {
- "@typescript-eslint/types": "8.8.1",
- "@typescript-eslint/visitor-keys": "8.8.1",
+ "@typescript-eslint/types": "8.11.0",
+ "@typescript-eslint/visitor-keys": "8.11.0",
"debug": "^4.3.4",
"fast-glob": "^3.3.2",
"is-glob": "^4.0.3",
@@ -2847,15 +2673,16 @@
}
},
"node_modules/@typescript-eslint/utils": {
- "version": "8.10.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.10.0.tgz",
- "integrity": "sha512-Oq4uZ7JFr9d1ZunE/QKy5egcDRXT/FrS2z/nlxzPua2VHFtmMvFNDvpq1m/hq0ra+T52aUezfcjGRIB7vNJF9w==",
+ "version": "8.11.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.11.0.tgz",
+ "integrity": "sha512-CYiX6WZcbXNJV7UNB4PLDIBtSdRmRI/nb0FMyqHPTQD1rMjA0foPLaPUV39C/MxkTd/QKSeX+Gb34PPsDVC35g==",
"dev": true,
+ "license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.4.0",
- "@typescript-eslint/scope-manager": "8.10.0",
- "@typescript-eslint/types": "8.10.0",
- "@typescript-eslint/typescript-estree": "8.10.0"
+ "@typescript-eslint/scope-manager": "8.11.0",
+ "@typescript-eslint/types": "8.11.0",
+ "@typescript-eslint/typescript-estree": "8.11.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2868,88 +2695,14 @@
"eslint": "^8.57.0 || ^9.0.0"
}
},
- "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": {
- "version": "8.10.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.10.0.tgz",
- "integrity": "sha512-AgCaEjhfql9MDKjMUxWvH7HjLeBqMCBfIaBbzzIcBbQPZE7CPh1m6FF+L75NUMJFMLYhCywJXIDEMa3//1A0dw==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.10.0",
- "@typescript-eslint/visitor-keys": "8.10.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": {
- "version": "8.10.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.10.0.tgz",
- "integrity": "sha512-k/E48uzsfJCRRbGLapdZgrX52csmWJ2rcowwPvOZ8lwPUv3xW6CcFeJAXgx4uJm+Ge4+a4tFOkdYvSpxhRhg1w==",
- "dev": true,
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": {
- "version": "8.10.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.10.0.tgz",
- "integrity": "sha512-3OE0nlcOHaMvQ8Xu5gAfME3/tWVDpb/HxtpUZ1WeOAksZ/h/gwrBzCklaGzwZT97/lBbbxJ16dMA98JMEngW4w==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.10.0",
- "@typescript-eslint/visitor-keys": "8.10.0",
- "debug": "^4.3.4",
- "fast-glob": "^3.3.2",
- "is-glob": "^4.0.3",
- "minimatch": "^9.0.4",
- "semver": "^7.6.0",
- "ts-api-utils": "^1.3.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": {
- "version": "8.10.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.10.0.tgz",
- "integrity": "sha512-k8nekgqwr7FadWk548Lfph6V3r9OVqjzAIVskE7orMZR23cGJjAOVazsZSJW+ElyjfTM4wx/1g88Mi70DDtG9A==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.10.0",
- "eslint-visitor-keys": "^3.4.3"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
"node_modules/@typescript-eslint/visitor-keys": {
- "version": "8.8.1",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.8.1.tgz",
- "integrity": "sha512-0/TdC3aeRAsW7MDvYRwEc1Uwm0TIBfzjPFgg60UU2Haj5qsCs9cc3zNgY71edqE3LbWfF/WoZQd3lJoDXFQpag==",
+ "version": "8.11.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.11.0.tgz",
+ "integrity": "sha512-EaewX6lxSjRJnc+99+dqzTeoDZUfyrA52d2/HRrkI830kgovWsmIiTfmr0NZorzqic7ga+1bS60lRBUgR3n/Bw==",
"dev": true,
+ "license": "MIT",
"dependencies": {
- "@typescript-eslint/types": "8.8.1",
+ "@typescript-eslint/types": "8.11.0",
"eslint-visitor-keys": "^3.4.3"
},
"engines": {
@@ -3627,9 +3380,9 @@
}
},
"node_modules/eslint-plugin-react-refresh": {
- "version": "0.4.13",
- "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.13.tgz",
- "integrity": "sha512-f1EppwrpJRWmqDTyvAyomFVDYRtrS7iTEqv3nokETnMiMzs2SSTmKRTACce4O2p4jYyowiSMvpdwC/RLcMFhuQ==",
+ "version": "0.4.14",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.14.tgz",
+ "integrity": "sha512-aXvzCTK7ZBv1e7fahFuR3Z/fyQQSIQ711yPgYRj+Oj64tyTgO4iQIDmYXDBqvSWQ/FA4OSCsXOStlF+noU0/NA==",
"dev": true,
"peerDependencies": {
"eslint": ">=7"
@@ -4563,6 +4316,7 @@
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
"integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
"dev": true,
+ "license": "MIT",
"dependencies": {
"braces": "^3.0.3",
"picomatch": "^2.3.1"
@@ -6154,9 +5908,9 @@
"optional": true
},
"node_modules/vite": {
- "version": "5.4.9",
- "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.9.tgz",
- "integrity": "sha512-20OVpJHh0PAM0oSOELa5GaZNWeDjcAvQjGXy2Uyr+Tp+/D2/Hdz6NLgpJLsarPTA2QJ6v8mX2P1ZfbsSKvdMkg==",
+ "version": "5.4.10",
+ "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.10.tgz",
+ "integrity": "sha512-1hvaPshuPUtxeQ0hsVH3Mud0ZanOLwVTneA1EgbAM5LhaZEqyPWGRQ7BtaMvUrTDeEaC8pxtj6a6jku3x4z6SQ==",
"dev": true,
"dependencies": {
"esbuild": "^0.21.3",
diff --git a/playground/package.json b/playground/package.json
index 8bb52949fa..e63e8ab850 100644
--- a/playground/package.json
+++ b/playground/package.json
@@ -17,16 +17,16 @@
"swagger-ui-react": "^5.17.14"
},
"devDependencies": {
- "@types/react": "^18.3.11",
+ "@types/react": "^18.3.12",
"@types/react-dom": "^18.3.1",
"@types/swagger-ui-react": "^4.18.3",
- "@typescript-eslint/eslint-plugin": "^8.10.0",
- "@typescript-eslint/parser": "^8.10.0",
+ "@typescript-eslint/eslint-plugin": "^8.11.0",
+ "@typescript-eslint/parser": "^8.11.0",
"@vitejs/plugin-react-swc": "^3.7.1",
"eslint": "^9.13.0",
"eslint-plugin-react-hooks": "^5.0.0",
- "eslint-plugin-react-refresh": "^0.4.13",
+ "eslint-plugin-react-refresh": "^0.4.14",
"typescript": "^5.6.3",
- "vite": "^5.4.9"
+ "vite": "^5.4.10"
}
}
From b4b2bf23dfb0d0fb83f1fb1d505f44abbcf58c56 Mon Sep 17 00:00:00 2001
From: AndrewSisley
Date: Tue, 29 Oct 2024 12:04:24 -0400
Subject: [PATCH 11/47] refactor: Remove indirection from crdt packages (#3192)
## Relevant issue(s)
Resolves #3191
## Description
Removes various items of indirection from the merkle/crdt and core/crdt
packages that was making the code quite a lot harder to follow than it
need to be.
---
internal/core/crdt/base.go | 45 ++++------------
internal/core/crdt/base_test.go | 28 +++-------
internal/core/crdt/composite.go | 19 ++++---
internal/core/crdt/counter.go | 32 ++++++-----
internal/core/crdt/lwwreg.go | 32 ++++++-----
internal/core/crdt/lwwreg_test.go | 70 -------------------------
internal/core/replicated.go | 9 ----
internal/merkle/crdt/composite.go | 13 +++--
internal/merkle/crdt/counter.go | 25 +++++----
internal/merkle/crdt/lwwreg.go | 23 ++++----
internal/merkle/crdt/merklecrdt.go | 40 ++------------
internal/merkle/crdt/merklecrdt_test.go | 55 -------------------
12 files changed, 107 insertions(+), 284 deletions(-)
delete mode 100644 internal/merkle/crdt/merklecrdt_test.go
diff --git a/internal/core/crdt/base.go b/internal/core/crdt/base.go
index 87a4af445f..7d158af6b8 100644
--- a/internal/core/crdt/base.go
+++ b/internal/core/crdt/base.go
@@ -21,40 +21,9 @@ import (
"github.com/sourcenetwork/defradb/internal/core"
)
-// baseCRDT is embedded as a base layer into all
-// the core CRDT implementations to reduce code
-// duplication, and better manage the overhead
-// tasks that all the CRDTs need to implement anyway
-type baseCRDT struct {
- store datastore.DSReaderWriter
- key core.DataStoreKey
-
- // schemaVersionKey is the schema version datastore key at the time of commit.
- //
- // It can be used to identify the collection datastructure state at the time of commit.
- schemaVersionKey core.CollectionSchemaVersionKey
-
- // fieldName holds the name of the field hosting this CRDT, if this is a field level
- // commit.
- fieldName string
-}
-
-func newBaseCRDT(
- store datastore.DSReaderWriter,
- key core.DataStoreKey,
- schemaVersionKey core.CollectionSchemaVersionKey,
- fieldName string,
-) baseCRDT {
- return baseCRDT{
- store: store,
- key: key,
- schemaVersionKey: schemaVersionKey,
- fieldName: fieldName,
- }
-}
-
-func (base baseCRDT) setPriority(
+func setPriority(
ctx context.Context,
+ store datastore.DSReaderWriter,
key core.DataStoreKey,
priority uint64,
) error {
@@ -65,13 +34,17 @@ func (base baseCRDT) setPriority(
return ErrEncodingPriority
}
- return base.store.Put(ctx, prioK.ToDS(), buf[0:n])
+ return store.Put(ctx, prioK.ToDS(), buf[0:n])
}
// get the current priority for given key
-func (base baseCRDT) getPriority(ctx context.Context, key core.DataStoreKey) (uint64, error) {
+func getPriority(
+ ctx context.Context,
+ store datastore.DSReaderWriter,
+ key core.DataStoreKey,
+) (uint64, error) {
pKey := key.WithPriorityFlag()
- pbuf, err := base.store.Get(ctx, pKey.ToDS())
+ pbuf, err := store.Get(ctx, pKey.ToDS())
if err != nil {
if errors.Is(err, ds.ErrNotFound) {
return 0, nil
diff --git a/internal/core/crdt/base_test.go b/internal/core/crdt/base_test.go
index 661c5cb7ce..3943d375ce 100644
--- a/internal/core/crdt/base_test.go
+++ b/internal/core/crdt/base_test.go
@@ -24,47 +24,31 @@ func newDS() datastore.DSReaderWriter {
return datastore.AsDSReaderWriter(ds.NewMapDatastore())
}
-func newSeededDS() datastore.DSReaderWriter {
- return newDS()
-}
-
-func exampleBaseCRDT() baseCRDT {
- return newBaseCRDT(newSeededDS(), core.DataStoreKey{}, core.CollectionSchemaVersionKey{}, "")
-}
-
-func TestBaseCRDTNew(t *testing.T) {
- base := newBaseCRDT(newDS(), core.DataStoreKey{}, core.CollectionSchemaVersionKey{}, "")
- if base.store == nil {
- t.Error("newBaseCRDT needs to init store")
- }
-}
-
func TestBaseCRDTvalueKey(t *testing.T) {
- base := exampleBaseCRDT()
- vk := base.key.WithDocID("mykey").WithValueFlag()
+ vk := core.DataStoreKey{}.WithDocID("mykey").WithValueFlag()
if vk.ToString() != "/v/mykey" {
t.Errorf("Incorrect valueKey. Have %v, want %v", vk.ToString(), "/v/mykey")
}
}
func TestBaseCRDTprioryKey(t *testing.T) {
- base := exampleBaseCRDT()
- pk := base.key.WithDocID("mykey").WithPriorityFlag()
+ pk := core.DataStoreKey{}.WithDocID("mykey").WithPriorityFlag()
if pk.ToString() != "/p/mykey" {
t.Errorf("Incorrect priorityKey. Have %v, want %v", pk.ToString(), "/p/mykey")
}
}
func TestBaseCRDTSetGetPriority(t *testing.T) {
- base := exampleBaseCRDT()
+ store := newDS()
+
ctx := context.Background()
- err := base.setPriority(ctx, base.key.WithDocID("mykey"), 10)
+ err := setPriority(ctx, store, core.DataStoreKey{}.WithDocID("mykey"), 10)
if err != nil {
t.Errorf("baseCRDT failed to set Priority. err: %v", err)
return
}
- priority, err := base.getPriority(ctx, base.key.WithDocID("mykey"))
+ priority, err := getPriority(ctx, store, core.DataStoreKey{}.WithDocID("mykey"))
if err != nil {
t.Errorf("baseCRDT failed to get priority. err: %v", err)
return
diff --git a/internal/core/crdt/composite.go b/internal/core/crdt/composite.go
index d1be34d64a..1886b4574d 100644
--- a/internal/core/crdt/composite.go
+++ b/internal/core/crdt/composite.go
@@ -76,7 +76,13 @@ func (delta *CompositeDAGDelta) SetPriority(prio uint64) {
// CompositeDAG is a CRDT structure that is used to track a collection of sub MerkleCRDTs.
type CompositeDAG struct {
- baseCRDT
+ store datastore.DSReaderWriter
+ key core.DataStoreKey
+
+ // schemaVersionKey is the schema version datastore key at the time of commit.
+ //
+ // It can be used to identify the collection datastructure state at the time of commit.
+ schemaVersionKey core.CollectionSchemaVersionKey
}
var _ core.ReplicatedData = (*CompositeDAG)(nil)
@@ -86,12 +92,11 @@ func NewCompositeDAG(
schemaVersionKey core.CollectionSchemaVersionKey,
key core.DataStoreKey,
) CompositeDAG {
- return CompositeDAG{newBaseCRDT(store, key, schemaVersionKey, "")}
-}
-
-// Value is a no-op for a CompositeDAG.
-func (c CompositeDAG) Value(ctx context.Context) ([]byte, error) {
- return nil, nil
+ return CompositeDAG{
+ store: store,
+ key: key,
+ schemaVersionKey: schemaVersionKey,
+ }
}
// Set returns a new composite DAG delta CRDT with the given status.
diff --git a/internal/core/crdt/counter.go b/internal/core/crdt/counter.go
index 4aa9a40793..966052b5f0 100644
--- a/internal/core/crdt/counter.go
+++ b/internal/core/crdt/counter.go
@@ -77,7 +77,18 @@ func (delta *CounterDelta) SetPriority(prio uint64) {
// Counter, is a simple CRDT type that allows increment/decrement
// of an Int and Float data types that ensures convergence.
type Counter struct {
- baseCRDT
+ store datastore.DSReaderWriter
+ key core.DataStoreKey
+
+ // schemaVersionKey is the schema version datastore key at the time of commit.
+ //
+ // It can be used to identify the collection datastructure state at the time of commit.
+ schemaVersionKey core.CollectionSchemaVersionKey
+
+ // fieldName holds the name of the field hosting this CRDT, if this is a field level
+ // commit.
+ fieldName string
+
AllowDecrement bool
Kind client.ScalarKind
}
@@ -93,17 +104,14 @@ func NewCounter(
allowDecrement bool,
kind client.ScalarKind,
) Counter {
- return Counter{newBaseCRDT(store, key, schemaVersionKey, fieldName), allowDecrement, kind}
-}
-
-// Value gets the current counter value
-func (c Counter) Value(ctx context.Context) ([]byte, error) {
- valueK := c.key.WithValueFlag()
- buf, err := c.store.Get(ctx, valueK.ToDS())
- if err != nil {
- return nil, err
+ return Counter{
+ store: store,
+ key: key,
+ schemaVersionKey: schemaVersionKey,
+ fieldName: fieldName,
+ AllowDecrement: allowDecrement,
+ Kind: kind,
}
- return buf, nil
}
// Set generates a new delta with the supplied value.
@@ -184,7 +192,7 @@ func (c Counter) incrementValue(
return NewErrFailedToStoreValue(err)
}
- return c.setPriority(ctx, c.key, priority)
+ return setPriority(ctx, c.store, c.key, priority)
}
func (c Counter) CType() client.CType {
diff --git a/internal/core/crdt/lwwreg.go b/internal/core/crdt/lwwreg.go
index e27a4c5ace..75e1244374 100644
--- a/internal/core/crdt/lwwreg.go
+++ b/internal/core/crdt/lwwreg.go
@@ -65,7 +65,17 @@ func (delta *LWWRegDelta) SetPriority(prio uint64) {
// LWWRegister, Last-Writer-Wins Register, is a simple CRDT type that allows set/get
// of an arbitrary data type that ensures convergence.
type LWWRegister struct {
- baseCRDT
+ store datastore.DSReaderWriter
+ key core.DataStoreKey
+
+ // schemaVersionKey is the schema version datastore key at the time of commit.
+ //
+ // It can be used to identify the collection datastructure state at the time of commit.
+ schemaVersionKey core.CollectionSchemaVersionKey
+
+ // fieldName holds the name of the field hosting this CRDT, if this is a field level
+ // commit.
+ fieldName string
}
var _ core.ReplicatedData = (*LWWRegister)(nil)
@@ -77,18 +87,12 @@ func NewLWWRegister(
key core.DataStoreKey,
fieldName string,
) LWWRegister {
- return LWWRegister{newBaseCRDT(store, key, schemaVersionKey, fieldName)}
-}
-
-// Value gets the current register value
-// RETURN STATE
-func (reg LWWRegister) Value(ctx context.Context) ([]byte, error) {
- valueK := reg.key.WithValueFlag()
- buf, err := reg.store.Get(ctx, valueK.ToDS())
- if err != nil {
- return nil, err
+ return LWWRegister{
+ store: store,
+ key: key,
+ schemaVersionKey: schemaVersionKey,
+ fieldName: fieldName,
}
- return buf, nil
}
// Set generates a new delta with the supplied value
@@ -116,7 +120,7 @@ func (reg LWWRegister) Merge(ctx context.Context, delta core.Delta) error {
}
func (reg LWWRegister) setValue(ctx context.Context, val []byte, priority uint64) error {
- curPrio, err := reg.getPriority(ctx, reg.key)
+ curPrio, err := getPriority(ctx, reg.store, reg.key)
if err != nil {
return NewErrFailedToGetPriority(err)
}
@@ -161,5 +165,5 @@ func (reg LWWRegister) setValue(ctx context.Context, val []byte, priority uint64
}
}
- return reg.setPriority(ctx, reg.key, priority)
+ return setPriority(ctx, reg.store, reg.key, priority)
}
diff --git a/internal/core/crdt/lwwreg_test.go b/internal/core/crdt/lwwreg_test.go
index 5b56df7636..087adecb70 100644
--- a/internal/core/crdt/lwwreg_test.go
+++ b/internal/core/crdt/lwwreg_test.go
@@ -11,12 +11,10 @@
package crdt
import (
- "context"
"reflect"
"testing"
ds "github.com/ipfs/go-datastore"
- "github.com/stretchr/testify/require"
"github.com/sourcenetwork/defradb/datastore"
"github.com/sourcenetwork/defradb/internal/core"
@@ -32,15 +30,6 @@ func setupLWWRegister() LWWRegister {
return NewLWWRegister(store, core.CollectionSchemaVersionKey{}, key, "")
}
-func setupLoadedLWWRegister(t *testing.T, ctx context.Context) LWWRegister {
- lww := setupLWWRegister()
- addDelta := lww.Set([]byte("test"))
- addDelta.SetPriority(1)
- err := lww.Merge(ctx, addDelta)
- require.NoError(t, err)
- return lww
-}
-
func TestLWWRegisterAddDelta(t *testing.T) {
lww := setupLWWRegister()
addDelta := lww.Set([]byte("test"))
@@ -50,65 +39,6 @@ func TestLWWRegisterAddDelta(t *testing.T) {
}
}
-func TestLWWRegisterInitialMerge(t *testing.T) {
- ctx := context.Background()
- lww := setupLWWRegister()
- addDelta := lww.Set([]byte("test"))
- addDelta.SetPriority(1)
- err := lww.Merge(ctx, addDelta)
- if err != nil {
- t.Errorf("Unexpected error: %s\n", err)
- return
- }
-
- val, err := lww.Value(ctx)
- if err != nil {
- t.Errorf("Unexpected error: %s", err)
- return
- }
-
- expectedVal := []byte("test")
- if string(val) != string(expectedVal) {
- t.Errorf("Mismatch value for LWWRegister, was %s want %s", val, expectedVal)
- }
-}
-
-func TestLWWRegisterFollowupMerge(t *testing.T) {
- ctx := context.Background()
- lww := setupLoadedLWWRegister(t, ctx)
- addDelta := lww.Set([]byte("test2"))
- addDelta.SetPriority(2)
- err := lww.Merge(ctx, addDelta)
- require.NoError(t, err)
-
- val, err := lww.Value(ctx)
- if err != nil {
- t.Error(err)
- }
-
- if string(val) != string([]byte("test2")) {
- t.Errorf("Incorrect merge state, want %s, have %s", []byte("test2"), val)
- }
-}
-
-func TestLWWRegisterOldMerge(t *testing.T) {
- ctx := context.Background()
- lww := setupLoadedLWWRegister(t, ctx)
- addDelta := lww.Set([]byte("test-1"))
- addDelta.SetPriority(0)
- err := lww.Merge(ctx, addDelta)
- require.NoError(t, err)
-
- val, err := lww.Value(ctx)
- if err != nil {
- t.Error(err)
- }
-
- if string(val) != string([]byte("test")) {
- t.Errorf("Incorrect merge state, want %s, have %s", []byte("test"), val)
- }
-}
-
func TestLWWRegisterDeltaInit(t *testing.T) {
delta := &LWWRegDelta{}
diff --git a/internal/core/replicated.go b/internal/core/replicated.go
index f3f1b89f2c..d833244b78 100644
--- a/internal/core/replicated.go
+++ b/internal/core/replicated.go
@@ -12,19 +12,10 @@ package core
import (
"context"
-
- cid "github.com/ipld/go-ipld-prime/linking/cid"
)
// ReplicatedData is a data type that allows concurrent writers to deterministically merge other
// replicated data so as to converge on the same state.
type ReplicatedData interface {
Merge(ctx context.Context, other Delta) error
- Value(ctx context.Context) ([]byte, error)
-}
-
-// PersistedReplicatedData persists a ReplicatedData to an underlying datastore.
-type PersistedReplicatedData interface {
- ReplicatedData
- Publish(Delta) (cid.Link, error)
}
diff --git a/internal/merkle/crdt/composite.go b/internal/merkle/crdt/composite.go
index bf277dddba..fe9c13a0f5 100644
--- a/internal/merkle/crdt/composite.go
+++ b/internal/merkle/crdt/composite.go
@@ -24,11 +24,13 @@ import (
// MerkleCompositeDAG is a MerkleCRDT implementation of the CompositeDAG using MerkleClocks.
type MerkleCompositeDAG struct {
- *baseMerkleCRDT
+ clock *clock.MerkleClock
// core.ReplicatedData
reg corecrdt.CompositeDAG
}
+var _ MerkleCRDT = (*MerkleCompositeDAG)(nil)
+
// NewMerkleCompositeDAG creates a new instance (or loaded from DB) of a MerkleCRDT
// backed by a CompositeDAG CRDT.
func NewMerkleCompositeDAG(
@@ -44,14 +46,17 @@ func NewMerkleCompositeDAG(
clock := clock.NewMerkleClock(store.Headstore(), store.Blockstore(), store.Encstore(),
key.ToHeadStoreKey(), compositeDag)
- base := &baseMerkleCRDT{clock: clock, crdt: compositeDag}
return &MerkleCompositeDAG{
- baseMerkleCRDT: base,
- reg: compositeDag,
+ clock: clock,
+ reg: compositeDag,
}
}
+func (m *MerkleCompositeDAG) Clock() *clock.MerkleClock {
+ return m.clock
+}
+
// Delete sets the values of CompositeDAG for a delete.
func (m *MerkleCompositeDAG) Delete(
ctx context.Context,
diff --git a/internal/merkle/crdt/counter.go b/internal/merkle/crdt/counter.go
index 21b26785b6..c43a795294 100644
--- a/internal/merkle/crdt/counter.go
+++ b/internal/merkle/crdt/counter.go
@@ -23,11 +23,12 @@ import (
// MerkleCounter is a MerkleCRDT implementation of the Counter using MerkleClocks.
type MerkleCounter struct {
- *baseMerkleCRDT
-
- reg crdt.Counter
+ clock *clock.MerkleClock
+ reg crdt.Counter
}
+var _ MerkleCRDT = (*MerkleCounter)(nil)
+
// NewMerkleCounter creates a new instance (or loaded from DB) of a MerkleCRDT
// backed by a Counter CRDT.
func NewMerkleCounter(
@@ -40,26 +41,30 @@ func NewMerkleCounter(
) *MerkleCounter {
register := crdt.NewCounter(store.Datastore(), schemaVersionKey, key, fieldName, allowDecrement, kind)
clk := clock.NewMerkleClock(store.Headstore(), store.Blockstore(), store.Encstore(), key.ToHeadStoreKey(), register)
- base := &baseMerkleCRDT{clock: clk, crdt: register}
+
return &MerkleCounter{
- baseMerkleCRDT: base,
- reg: register,
+ clock: clk,
+ reg: register,
}
}
+func (m *MerkleCounter) Clock() *clock.MerkleClock {
+ return m.clock
+}
+
// Save the value of the Counter to the DAG.
-func (mc *MerkleCounter) Save(ctx context.Context, data any) (cidlink.Link, []byte, error) {
+func (m *MerkleCounter) Save(ctx context.Context, data any) (cidlink.Link, []byte, error) {
value, ok := data.(*DocField)
if !ok {
- return cidlink.Link{}, nil, NewErrUnexpectedValueType(mc.reg.CType(), &client.FieldValue{}, data)
+ return cidlink.Link{}, nil, NewErrUnexpectedValueType(m.reg.CType(), &client.FieldValue{}, data)
}
bytes, err := value.FieldValue.Bytes()
if err != nil {
return cidlink.Link{}, nil, err
}
- delta, err := mc.reg.Increment(ctx, bytes)
+ delta, err := m.reg.Increment(ctx, bytes)
if err != nil {
return cidlink.Link{}, nil, err
}
- return mc.clock.AddDelta(ctx, delta)
+ return m.clock.AddDelta(ctx, delta)
}
diff --git a/internal/merkle/crdt/lwwreg.go b/internal/merkle/crdt/lwwreg.go
index 00c70dc4a9..d24c2a107e 100644
--- a/internal/merkle/crdt/lwwreg.go
+++ b/internal/merkle/crdt/lwwreg.go
@@ -23,11 +23,12 @@ import (
// MerkleLWWRegister is a MerkleCRDT implementation of the LWWRegister using MerkleClocks.
type MerkleLWWRegister struct {
- *baseMerkleCRDT
-
- reg corecrdt.LWWRegister
+ clock *clock.MerkleClock
+ reg corecrdt.LWWRegister
}
+var _ MerkleCRDT = (*MerkleLWWRegister)(nil)
+
// NewMerkleLWWRegister creates a new instance (or loaded from DB) of a MerkleCRDT
// backed by a LWWRegister CRDT.
func NewMerkleLWWRegister(
@@ -38,15 +39,19 @@ func NewMerkleLWWRegister(
) *MerkleLWWRegister {
register := corecrdt.NewLWWRegister(store.Datastore(), schemaVersionKey, key, fieldName)
clk := clock.NewMerkleClock(store.Headstore(), store.Blockstore(), store.Encstore(), key.ToHeadStoreKey(), register)
- base := &baseMerkleCRDT{clock: clk, crdt: register}
+
return &MerkleLWWRegister{
- baseMerkleCRDT: base,
- reg: register,
+ clock: clk,
+ reg: register,
}
}
+func (m *MerkleLWWRegister) Clock() *clock.MerkleClock {
+ return m.clock
+}
+
// Save the value of the register to the DAG.
-func (mlwwreg *MerkleLWWRegister) Save(ctx context.Context, data any) (cidlink.Link, []byte, error) {
+func (m *MerkleLWWRegister) Save(ctx context.Context, data any) (cidlink.Link, []byte, error) {
value, ok := data.(*DocField)
if !ok {
return cidlink.Link{}, nil, NewErrUnexpectedValueType(client.LWW_REGISTER, &client.FieldValue{}, data)
@@ -58,6 +63,6 @@ func (mlwwreg *MerkleLWWRegister) Save(ctx context.Context, data any) (cidlink.L
// Set() call on underlying LWWRegister CRDT
// persist/publish delta
- delta := mlwwreg.reg.Set(bytes)
- return mlwwreg.clock.AddDelta(ctx, delta)
+ delta := m.reg.Set(bytes)
+ return m.clock.AddDelta(ctx, delta)
}
diff --git a/internal/merkle/crdt/merklecrdt.go b/internal/merkle/crdt/merklecrdt.go
index d6e73208d5..a5cc00a9e1 100644
--- a/internal/merkle/crdt/merklecrdt.go
+++ b/internal/merkle/crdt/merklecrdt.go
@@ -21,9 +21,11 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/datastore"
"github.com/sourcenetwork/defradb/internal/core"
- coreblock "github.com/sourcenetwork/defradb/internal/core/block"
+ "github.com/sourcenetwork/defradb/internal/merkle/clock"
)
+// Stores is a trimmed down [datastore.Multistore] that declares only the sub-stores
+// that should be accessed by this package and it's children.
type Stores interface {
Datastore() datastore.DSReaderWriter
Blockstore() datastore.Blockstore
@@ -35,44 +37,10 @@ type Stores interface {
// CRDT payload. It implements the ReplicatedData interface
// so it can be merged with any given semantics.
type MerkleCRDT interface {
- core.ReplicatedData
- Clock() MerkleClock
+ Clock() *clock.MerkleClock
Save(ctx context.Context, data any) (cidlink.Link, []byte, error)
}
-// MerkleClock is the logical clock implementation that manages writing to and from
-// the MerkleDAG structure, ensuring a causal ordering of events.
-type MerkleClock interface {
- AddDelta(
- ctx context.Context,
- delta core.Delta,
- links ...coreblock.DAGLink,
- ) (cidlink.Link, []byte, error)
- // ProcessBlock processes a block and updates the CRDT state.
- ProcessBlock(ctx context.Context, block *coreblock.Block, cid cidlink.Link) error
-}
-
-// baseMerkleCRDT handles the MerkleCRDT overhead functions that aren't CRDT specific like the mutations and state
-// retrieval functions. It handles creating and publishing the CRDT DAG with the help of the MerkleClock.
-type baseMerkleCRDT struct {
- clock MerkleClock
- crdt core.ReplicatedData
-}
-
-var _ core.ReplicatedData = (*baseMerkleCRDT)(nil)
-
-func (base *baseMerkleCRDT) Clock() MerkleClock {
- return base.clock
-}
-
-func (base *baseMerkleCRDT) Merge(ctx context.Context, other core.Delta) error {
- return base.crdt.Merge(ctx, other)
-}
-
-func (base *baseMerkleCRDT) Value(ctx context.Context) ([]byte, error) {
- return base.crdt.Value(ctx)
-}
-
func InstanceWithStore(
store Stores,
schemaVersionKey core.CollectionSchemaVersionKey,
diff --git a/internal/merkle/crdt/merklecrdt_test.go b/internal/merkle/crdt/merklecrdt_test.go
deleted file mode 100644
index 74f4814ca3..0000000000
--- a/internal/merkle/crdt/merklecrdt_test.go
+++ /dev/null
@@ -1,55 +0,0 @@
-// Copyright 2022 Democratized Data Foundation
-//
-// Use of this software is governed by the Business Source License
-// included in the file licenses/BSL.txt.
-//
-// As of the Change Date specified in that file, in accordance with
-// the Business Source License, use of this software will be governed
-// by the Apache License, Version 2.0, included in the file
-// licenses/APL.txt.
-
-package merklecrdt
-
-import (
- "context"
- "testing"
-
- "github.com/ipfs/go-cid"
- ds "github.com/ipfs/go-datastore"
-
- "github.com/sourcenetwork/defradb/datastore"
- "github.com/sourcenetwork/defradb/internal/core"
- crdt "github.com/sourcenetwork/defradb/internal/core/crdt"
- "github.com/sourcenetwork/defradb/internal/merkle/clock"
-)
-
-func newDS() ds.Datastore {
- return ds.NewMapDatastore()
-}
-
-func newTestBaseMerkleCRDT() (*baseMerkleCRDT, datastore.DSReaderWriter) {
- s := newDS()
- multistore := datastore.MultiStoreFrom(s)
-
- reg := crdt.NewLWWRegister(multistore.Datastore(), core.CollectionSchemaVersionKey{}, core.DataStoreKey{}, "")
- clk := clock.NewMerkleClock(multistore.Headstore(), multistore.Blockstore(), multistore.Encstore(), core.HeadStoreKey{}, reg)
- return &baseMerkleCRDT{clock: clk, crdt: reg}, multistore.Rootstore()
-}
-
-func TestMerkleCRDTPublish(t *testing.T) {
- ctx := context.Background()
- bCRDT, _ := newTestBaseMerkleCRDT()
- reg := crdt.LWWRegister{}
- delta := reg.Set([]byte("test"))
-
- link, _, err := bCRDT.clock.AddDelta(ctx, delta)
- if err != nil {
- t.Error("Failed to publish delta to MerkleCRDT:", err)
- return
- }
-
- if link.Cid == cid.Undef {
- t.Error("Published returned invalid CID Undef:", link.Cid)
- return
- }
-}
From 7c7c0301c5d26264ace99fc7751e00d74d143572 Mon Sep 17 00:00:00 2001
From: AndrewSisley
Date: Wed, 30 Oct 2024 14:06:02 -0400
Subject: [PATCH 12/47] refactor: Breakup core/keys.go file (#3198)
## Relevant issue(s)
Resolves #3197
## Description
Extracts core/keys.go to multiple files.
---
internal/core/crdt/base.go | 6 +-
internal/core/crdt/base_test.go | 10 +-
internal/core/crdt/composite.go | 17 +-
internal/core/crdt/counter.go | 13 +-
internal/core/crdt/lwwreg.go | 9 +-
internal/core/crdt/lwwreg_test.go | 5 +-
internal/core/data.go | 29 +-
internal/core/data_test.go | 342 +++---
internal/core/encoding.go | 186 ---
internal/core/key.go | 1027 -----------------
internal/db/base/collection_keys.go | 17 +-
internal/db/collection.go | 33 +-
internal/db/collection_delete.go | 6 +-
internal/db/collection_get.go | 3 +-
internal/db/collection_id.go | 6 +-
internal/db/collection_index.go | 31 +-
internal/db/db.go | 3 +-
internal/db/description/collection.go | 32 +-
internal/db/description/schema.go | 16 +-
internal/db/fetcher/dag.go | 7 +-
internal/db/fetcher/fetcher.go | 17 +-
internal/db/fetcher/indexer_iterators.go | 26 +-
internal/db/fetcher/versioned.go | 9 +-
internal/db/index.go | 58 +-
internal/db/index_test.go | 16 +-
internal/db/indexed_docs_test.go | 17 +-
internal/db/lens.go | 6 +-
internal/db/merge.go | 11 +-
internal/db/p2p_replicator.go | 45 +-
internal/db/p2p_schema_root.go | 10 +-
internal/db/sequence.go | 6 +-
internal/db/view.go | 5 +-
internal/keys/datastore.go | 15 +
internal/keys/datastore_doc.go | 292 +++++
internal/keys/datastore_index.go | 191 +++
internal/keys/datastore_primary_doc.go | 57 +
internal/keys/datastore_view_item.go | 87 ++
internal/keys/errors.go | 44 +
internal/keys/headstore_doc.go | 94 ++
internal/keys/key.go | 22 +
internal/{core => keys}/key_test.go | 9 +-
internal/keys/peerstore.go | 17 +
internal/keys/peerstore_replicator.go | 41 +
internal/keys/peerstore_replicator_retry.go | 54 +
.../keys/peerstore_replicator_retry_doc.go | 61 +
internal/keys/systemstore.go | 26 +
internal/keys/systemstore_collection.go | 44 +
internal/keys/systemstore_collection_index.go | 86 ++
internal/keys/systemstore_collection_name.go | 41 +
internal/keys/systemstore_collection_root.go | 83 ++
.../keys/systemstore_collection_schema.go | 75 ++
internal/keys/systemstore_p2p_collection.go | 56 +
internal/keys/systemstore_schema_root.go | 69 ++
internal/keys/systemstore_schema_version.go | 44 +
.../keys/systemstore_seq_collection_id.go | 30 +
internal/keys/systemstore_seq_field_id.go | 43 +
internal/keys/systemstore_seq_index_id.go | 42 +
internal/lens/fetcher.go | 7 +-
internal/merkle/clock/clock.go | 3 +-
internal/merkle/clock/clock_test.go | 10 +-
internal/merkle/clock/heads.go | 10 +-
internal/merkle/clock/heads_test.go | 4 +-
internal/merkle/crdt/composite.go | 6 +-
internal/merkle/crdt/counter.go | 6 +-
internal/merkle/crdt/lwwreg.go | 6 +-
internal/merkle/crdt/merklecrdt.go | 6 +-
internal/planner/commit.go | 5 +-
internal/planner/multi.go | 3 +-
internal/planner/select.go | 3 +-
internal/planner/view.go | 3 +-
net/server_test.go | 5 +-
71 files changed, 2074 insertions(+), 1650 deletions(-)
delete mode 100644 internal/core/key.go
create mode 100644 internal/keys/datastore.go
create mode 100644 internal/keys/datastore_doc.go
create mode 100644 internal/keys/datastore_index.go
create mode 100644 internal/keys/datastore_primary_doc.go
create mode 100644 internal/keys/datastore_view_item.go
create mode 100644 internal/keys/errors.go
create mode 100644 internal/keys/headstore_doc.go
create mode 100644 internal/keys/key.go
rename internal/{core => keys}/key_test.go (98%)
create mode 100644 internal/keys/peerstore.go
create mode 100644 internal/keys/peerstore_replicator.go
create mode 100644 internal/keys/peerstore_replicator_retry.go
create mode 100644 internal/keys/peerstore_replicator_retry_doc.go
create mode 100644 internal/keys/systemstore.go
create mode 100644 internal/keys/systemstore_collection.go
create mode 100644 internal/keys/systemstore_collection_index.go
create mode 100644 internal/keys/systemstore_collection_name.go
create mode 100644 internal/keys/systemstore_collection_root.go
create mode 100644 internal/keys/systemstore_collection_schema.go
create mode 100644 internal/keys/systemstore_p2p_collection.go
create mode 100644 internal/keys/systemstore_schema_root.go
create mode 100644 internal/keys/systemstore_schema_version.go
create mode 100644 internal/keys/systemstore_seq_collection_id.go
create mode 100644 internal/keys/systemstore_seq_field_id.go
create mode 100644 internal/keys/systemstore_seq_index_id.go
diff --git a/internal/core/crdt/base.go b/internal/core/crdt/base.go
index 7d158af6b8..31a8b6fc63 100644
--- a/internal/core/crdt/base.go
+++ b/internal/core/crdt/base.go
@@ -18,13 +18,13 @@ import (
"github.com/sourcenetwork/defradb/datastore"
"github.com/sourcenetwork/defradb/errors"
- "github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
func setPriority(
ctx context.Context,
store datastore.DSReaderWriter,
- key core.DataStoreKey,
+ key keys.DataStoreKey,
priority uint64,
) error {
prioK := key.WithPriorityFlag()
@@ -41,7 +41,7 @@ func setPriority(
func getPriority(
ctx context.Context,
store datastore.DSReaderWriter,
- key core.DataStoreKey,
+ key keys.DataStoreKey,
) (uint64, error) {
pKey := key.WithPriorityFlag()
pbuf, err := store.Get(ctx, pKey.ToDS())
diff --git a/internal/core/crdt/base_test.go b/internal/core/crdt/base_test.go
index 3943d375ce..29e2ac9283 100644
--- a/internal/core/crdt/base_test.go
+++ b/internal/core/crdt/base_test.go
@@ -17,7 +17,7 @@ import (
ds "github.com/ipfs/go-datastore"
"github.com/sourcenetwork/defradb/datastore"
- "github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
func newDS() datastore.DSReaderWriter {
@@ -25,14 +25,14 @@ func newDS() datastore.DSReaderWriter {
}
func TestBaseCRDTvalueKey(t *testing.T) {
- vk := core.DataStoreKey{}.WithDocID("mykey").WithValueFlag()
+ vk := keys.DataStoreKey{}.WithDocID("mykey").WithValueFlag()
if vk.ToString() != "/v/mykey" {
t.Errorf("Incorrect valueKey. Have %v, want %v", vk.ToString(), "/v/mykey")
}
}
func TestBaseCRDTprioryKey(t *testing.T) {
- pk := core.DataStoreKey{}.WithDocID("mykey").WithPriorityFlag()
+ pk := keys.DataStoreKey{}.WithDocID("mykey").WithPriorityFlag()
if pk.ToString() != "/p/mykey" {
t.Errorf("Incorrect priorityKey. Have %v, want %v", pk.ToString(), "/p/mykey")
}
@@ -42,13 +42,13 @@ func TestBaseCRDTSetGetPriority(t *testing.T) {
store := newDS()
ctx := context.Background()
- err := setPriority(ctx, store, core.DataStoreKey{}.WithDocID("mykey"), 10)
+ err := setPriority(ctx, store, keys.DataStoreKey{}.WithDocID("mykey"), 10)
if err != nil {
t.Errorf("baseCRDT failed to set Priority. err: %v", err)
return
}
- priority, err := getPriority(ctx, store, core.DataStoreKey{}.WithDocID("mykey"))
+ priority, err := getPriority(ctx, store, keys.DataStoreKey{}.WithDocID("mykey"))
if err != nil {
t.Errorf("baseCRDT failed to get priority. err: %v", err)
return
diff --git a/internal/core/crdt/composite.go b/internal/core/crdt/composite.go
index 1886b4574d..510d47d7e4 100644
--- a/internal/core/crdt/composite.go
+++ b/internal/core/crdt/composite.go
@@ -22,6 +22,7 @@ import (
"github.com/sourcenetwork/defradb/errors"
"github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/base"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
// CompositeDAGDelta represents a delta-state update made of sub-MerkleCRDTs.
@@ -77,20 +78,20 @@ func (delta *CompositeDAGDelta) SetPriority(prio uint64) {
// CompositeDAG is a CRDT structure that is used to track a collection of sub MerkleCRDTs.
type CompositeDAG struct {
store datastore.DSReaderWriter
- key core.DataStoreKey
+ key keys.DataStoreKey
// schemaVersionKey is the schema version datastore key at the time of commit.
//
// It can be used to identify the collection datastructure state at the time of commit.
- schemaVersionKey core.CollectionSchemaVersionKey
+ schemaVersionKey keys.CollectionSchemaVersionKey
}
var _ core.ReplicatedData = (*CompositeDAG)(nil)
func NewCompositeDAG(
store datastore.DSReaderWriter,
- schemaVersionKey core.CollectionSchemaVersionKey,
- key core.DataStoreKey,
+ schemaVersionKey keys.CollectionSchemaVersionKey,
+ key keys.DataStoreKey,
) CompositeDAG {
return CompositeDAG{
store: store,
@@ -125,7 +126,7 @@ func (c CompositeDAG) Merge(ctx context.Context, delta core.Delta) error {
// We cannot rely on the dagDelta.Status here as it may have been deleted locally, this is not
// reflected in `dagDelta.Status` if sourced via P2P. Updates synced via P2P should not undelete
// the local representation of the document.
- versionKey := c.key.WithValueFlag().WithFieldID(core.DATASTORE_DOC_VERSION_FIELD_ID)
+ versionKey := c.key.WithValueFlag().WithFieldID(keys.DATASTORE_DOC_VERSION_FIELD_ID)
objectMarker, err := c.store.Get(ctx, c.key.ToPrimaryDataStoreKey().ToDS())
hasObjectMarker := !errors.Is(err, ds.ErrNotFound)
if err != nil && hasObjectMarker {
@@ -159,7 +160,7 @@ func (c CompositeDAG) Merge(ctx context.Context, delta core.Delta) error {
return nil
}
-func (c CompositeDAG) deleteWithPrefix(ctx context.Context, key core.DataStoreKey) error {
+func (c CompositeDAG) deleteWithPrefix(ctx context.Context, key keys.DataStoreKey) error {
q := query.Query{
Prefix: key.ToString(),
}
@@ -168,12 +169,12 @@ func (c CompositeDAG) deleteWithPrefix(ctx context.Context, key core.DataStoreKe
if e.Error != nil {
return err
}
- dsKey, err := core.NewDataStoreKey(e.Key)
+ dsKey, err := keys.NewDataStoreKey(e.Key)
if err != nil {
return err
}
- if dsKey.InstanceType == core.ValueKey {
+ if dsKey.InstanceType == keys.ValueKey {
err = c.store.Put(ctx, dsKey.WithDeletedFlag().ToDS(), e.Value)
if err != nil {
return err
diff --git a/internal/core/crdt/counter.go b/internal/core/crdt/counter.go
index 966052b5f0..1f287eb700 100644
--- a/internal/core/crdt/counter.go
+++ b/internal/core/crdt/counter.go
@@ -26,6 +26,7 @@ import (
"github.com/sourcenetwork/defradb/errors"
"github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/base"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
type Incrementable interface {
@@ -78,12 +79,12 @@ func (delta *CounterDelta) SetPriority(prio uint64) {
// of an Int and Float data types that ensures convergence.
type Counter struct {
store datastore.DSReaderWriter
- key core.DataStoreKey
+ key keys.DataStoreKey
// schemaVersionKey is the schema version datastore key at the time of commit.
//
// It can be used to identify the collection datastructure state at the time of commit.
- schemaVersionKey core.CollectionSchemaVersionKey
+ schemaVersionKey keys.CollectionSchemaVersionKey
// fieldName holds the name of the field hosting this CRDT, if this is a field level
// commit.
@@ -98,8 +99,8 @@ var _ core.ReplicatedData = (*Counter)(nil)
// NewCounter returns a new instance of the Counter with the given ID.
func NewCounter(
store datastore.DSReaderWriter,
- schemaVersionKey core.CollectionSchemaVersionKey,
- key core.DataStoreKey,
+ schemaVersionKey keys.CollectionSchemaVersionKey,
+ key keys.DataStoreKey,
fieldName string,
allowDecrement bool,
kind client.ScalarKind,
@@ -205,7 +206,7 @@ func (c Counter) CType() client.CType {
func validateAndIncrement[T Incrementable](
ctx context.Context,
store datastore.DSReaderWriter,
- key core.DataStoreKey,
+ key keys.DataStoreKey,
valueAsBytes []byte,
allowDecrement bool,
) ([]byte, error) {
@@ -230,7 +231,7 @@ func validateAndIncrement[T Incrementable](
func getCurrentValue[T Incrementable](
ctx context.Context,
store datastore.DSReaderWriter,
- key core.DataStoreKey,
+ key keys.DataStoreKey,
) (T, error) {
curValue, err := store.Get(ctx, key.ToDS())
if err != nil {
diff --git a/internal/core/crdt/lwwreg.go b/internal/core/crdt/lwwreg.go
index 75e1244374..4fdf58ab47 100644
--- a/internal/core/crdt/lwwreg.go
+++ b/internal/core/crdt/lwwreg.go
@@ -21,6 +21,7 @@ import (
"github.com/sourcenetwork/defradb/errors"
"github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/base"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
// LWWRegDelta is a single delta operation for an LWWRegister
@@ -66,12 +67,12 @@ func (delta *LWWRegDelta) SetPriority(prio uint64) {
// of an arbitrary data type that ensures convergence.
type LWWRegister struct {
store datastore.DSReaderWriter
- key core.DataStoreKey
+ key keys.DataStoreKey
// schemaVersionKey is the schema version datastore key at the time of commit.
//
// It can be used to identify the collection datastructure state at the time of commit.
- schemaVersionKey core.CollectionSchemaVersionKey
+ schemaVersionKey keys.CollectionSchemaVersionKey
// fieldName holds the name of the field hosting this CRDT, if this is a field level
// commit.
@@ -83,8 +84,8 @@ var _ core.ReplicatedData = (*LWWRegister)(nil)
// NewLWWRegister returns a new instance of the LWWReg with the given ID.
func NewLWWRegister(
store datastore.DSReaderWriter,
- schemaVersionKey core.CollectionSchemaVersionKey,
- key core.DataStoreKey,
+ schemaVersionKey keys.CollectionSchemaVersionKey,
+ key keys.DataStoreKey,
fieldName string,
) LWWRegister {
return LWWRegister{
diff --git a/internal/core/crdt/lwwreg_test.go b/internal/core/crdt/lwwreg_test.go
index 087adecb70..c3ce9992b5 100644
--- a/internal/core/crdt/lwwreg_test.go
+++ b/internal/core/crdt/lwwreg_test.go
@@ -18,6 +18,7 @@ import (
"github.com/sourcenetwork/defradb/datastore"
"github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
func newMockStore() datastore.DSReaderWriter {
@@ -26,8 +27,8 @@ func newMockStore() datastore.DSReaderWriter {
func setupLWWRegister() LWWRegister {
store := newMockStore()
- key := core.DataStoreKey{DocID: "AAAA-BBBB"}
- return NewLWWRegister(store, core.CollectionSchemaVersionKey{}, key, "")
+ key := keys.DataStoreKey{DocID: "AAAA-BBBB"}
+ return NewLWWRegister(store, keys.CollectionSchemaVersionKey{}, key, "")
}
func TestLWWRegisterAddDelta(t *testing.T) {
diff --git a/internal/core/data.go b/internal/core/data.go
index a756d41f91..122d255c5f 100644
--- a/internal/core/data.go
+++ b/internal/core/data.go
@@ -10,27 +10,31 @@
package core
-import "strings"
+import (
+ "strings"
+
+ "github.com/sourcenetwork/defradb/internal/keys"
+)
// Span is a range of keys from [Start, End).
type Span interface {
// Start returns the starting key of the Span.
- Start() DataStoreKey
+ Start() keys.DataStoreKey
// End returns the ending key of the Span.
- End() DataStoreKey
+ End() keys.DataStoreKey
// Compare returns -1 if the provided span is less, 0 if it is equal, and 1 if its greater.
Compare(Span) SpanComparisonResult
}
type span struct {
- start DataStoreKey
- end DataStoreKey
+ start keys.DataStoreKey
+ end keys.DataStoreKey
}
var _ Span = span{}
// NewSpan creates a new Span from the provided start and end keys.
-func NewSpan(start, end DataStoreKey) Span {
+func NewSpan(start, end keys.DataStoreKey) Span {
return span{
start: start,
end: end,
@@ -38,12 +42,12 @@ func NewSpan(start, end DataStoreKey) Span {
}
// Start returns the starting key of the Span.
-func (s span) Start() DataStoreKey {
+func (s span) Start() keys.DataStoreKey {
return s.start
}
// End returns the ending key of the Span.
-func (s span) End() DataStoreKey {
+func (s span) End() keys.DataStoreKey {
return s.end
}
@@ -136,7 +140,7 @@ func (this span) Compare(other Span) SpanComparisonResult {
return After
}
-func isAdjacent(this DataStoreKey, other DataStoreKey) bool {
+func isAdjacent(this keys.DataStoreKey, other keys.DataStoreKey) bool {
return len(this.ToString()) == len(other.ToString()) &&
(this.PrefixEnd().ToString() == other.ToString() ||
this.ToString() == other.PrefixEnd().ToString())
@@ -156,13 +160,6 @@ func NewSpans(spans ...Span) Spans {
}
}
-// HeadKeyValue is a KV store response containing the resulting core.HeadStoreKey
-// and byte array value.
-type HeadKeyValue struct {
- Key HeadStoreKey
- Value []byte
-}
-
// Merges an unordered, potentially overlapping and/or duplicated collection of Spans into
// a unique set in ascending order, where overlapping spans are merged into a single span.
// Will handle spans with keys of different lengths, where one might be a prefix of another.
diff --git a/internal/core/data_test.go b/internal/core/data_test.go
index ae3580528f..d55851b795 100644
--- a/internal/core/data_test.go
+++ b/internal/core/data_test.go
@@ -14,6 +14,8 @@ import (
"testing"
"github.com/stretchr/testify/assert"
+
+ "github.com/sourcenetwork/defradb/internal/keys"
)
func TestMergeAscending_ReturnsEmpty_GivenEmpty(t *testing.T) {
@@ -25,8 +27,8 @@ func TestMergeAscending_ReturnsEmpty_GivenEmpty(t *testing.T) {
}
func TestMergeAscending_ReturnsSingle_GivenSingle(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k1")
- end1 := MustNewDataStoreKey("/1/p/0/k2")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
input := []Span{NewSpan(start1, end1)}
result := MergeAscending(input)
@@ -37,10 +39,10 @@ func TestMergeAscending_ReturnsSingle_GivenSingle(t *testing.T) {
}
func TestMergeAscending_ReturnsSecondBeforeFirst_GivenKeysInReverseOrder(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k4")
- end1 := MustNewDataStoreKey("/1/p/0/k5")
- start2 := MustNewDataStoreKey("/1/p/0/k1")
- end2 := MustNewDataStoreKey("/1/p/0/k2")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k4")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k5")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k2")
input := []Span{
NewSpan(start1, end1),
@@ -57,12 +59,12 @@ func TestMergeAscending_ReturnsSecondBeforeFirst_GivenKeysInReverseOrder(t *test
}
func TestMergeAscending_ReturnsItemsInOrder_GivenKeysInMixedOrder(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k1")
- end1 := MustNewDataStoreKey("/1/p/0/k2")
- start2 := MustNewDataStoreKey("/1/p/0/k7")
- end2 := MustNewDataStoreKey("/1/p/0/k8")
- start3 := MustNewDataStoreKey("/1/p/0/k4")
- end3 := MustNewDataStoreKey("/1/p/0/k5")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k7")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k8")
+ start3 := keys.MustNewDataStoreKey("/1/p/0/k4")
+ end3 := keys.MustNewDataStoreKey("/1/p/0/k5")
input := []Span{
NewSpan(start1, end1),
@@ -83,10 +85,10 @@ func TestMergeAscending_ReturnsItemsInOrder_GivenKeysInMixedOrder(t *testing.T)
}
func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndEqualToStart(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k3")
- end1 := MustNewDataStoreKey("/1/p/0/k4")
- start2 := MustNewDataStoreKey("/1/p/0/k1")
- end2 := MustNewDataStoreKey("/1/p/0/k3")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k4")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k3")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -100,10 +102,10 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndEqualToStart(t *testing
}
func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentToStart(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k3")
- end1 := MustNewDataStoreKey("/1/p/0/k4")
- start2 := MustNewDataStoreKey("/1/p/0/k1")
- end2 := MustNewDataStoreKey("/1/p/0/k2")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k4")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k2")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -117,10 +119,10 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentToStart(t *test
}
func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndWithin(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k3")
- end1 := MustNewDataStoreKey("/1/p/0/k4")
- start2 := MustNewDataStoreKey("/1/p/0/k1")
- end2 := MustNewDataStoreKey("/1/p/0/k3.5")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k4")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k3.5")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -134,10 +136,10 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndWithin(t *testing.T) {
}
func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndWithin(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k1.1")
- end1 := MustNewDataStoreKey("/1/p/0/k3")
- start2 := MustNewDataStoreKey("/1/p/0/k1")
- end2 := MustNewDataStoreKey("/1/p/0/k2.5")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1.1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k3")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k2.5")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -151,10 +153,10 @@ func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndWithin(t *testing.T)
}
func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndWithinEndPrefix(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k3")
- end1 := MustNewDataStoreKey("/1/p/0/k4")
- start2 := MustNewDataStoreKey("/1/p/0/k1")
- end2 := MustNewDataStoreKey("/1/p/0/k4.5")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k4")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k4.5")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -168,10 +170,10 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndWithinEndPrefix(t *test
}
func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndWithinEndPrefix(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k1.1")
- end1 := MustNewDataStoreKey("/1/p/0/k3")
- start2 := MustNewDataStoreKey("/1/p/0/k1")
- end2 := MustNewDataStoreKey("/1/p/0/k3.5")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1.1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k3")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k3.5")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -185,10 +187,10 @@ func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndWithinEndPrefix(t *te
}
func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndEqual(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k3")
- end1 := MustNewDataStoreKey("/1/p/0/k4")
- start2 := MustNewDataStoreKey("/1/p/0/k1")
- end2 := MustNewDataStoreKey("/1/p/0/k4")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k4")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k4")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -202,10 +204,10 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndEqual(t *testing.T) {
}
func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentAndBefore(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k3")
- end1 := MustNewDataStoreKey("/1/p/0/k5")
- start2 := MustNewDataStoreKey("/1/p/0/k1")
- end2 := MustNewDataStoreKey("/1/p/0/k4")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k5")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k4")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -219,10 +221,10 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentAndBefore(t *te
}
func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentAndGreater(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k3")
- end1 := MustNewDataStoreKey("/1/p/0/k4")
- start2 := MustNewDataStoreKey("/1/p/0/k1")
- end2 := MustNewDataStoreKey("/1/p/0/k5")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k4")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k5")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -236,10 +238,10 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentAndGreater(t *t
}
func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndEqual(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k1.1")
- end1 := MustNewDataStoreKey("/1/p/0/k3")
- start2 := MustNewDataStoreKey("/1/p/0/k1")
- end2 := MustNewDataStoreKey("/1/p/0/k3")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1.1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k3")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k3")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -253,10 +255,10 @@ func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndEqual(t *testing.T) {
}
func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndAdjacentAndBefore(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k1.1")
- end1 := MustNewDataStoreKey("/1/p/0/k3")
- start2 := MustNewDataStoreKey("/1/p/0/k1")
- end2 := MustNewDataStoreKey("/1/p/0/k2")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1.1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k3")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k2")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -270,10 +272,10 @@ func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndAdjacentAndBefore(t *
}
func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndAdjacentAndAfter(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k1.1")
- end1 := MustNewDataStoreKey("/1/p/0/k3")
- start2 := MustNewDataStoreKey("/1/p/0/k1")
- end2 := MustNewDataStoreKey("/1/p/0/k4")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1.1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k3")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k4")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -287,16 +289,16 @@ func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndAdjacentAndAfter(t *t
}
func TestMergeAscending_ReturnsMiddleSpansMerged_GivenSpanCoveringMiddleSpans(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k1")
- end1 := MustNewDataStoreKey("/1/p/0/k2")
- start2 := MustNewDataStoreKey("/1/p/0/k6")
- end2 := MustNewDataStoreKey("/1/p/0/k7")
- start3 := MustNewDataStoreKey("/1/p/0/k9")
- end3 := MustNewDataStoreKey("/1/p/0/ka")
- start4 := MustNewDataStoreKey("/1/p/0/kc")
- end4 := MustNewDataStoreKey("/1/p/0/kd")
- start5 := MustNewDataStoreKey("/1/p/0/k4")
- end5 := MustNewDataStoreKey("/1/p/0/ka")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k6")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k7")
+ start3 := keys.MustNewDataStoreKey("/1/p/0/k9")
+ end3 := keys.MustNewDataStoreKey("/1/p/0/ka")
+ start4 := keys.MustNewDataStoreKey("/1/p/0/kc")
+ end4 := keys.MustNewDataStoreKey("/1/p/0/kd")
+ start5 := keys.MustNewDataStoreKey("/1/p/0/k4")
+ end5 := keys.MustNewDataStoreKey("/1/p/0/ka")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -318,10 +320,10 @@ func TestMergeAscending_ReturnsMiddleSpansMerged_GivenSpanCoveringMiddleSpans(t
}
func TestMergeAscending_ReturnsSingle_GivenStartEqualEndWithin(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k1")
- end1 := MustNewDataStoreKey("/1/p/0/k2")
- start2 := MustNewDataStoreKey("/1/p/0/k1")
- end2 := MustNewDataStoreKey("/1/p/0/k1.5")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k1.5")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -335,10 +337,10 @@ func TestMergeAscending_ReturnsSingle_GivenStartEqualEndWithin(t *testing.T) {
}
func TestMergeAscending_ReturnsSingle_GivenStartEqualEndWithinEndPrefix(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k1")
- end1 := MustNewDataStoreKey("/1/p/0/k2")
- start2 := MustNewDataStoreKey("/1/p/0/k1")
- end2 := MustNewDataStoreKey("/1/p/0/k2.5")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k2.5")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -352,8 +354,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartEqualEndWithinEndPrefix(t *testi
}
func TestMergeAscending_ReturnsSingle_GivenDuplicates(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k1")
- end1 := MustNewDataStoreKey("/1/p/0/k2")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
input := []Span{
NewSpan(start1, end1),
NewSpan(start1, end1),
@@ -367,10 +369,10 @@ func TestMergeAscending_ReturnsSingle_GivenDuplicates(t *testing.T) {
}
func TestMergeAscending_ReturnsSingle_GivenStartWithinEndWithin(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k1")
- end1 := MustNewDataStoreKey("/1/p/0/k2")
- start2 := MustNewDataStoreKey("/1/p/0/k1.2")
- end2 := MustNewDataStoreKey("/1/p/0/k1.5")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k1.2")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k1.5")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -384,10 +386,10 @@ func TestMergeAscending_ReturnsSingle_GivenStartWithinEndWithin(t *testing.T) {
}
func TestMergeAscending_ReturnsSingle_GivenStartWithinEndWithinEndPrefix(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k1")
- end1 := MustNewDataStoreKey("/1/p/0/k2")
- start2 := MustNewDataStoreKey("/1/p/0/k1.2")
- end2 := MustNewDataStoreKey("/1/p/0/k2.5")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k1.2")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k2.5")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -401,10 +403,10 @@ func TestMergeAscending_ReturnsSingle_GivenStartWithinEndWithinEndPrefix(t *test
}
func TestMergeAscending_ReturnsSingle_GivenStartWithinEndEqual(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k1")
- end1 := MustNewDataStoreKey("/1/p/0/k2")
- start2 := MustNewDataStoreKey("/1/p/0/k1.2")
- end2 := MustNewDataStoreKey("/1/p/0/k2")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k1.2")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k2")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -418,10 +420,10 @@ func TestMergeAscending_ReturnsSingle_GivenStartWithinEndEqual(t *testing.T) {
}
func TestMergeAscending_ReturnsSingle_GivenStartWithinEndAdjacentAndBefore(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k1")
- end1 := MustNewDataStoreKey("/1/p/0/k3")
- start2 := MustNewDataStoreKey("/1/p/0/k1.2")
- end2 := MustNewDataStoreKey("/1/p/0/k2")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k3")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k1.2")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k2")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -435,10 +437,10 @@ func TestMergeAscending_ReturnsSingle_GivenStartWithinEndAdjacentAndBefore(t *te
}
func TestMergeAscending_ReturnsSingle_GivenStartWithinEndAdjacentAndAfter(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k1")
- end1 := MustNewDataStoreKey("/1/p/0/k3")
- start2 := MustNewDataStoreKey("/1/p/0/k1.2")
- end2 := MustNewDataStoreKey("/1/p/0/k4")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k3")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k1.2")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k4")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -454,16 +456,16 @@ func TestMergeAscending_ReturnsSingle_GivenStartWithinEndAdjacentAndAfter(t *tes
func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartEqualEndAfterSpanCoveringMiddleSpans(
t *testing.T,
) {
- start1 := MustNewDataStoreKey("/1/p/0/k1")
- end1 := MustNewDataStoreKey("/1/p/0/k2")
- start2 := MustNewDataStoreKey("/1/p/0/k4")
- end2 := MustNewDataStoreKey("/1/p/0/k5")
- start3 := MustNewDataStoreKey("/1/p/0/k7")
- end3 := MustNewDataStoreKey("/1/p/0/k8")
- start4 := MustNewDataStoreKey("/1/p/0/kc")
- end4 := MustNewDataStoreKey("/1/p/0/kd")
- start5 := MustNewDataStoreKey("/1/p/0/k4") // equal to start2
- end5 := MustNewDataStoreKey("/1/p/0/ka")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k4")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k5")
+ start3 := keys.MustNewDataStoreKey("/1/p/0/k7")
+ end3 := keys.MustNewDataStoreKey("/1/p/0/k8")
+ start4 := keys.MustNewDataStoreKey("/1/p/0/kc")
+ end4 := keys.MustNewDataStoreKey("/1/p/0/kd")
+ start5 := keys.MustNewDataStoreKey("/1/p/0/k4") // equal to start2
+ end5 := keys.MustNewDataStoreKey("/1/p/0/ka")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -487,16 +489,16 @@ func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartEqualEndAfterSpanCove
func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartWithinEndAfterSpanCoveringMiddleSpans(
t *testing.T,
) {
- start1 := MustNewDataStoreKey("/1/p/0/k1")
- end1 := MustNewDataStoreKey("/1/p/0/k2")
- start2 := MustNewDataStoreKey("/1/p/0/k4")
- end2 := MustNewDataStoreKey("/1/p/0/k5")
- start3 := MustNewDataStoreKey("/1/p/0/k7")
- end3 := MustNewDataStoreKey("/1/p/0/k8")
- start4 := MustNewDataStoreKey("/1/p/0/kc")
- end4 := MustNewDataStoreKey("/1/p/0/kd")
- start5 := MustNewDataStoreKey("/1/p/0/k4.5") // within span2
- end5 := MustNewDataStoreKey("/1/p/0/ka")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k4")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k5")
+ start3 := keys.MustNewDataStoreKey("/1/p/0/k7")
+ end3 := keys.MustNewDataStoreKey("/1/p/0/k8")
+ start4 := keys.MustNewDataStoreKey("/1/p/0/kc")
+ end4 := keys.MustNewDataStoreKey("/1/p/0/kd")
+ start5 := keys.MustNewDataStoreKey("/1/p/0/k4.5") // within span2
+ end5 := keys.MustNewDataStoreKey("/1/p/0/ka")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -519,16 +521,16 @@ func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartWithinEndAfterSpanCov
func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartEqualToEndEndAfterSpanCoveringMiddleSpans(
t *testing.T,
) {
- start1 := MustNewDataStoreKey("/1/p/0/k1")
- end1 := MustNewDataStoreKey("/1/p/0/k2")
- start2 := MustNewDataStoreKey("/1/p/0/k4")
- end2 := MustNewDataStoreKey("/1/p/0/k5")
- start3 := MustNewDataStoreKey("/1/p/0/k7")
- end3 := MustNewDataStoreKey("/1/p/0/k8")
- start4 := MustNewDataStoreKey("/1/p/0/kc")
- end4 := MustNewDataStoreKey("/1/p/0/kd")
- start5 := MustNewDataStoreKey("/1/p/0/k5") // span2's end
- end5 := MustNewDataStoreKey("/1/p/0/ka")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k4")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k5")
+ start3 := keys.MustNewDataStoreKey("/1/p/0/k7")
+ end3 := keys.MustNewDataStoreKey("/1/p/0/k8")
+ start4 := keys.MustNewDataStoreKey("/1/p/0/kc")
+ end4 := keys.MustNewDataStoreKey("/1/p/0/kd")
+ start5 := keys.MustNewDataStoreKey("/1/p/0/k5") // span2's end
+ end5 := keys.MustNewDataStoreKey("/1/p/0/ka")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -551,16 +553,16 @@ func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartEqualToEndEndAfterSpa
func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartAdjacentAndBeforeEndEndAfterSpanCoveringMiddleSpans(
t *testing.T,
) {
- start1 := MustNewDataStoreKey("/1/p/0/k1")
- end1 := MustNewDataStoreKey("/1/p/0/k2")
- start2 := MustNewDataStoreKey("/1/p/0/k4")
- end2 := MustNewDataStoreKey("/1/p/0/k6")
- start3 := MustNewDataStoreKey("/1/p/0/k8")
- end3 := MustNewDataStoreKey("/1/p/0/k9")
- start4 := MustNewDataStoreKey("/1/p/0/kd")
- end4 := MustNewDataStoreKey("/1/p/0/ke")
- start5 := MustNewDataStoreKey("/1/p/0/k5") // adjacent but before span2's end
- end5 := MustNewDataStoreKey("/1/p/0/kb")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k4")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k6")
+ start3 := keys.MustNewDataStoreKey("/1/p/0/k8")
+ end3 := keys.MustNewDataStoreKey("/1/p/0/k9")
+ start4 := keys.MustNewDataStoreKey("/1/p/0/kd")
+ end4 := keys.MustNewDataStoreKey("/1/p/0/ke")
+ start5 := keys.MustNewDataStoreKey("/1/p/0/k5") // adjacent but before span2's end
+ end5 := keys.MustNewDataStoreKey("/1/p/0/kb")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -583,16 +585,16 @@ func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartAdjacentAndBeforeEndE
func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartAdjacentAndAfterEndEndAfterSpanCoveringMiddleSpans(
t *testing.T,
) {
- start1 := MustNewDataStoreKey("/1/p/0/k1")
- end1 := MustNewDataStoreKey("/1/p/0/k2")
- start2 := MustNewDataStoreKey("/1/p/0/k4")
- end2 := MustNewDataStoreKey("/1/p/0/k5")
- start3 := MustNewDataStoreKey("/1/p/0/k8")
- end3 := MustNewDataStoreKey("/1/p/0/k9")
- start4 := MustNewDataStoreKey("/1/p/0/kd")
- end4 := MustNewDataStoreKey("/1/p/0/ke")
- start5 := MustNewDataStoreKey("/1/p/0/k6") // adjacent and after span2's end
- end5 := MustNewDataStoreKey("/1/p/0/kb")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k4")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k5")
+ start3 := keys.MustNewDataStoreKey("/1/p/0/k8")
+ end3 := keys.MustNewDataStoreKey("/1/p/0/k9")
+ start4 := keys.MustNewDataStoreKey("/1/p/0/kd")
+ end4 := keys.MustNewDataStoreKey("/1/p/0/ke")
+ start5 := keys.MustNewDataStoreKey("/1/p/0/k6") // adjacent and after span2's end
+ end5 := keys.MustNewDataStoreKey("/1/p/0/kb")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -613,10 +615,10 @@ func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartAdjacentAndAfterEndEn
}
func TestMergeAscending_ReturnsTwoItems_GivenSecondItemAfterFirst(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k1")
- end1 := MustNewDataStoreKey("/1/p/0/k2")
- start2 := MustNewDataStoreKey("/1/p/0/k4")
- end2 := MustNewDataStoreKey("/1/p/0/k5")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k4")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k5")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -632,10 +634,10 @@ func TestMergeAscending_ReturnsTwoItems_GivenSecondItemAfterFirst(t *testing.T)
}
func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndEqual(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k3")
- end1 := MustNewDataStoreKey("/1/p/0/k6")
- start2 := MustNewDataStoreKey("/1/p/0/k5")
- end2 := MustNewDataStoreKey("/1/p/0/k6")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k6")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k5")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k6")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -651,10 +653,10 @@ func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndEqual(t *
func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndAdjacentAndAfter(
t *testing.T,
) {
- start1 := MustNewDataStoreKey("/1/p/0/k3")
- end1 := MustNewDataStoreKey("/1/p/0/k6")
- start2 := MustNewDataStoreKey("/1/p/0/k5")
- end2 := MustNewDataStoreKey("/1/p/0/k7")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k6")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k5")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k7")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -668,10 +670,10 @@ func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndAdjacentA
}
func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndAfter(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k3")
- end1 := MustNewDataStoreKey("/1/p/0/k6")
- start2 := MustNewDataStoreKey("/1/p/0/k5")
- end2 := MustNewDataStoreKey("/1/p/0/k8")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k6")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k5")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k8")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
@@ -685,10 +687,10 @@ func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndAfter(t *
}
func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndAfterEndEndAfter(t *testing.T) {
- start1 := MustNewDataStoreKey("/1/p/0/k3")
- end1 := MustNewDataStoreKey("/1/p/0/k6")
- start2 := MustNewDataStoreKey("/1/p/0/k7")
- end2 := MustNewDataStoreKey("/1/p/0/k8")
+ start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
+ end1 := keys.MustNewDataStoreKey("/1/p/0/k6")
+ start2 := keys.MustNewDataStoreKey("/1/p/0/k7")
+ end2 := keys.MustNewDataStoreKey("/1/p/0/k8")
input := []Span{
NewSpan(start1, end1),
NewSpan(start2, end2),
diff --git a/internal/core/encoding.go b/internal/core/encoding.go
index 8c7930d6b9..4f190f3c67 100644
--- a/internal/core/encoding.go
+++ b/internal/core/encoding.go
@@ -17,7 +17,6 @@ import (
"github.com/sourcenetwork/immutable"
"github.com/sourcenetwork/defradb/client"
- "github.com/sourcenetwork/defradb/internal/encoding"
)
// NormalizeFieldValue takes a field value and description and converts it to the
@@ -232,188 +231,3 @@ func convertToJSON(propertyName string, untypedValue any) (any, error) {
return untypedValue, nil
}
}
-
-// DecodeIndexDataStoreKey decodes a IndexDataStoreKey from bytes.
-// It expects the input bytes is in the following format:
-//
-// /[CollectionID]/[IndexID]/[FieldValue](/[FieldValue]...)
-//
-// Where [CollectionID] and [IndexID] are integers
-//
-// All values of the fields are converted to standardized Defra Go type
-// according to fields description.
-func DecodeIndexDataStoreKey(
- data []byte,
- indexDesc *client.IndexDescription,
- fields []client.FieldDefinition,
-) (IndexDataStoreKey, error) {
- if len(data) == 0 {
- return IndexDataStoreKey{}, ErrEmptyKey
- }
-
- if data[0] != '/' {
- return IndexDataStoreKey{}, ErrInvalidKey
- }
- data = data[1:]
-
- data, colID, err := encoding.DecodeUvarintAscending(data)
- if err != nil {
- return IndexDataStoreKey{}, err
- }
-
- key := IndexDataStoreKey{CollectionID: uint32(colID)}
-
- if data[0] != '/' {
- return IndexDataStoreKey{}, ErrInvalidKey
- }
- data = data[1:]
-
- data, indID, err := encoding.DecodeUvarintAscending(data)
- if err != nil {
- return IndexDataStoreKey{}, err
- }
- key.IndexID = uint32(indID)
-
- if len(data) == 0 {
- return key, nil
- }
-
- for len(data) > 0 {
- if data[0] != '/' {
- return IndexDataStoreKey{}, ErrInvalidKey
- }
- data = data[1:]
-
- i := len(key.Fields)
- descending := false
- var kind client.FieldKind = client.FieldKind_DocID
- // If the key has more values encoded then fields on the index description, the last
- // value must be the docID and we treat it as a string.
- if i < len(indexDesc.Fields) {
- descending = indexDesc.Fields[i].Descending
- kind = fields[i].Kind
- } else if i > len(indexDesc.Fields) {
- return IndexDataStoreKey{}, ErrInvalidKey
- }
-
- if kind != nil && kind.IsArray() {
- if arrKind, ok := kind.(client.ScalarArrayKind); ok {
- kind = arrKind.SubKind()
- }
- }
-
- var val client.NormalValue
- data, val, err = encoding.DecodeFieldValue(data, descending, kind)
- if err != nil {
- return IndexDataStoreKey{}, err
- }
-
- key.Fields = append(key.Fields, IndexedField{Value: val, Descending: descending})
- }
-
- return key, nil
-}
-
-// EncodeIndexDataStoreKey encodes a IndexDataStoreKey to bytes to be stored as a key
-// for secondary indexes.
-func EncodeIndexDataStoreKey(key *IndexDataStoreKey) []byte {
- if key.CollectionID == 0 {
- return []byte{}
- }
-
- b := encoding.EncodeUvarintAscending([]byte{'/'}, uint64(key.CollectionID))
-
- if key.IndexID == 0 {
- return b
- }
- b = append(b, '/')
- b = encoding.EncodeUvarintAscending(b, uint64(key.IndexID))
-
- for _, field := range key.Fields {
- b = append(b, '/')
- b = encoding.EncodeFieldValue(b, field.Value, field.Descending)
- }
-
- return b
-}
-
-// DecodeDataStoreKey decodes a store key into a [DataStoreKey].
-func DecodeDataStoreKey(data []byte) (DataStoreKey, error) {
- if len(data) == 0 {
- return DataStoreKey{}, ErrEmptyKey
- }
-
- if data[0] != '/' {
- return DataStoreKey{}, ErrInvalidKey
- }
- data = data[1:]
-
- data, colRootID, err := encoding.DecodeUvarintAscending(data)
- if err != nil {
- return DataStoreKey{}, err
- }
-
- var instanceType InstanceType
- if len(data) > 1 {
- if data[0] == '/' {
- data = data[1:]
- }
- instanceType = InstanceType(data[0])
- data = data[1:]
- }
-
- const docKeyLength int = 40
- var docID string
- if len(data) > docKeyLength {
- if data[0] == '/' {
- data = data[1:]
- }
- docID = string(data[:docKeyLength])
- data = data[docKeyLength:]
- }
-
- var fieldID string
- if len(data) > 1 {
- if data[0] == '/' {
- data = data[1:]
- }
- // Todo: This should be encoded/decoded properly in
- // https://github.com/sourcenetwork/defradb/issues/2818
- fieldID = string(data)
- }
-
- return DataStoreKey{
- CollectionRootID: uint32(colRootID),
- InstanceType: (instanceType),
- DocID: docID,
- FieldID: fieldID,
- }, nil
-}
-
-// EncodeDataStoreKey encodes a [*DataStoreKey] to a byte array suitable for sorting in the store.
-func EncodeDataStoreKey(key *DataStoreKey) []byte {
- var result []byte
-
- if key.CollectionRootID != 0 {
- result = encoding.EncodeUvarintAscending([]byte{'/'}, uint64(key.CollectionRootID))
- }
-
- if key.InstanceType != "" {
- result = append(result, '/')
- result = append(result, []byte(string(key.InstanceType))...)
- }
-
- if key.DocID != "" {
- result = append(result, '/')
- result = append(result, []byte(key.DocID)...)
- }
-
- if key.FieldID != "" {
- result = append(result, '/')
- // Todo: This should be encoded/decoded properly in
- // https://github.com/sourcenetwork/defradb/issues/2818
- result = append(result, []byte(key.FieldID)...)
- }
-
- return result
-}
diff --git a/internal/core/key.go b/internal/core/key.go
deleted file mode 100644
index 3e0bde87cb..0000000000
--- a/internal/core/key.go
+++ /dev/null
@@ -1,1027 +0,0 @@
-// Copyright 2022 Democratized Data Foundation
-//
-// Use of this software is governed by the Business Source License
-// included in the file licenses/BSL.txt.
-//
-// As of the Change Date specified in that file, in accordance with
-// the Business Source License, use of this software will be governed
-// by the Apache License, Version 2.0, included in the file
-// licenses/APL.txt.
-
-package core
-
-import (
- "fmt"
- "strconv"
- "strings"
-
- "github.com/ipfs/go-cid"
- ds "github.com/ipfs/go-datastore"
- "github.com/sourcenetwork/immutable"
-
- "github.com/sourcenetwork/defradb/client"
- "github.com/sourcenetwork/defradb/errors"
- "github.com/sourcenetwork/defradb/internal/encoding"
-)
-
-var (
- // KeyMin is a minimum key value which sorts before all other keys.
- KeyMin = []byte{}
- // KeyMax is a maximum key value which sorts after all other keys.
- KeyMax = []byte{0xff, 0xff}
-)
-
-// InstanceType is a type that represents the type of instance.
-type InstanceType string
-
-const (
- // ValueKey is a type that represents a value instance.
- ValueKey = InstanceType("v")
- // PriorityKey is a type that represents a priority instance.
- PriorityKey = InstanceType("p")
- // DeletedKey is a type that represents a deleted document.
- DeletedKey = InstanceType("d")
-)
-
-const (
- COLLECTION = "collection"
- COLLECTION_ID = "/collection/id"
- COLLECTION_NAME = "/collection/name"
- COLLECTION_SCHEMA_VERSION = "/collection/version"
- COLLECTION_ROOT = "/collection/root"
- COLLECTION_INDEX = "/collection/index"
- COLLECTION_VIEW_ITEMS = "/collection/vi"
- SCHEMA_VERSION = "/schema/version/v"
- SCHEMA_VERSION_ROOT = "/schema/version/r"
- COLLECTION_SEQ = "/seq/collection"
- INDEX_ID_SEQ = "/seq/index"
- FIELD_ID_SEQ = "/seq/field"
- PRIMARY_KEY = "/pk"
- DATASTORE_DOC_VERSION_FIELD_ID = "v"
- P2P_COLLECTION = "/p2p/collection"
- REPLICATOR = "/rep/id"
- REPLICATOR_RETRY_ID = "/rep/retry/id"
- REPLICATOR_RETRY_DOC = "/rep/retry/doc"
-)
-
-// Key is an interface that represents a key in the database.
-type Key interface {
- ToString() string
- Bytes() []byte
- ToDS() ds.Key
-}
-
-// DataStoreKey is a type that represents a key in the database.
-type DataStoreKey struct {
- CollectionRootID uint32
- InstanceType InstanceType
- DocID string
- FieldID string
-}
-
-var _ Key = (*DataStoreKey)(nil)
-
-// ViewCacheKey is a trimmed down [DataStoreKey] used for caching the results
-// of View items.
-//
-// It is stored in the format `/collection/vi/[CollectionRootID]/[ItemID]`. It points to the
-// full serialized View item.
-type ViewCacheKey struct {
- // CollectionRootID is the Root of the Collection that this item belongs to.
- CollectionRootID uint32
-
- // ItemID is the unique (to this CollectionRootID) ID of the View item.
- //
- // For now this is essentially just the index of the item in the result-set, however
- // that is likely to change in the near future.
- ItemID uint
-}
-
-var _ Key = (*ViewCacheKey)(nil)
-
-// IndexedField contains information necessary for storing a single
-// value of a field in an index.
-type IndexedField struct {
- // Value is the value of the field in the index
- Value client.NormalValue
- // Descending is true if the field is sorted in descending order
- Descending bool
-}
-
-// IndexDataStoreKey is key of an indexed document in the database.
-type IndexDataStoreKey struct {
- // CollectionID is the id of the collection
- CollectionID uint32
- // IndexID is the id of the index
- IndexID uint32
- // Fields is the values of the fields in the index
- Fields []IndexedField
-}
-
-var _ Key = (*IndexDataStoreKey)(nil)
-
-type PrimaryDataStoreKey struct {
- CollectionRootID uint32
- DocID string
-}
-
-var _ Key = (*PrimaryDataStoreKey)(nil)
-
-type HeadStoreKey struct {
- DocID string
- FieldID string //can be 'C'
- Cid cid.Cid
-}
-
-var _ Key = (*HeadStoreKey)(nil)
-
-// CollectionKey points to the json serialized description of the
-// the collection of the given ID.
-type CollectionKey struct {
- CollectionID uint32
-}
-
-var _ Key = (*CollectionKey)(nil)
-
-// CollectionNameKey points to the ID of the collection of the given
-// name.
-type CollectionNameKey struct {
- Name string
-}
-
-var _ Key = (*CollectionNameKey)(nil)
-
-// CollectionSchemaVersionKey points to nil, but the keys/prefix can be used
-// to get collections that are using, or have used a given schema version.
-//
-// If a collection is updated to a different schema version, the old entry(s)
-// of this key will be preserved.
-//
-// This key should be removed in https://github.com/sourcenetwork/defradb/issues/1085
-type CollectionSchemaVersionKey struct {
- SchemaVersionID string
- CollectionID uint32
-}
-
-var _ Key = (*CollectionSchemaVersionKey)(nil)
-
-// CollectionRootKey points to nil, but the keys/prefix can be used
-// to get collections that are of a given RootID.
-//
-// It is stored in the format `/collection/root/[RootID]/[CollectionID]`.
-type CollectionRootKey struct {
- RootID uint32
- CollectionID uint32
-}
-
-var _ Key = (*CollectionRootKey)(nil)
-
-// CollectionIndexKey to a stored description of an index
-type CollectionIndexKey struct {
- // CollectionID is the id of the collection that the index is on
- CollectionID immutable.Option[uint32]
- // IndexName is the name of the index
- IndexName string
-}
-
-var _ Key = (*CollectionIndexKey)(nil)
-
-// SchemaVersionKey points to the json serialized schema at the specified version.
-//
-// It's corresponding value is immutable.
-type SchemaVersionKey struct {
- SchemaVersionID string
-}
-
-var _ Key = (*SchemaVersionKey)(nil)
-
-// SchemaRootKey indexes schema version ids by their root schema id.
-//
-// The index is the key, there are no values stored against the key.
-type SchemaRootKey struct {
- SchemaRoot string
- SchemaVersionID string
-}
-
-var _ Key = (*SchemaRootKey)(nil)
-
-type P2PCollectionKey struct {
- CollectionID string
-}
-
-var _ Key = (*P2PCollectionKey)(nil)
-
-// CollectionIDSequenceKey is used to key the sequence used to generate collection ids.
-type CollectionIDSequenceKey struct{}
-
-var _ Key = (*CollectionIDSequenceKey)(nil)
-
-// IndexIDSequenceKey is used to key the sequence used to generate index ids.
-//
-// The sequence is specific to each collection version.
-type IndexIDSequenceKey struct {
- CollectionID uint32
-}
-
-var _ Key = (*IndexIDSequenceKey)(nil)
-
-// FieldIDSequenceKey is used to key the sequence used to generate field ids.
-//
-// The sequence is specific to each collection root. Multiple collection of the same root
-// must maintain consistent field ids.
-type FieldIDSequenceKey struct {
- CollectionRoot uint32
-}
-
-var _ Key = (*FieldIDSequenceKey)(nil)
-
-type ReplicatorKey struct {
- ReplicatorID string
-}
-
-var _ Key = (*ReplicatorKey)(nil)
-
-// Creates a new DataStoreKey from a string as best as it can,
-// splitting the input using '/' as a field deliminator. It assumes
-// that the input string is in the following format:
-//
-// /[CollectionRootId]/[InstanceType]/[DocID]/[FieldId]
-//
-// Any properties before the above (assuming a '/' deliminator) are ignored
-func NewDataStoreKey(key string) (DataStoreKey, error) {
- return DecodeDataStoreKey([]byte(key))
-}
-
-func MustNewDataStoreKey(key string) DataStoreKey {
- dsKey, err := NewDataStoreKey(key)
- if err != nil {
- panic(err)
- }
- return dsKey
-}
-
-func DataStoreKeyFromDocID(docID client.DocID) DataStoreKey {
- return DataStoreKey{
- DocID: docID.String(),
- }
-}
-
-// Creates a new HeadStoreKey from a string as best as it can,
-// splitting the input using '/' as a field deliminator. It assumes
-// that the input string is in the following format:
-//
-// /[DocID]/[FieldId]/[Cid]
-//
-// Any properties before the above are ignored
-func NewHeadStoreKey(key string) (HeadStoreKey, error) {
- elements := strings.Split(key, "/")
- if len(elements) != 4 {
- return HeadStoreKey{}, ErrInvalidKey
- }
-
- cid, err := cid.Decode(elements[3])
- if err != nil {
- return HeadStoreKey{}, err
- }
-
- return HeadStoreKey{
- // elements[0] is empty (key has leading '/')
- DocID: elements[1],
- FieldID: elements[2],
- Cid: cid,
- }, nil
-}
-
-// Returns a formatted collection key for the system data store.
-// It assumes the name of the collection is non-empty.
-func NewCollectionKey(id uint32) CollectionKey {
- return CollectionKey{CollectionID: id}
-}
-
-func NewCollectionNameKey(name string) CollectionNameKey {
- return CollectionNameKey{Name: name}
-}
-
-func NewCollectionSchemaVersionKey(schemaVersionId string, collectionID uint32) CollectionSchemaVersionKey {
- return CollectionSchemaVersionKey{
- SchemaVersionID: schemaVersionId,
- CollectionID: collectionID,
- }
-}
-
-func NewCollectionSchemaVersionKeyFromString(key string) (CollectionSchemaVersionKey, error) {
- elements := strings.Split(key, "/")
- colID, err := strconv.Atoi(elements[len(elements)-1])
- if err != nil {
- return CollectionSchemaVersionKey{}, err
- }
-
- return CollectionSchemaVersionKey{
- SchemaVersionID: elements[len(elements)-2],
- CollectionID: uint32(colID),
- }, nil
-}
-
-func NewCollectionRootKey(rootID uint32, collectionID uint32) CollectionRootKey {
- return CollectionRootKey{
- RootID: rootID,
- CollectionID: collectionID,
- }
-}
-
-// NewCollectionRootKeyFromString creates a new [CollectionRootKey].
-//
-// It expects the key to be in the format `/collection/root/[RootID]/[CollectionID]`.
-func NewCollectionRootKeyFromString(key string) (CollectionRootKey, error) {
- keyArr := strings.Split(key, "/")
- if len(keyArr) != 5 || keyArr[1] != COLLECTION || keyArr[2] != "root" {
- return CollectionRootKey{}, ErrInvalidKey
- }
- rootID, err := strconv.Atoi(keyArr[3])
- if err != nil {
- return CollectionRootKey{}, err
- }
-
- collectionID, err := strconv.Atoi(keyArr[4])
- if err != nil {
- return CollectionRootKey{}, err
- }
-
- return CollectionRootKey{
- RootID: uint32(rootID),
- CollectionID: uint32(collectionID),
- }, nil
-}
-
-// NewCollectionIndexKey creates a new CollectionIndexKey from a collection name and index name.
-func NewCollectionIndexKey(colID immutable.Option[uint32], indexName string) CollectionIndexKey {
- return CollectionIndexKey{CollectionID: colID, IndexName: indexName}
-}
-
-// NewCollectionIndexKeyFromString creates a new CollectionIndexKey from a string.
-// It expects the input string is in the following format:
-//
-// /collection/index/[CollectionID]/[IndexName]
-//
-// Where [IndexName] might be omitted. Anything else will return an error.
-func NewCollectionIndexKeyFromString(key string) (CollectionIndexKey, error) {
- keyArr := strings.Split(key, "/")
- if len(keyArr) < 4 || len(keyArr) > 5 || keyArr[1] != COLLECTION || keyArr[2] != "index" {
- return CollectionIndexKey{}, ErrInvalidKey
- }
-
- colID, err := strconv.Atoi(keyArr[3])
- if err != nil {
- return CollectionIndexKey{}, err
- }
-
- result := CollectionIndexKey{CollectionID: immutable.Some(uint32(colID))}
- if len(keyArr) == 5 {
- result.IndexName = keyArr[4]
- }
- return result, nil
-}
-
-// ToString returns the string representation of the key
-// It is in the following format:
-// /collection/index/[CollectionID]/[IndexName]
-// if [CollectionID] is empty, the rest is ignored
-func (k CollectionIndexKey) ToString() string {
- result := COLLECTION_INDEX
-
- if k.CollectionID.HasValue() {
- result = result + "/" + fmt.Sprint(k.CollectionID.Value())
- if k.IndexName != "" {
- result = result + "/" + k.IndexName
- }
- }
-
- return result
-}
-
-// Bytes returns the byte representation of the key
-func (k CollectionIndexKey) Bytes() []byte {
- return []byte(k.ToString())
-}
-
-// ToDS returns the datastore key
-func (k CollectionIndexKey) ToDS() ds.Key {
- return ds.NewKey(k.ToString())
-}
-
-func NewSchemaVersionKey(schemaVersionID string) SchemaVersionKey {
- return SchemaVersionKey{SchemaVersionID: schemaVersionID}
-}
-
-func NewSchemaRootKey(schemaRoot string, schemaVersionID string) SchemaRootKey {
- return SchemaRootKey{
- SchemaRoot: schemaRoot,
- SchemaVersionID: schemaVersionID,
- }
-}
-
-func NewSchemaRootKeyFromString(keyString string) (SchemaRootKey, error) {
- keyString = strings.TrimPrefix(keyString, SCHEMA_VERSION_ROOT+"/")
- elements := strings.Split(keyString, "/")
- if len(elements) != 2 {
- return SchemaRootKey{}, ErrInvalidKey
- }
-
- return SchemaRootKey{
- SchemaRoot: elements[0],
- SchemaVersionID: elements[1],
- }, nil
-}
-
-func NewIndexIDSequenceKey(collectionID uint32) IndexIDSequenceKey {
- return IndexIDSequenceKey{CollectionID: collectionID}
-}
-
-func NewFieldIDSequenceKey(collectionRoot uint32) FieldIDSequenceKey {
- return FieldIDSequenceKey{CollectionRoot: collectionRoot}
-}
-
-func (k DataStoreKey) WithValueFlag() DataStoreKey {
- newKey := k
- newKey.InstanceType = ValueKey
- return newKey
-}
-
-func (k DataStoreKey) WithPriorityFlag() DataStoreKey {
- newKey := k
- newKey.InstanceType = PriorityKey
- return newKey
-}
-
-func (k DataStoreKey) WithDeletedFlag() DataStoreKey {
- newKey := k
- newKey.InstanceType = DeletedKey
- return newKey
-}
-
-func (k DataStoreKey) WithDocID(docID string) DataStoreKey {
- newKey := k
- newKey.DocID = docID
- return newKey
-}
-
-func (k DataStoreKey) WithInstanceInfo(key DataStoreKey) DataStoreKey {
- newKey := k
- newKey.DocID = key.DocID
- newKey.FieldID = key.FieldID
- newKey.InstanceType = key.InstanceType
- return newKey
-}
-
-func (k DataStoreKey) WithFieldID(fieldID string) DataStoreKey {
- newKey := k
- newKey.FieldID = fieldID
- return newKey
-}
-
-func (k DataStoreKey) ToHeadStoreKey() HeadStoreKey {
- return HeadStoreKey{
- DocID: k.DocID,
- FieldID: k.FieldID,
- }
-}
-
-func (k HeadStoreKey) WithDocID(docID string) HeadStoreKey {
- newKey := k
- newKey.DocID = docID
- return newKey
-}
-
-func (k HeadStoreKey) WithCid(c cid.Cid) HeadStoreKey {
- newKey := k
- newKey.Cid = c
- return newKey
-}
-
-func (k HeadStoreKey) WithFieldID(fieldID string) HeadStoreKey {
- newKey := k
- newKey.FieldID = fieldID
- return newKey
-}
-
-func (k DataStoreKey) ToString() string {
- return string(k.Bytes())
-}
-
-func (k DataStoreKey) Bytes() []byte {
- return EncodeDataStoreKey(&k)
-}
-
-func (k DataStoreKey) ToDS() ds.Key {
- return ds.NewKey(k.ToString())
-}
-
-func (k DataStoreKey) PrettyPrint() string {
- var result string
-
- if k.CollectionRootID != 0 {
- result = result + "/" + strconv.Itoa(int(k.CollectionRootID))
- }
- if k.InstanceType != "" {
- result = result + "/" + string(k.InstanceType)
- }
- if k.DocID != "" {
- result = result + "/" + k.DocID
- }
- if k.FieldID != "" {
- result = result + "/" + k.FieldID
- }
-
- return result
-}
-
-func (k DataStoreKey) Equal(other DataStoreKey) bool {
- return k.CollectionRootID == other.CollectionRootID &&
- k.DocID == other.DocID &&
- k.FieldID == other.FieldID &&
- k.InstanceType == other.InstanceType
-}
-
-func (k DataStoreKey) ToPrimaryDataStoreKey() PrimaryDataStoreKey {
- return PrimaryDataStoreKey{
- CollectionRootID: k.CollectionRootID,
- DocID: k.DocID,
- }
-}
-
-func NewViewCacheColPrefix(rootID uint32) ViewCacheKey {
- return ViewCacheKey{
- CollectionRootID: rootID,
- }
-}
-
-func NewViewCacheKey(rootID uint32, itemID uint) ViewCacheKey {
- return ViewCacheKey{
- CollectionRootID: rootID,
- ItemID: itemID,
- }
-}
-
-func (k ViewCacheKey) ToString() string {
- return string(k.Bytes())
-}
-
-func (k ViewCacheKey) Bytes() []byte {
- result := []byte(COLLECTION_VIEW_ITEMS)
-
- if k.CollectionRootID != 0 {
- result = append(result, '/')
- result = encoding.EncodeUvarintAscending(result, uint64(k.CollectionRootID))
- }
-
- if k.ItemID != 0 {
- result = append(result, '/')
- result = encoding.EncodeUvarintAscending(result, uint64(k.ItemID))
- }
-
- return result
-}
-
-func (k ViewCacheKey) ToDS() ds.Key {
- return ds.NewKey(k.ToString())
-}
-
-func (k ViewCacheKey) PrettyPrint() string {
- result := COLLECTION_VIEW_ITEMS
-
- if k.CollectionRootID != 0 {
- result = result + "/" + strconv.Itoa(int(k.CollectionRootID))
- }
- if k.ItemID != 0 {
- result = result + "/" + strconv.Itoa(int(k.ItemID))
- }
-
- return result
-}
-
-// NewIndexDataStoreKey creates a new IndexDataStoreKey from a collection ID, index ID and fields.
-// It also validates values of the fields.
-func NewIndexDataStoreKey(collectionID, indexID uint32, fields []IndexedField) IndexDataStoreKey {
- return IndexDataStoreKey{
- CollectionID: collectionID,
- IndexID: indexID,
- Fields: fields,
- }
-}
-
-// Bytes returns the byte representation of the key
-func (k *IndexDataStoreKey) Bytes() []byte {
- return EncodeIndexDataStoreKey(k)
-}
-
-// ToDS returns the datastore key
-func (k *IndexDataStoreKey) ToDS() ds.Key {
- return ds.NewKey(k.ToString())
-}
-
-// ToString returns the string representation of the key
-// It is in the following format:
-// /[CollectionID]/[IndexID]/[FieldValue](/[FieldValue]...)
-// If while composing the string from left to right, a component
-// is empty, the string is returned up to that point
-func (k *IndexDataStoreKey) ToString() string {
- return string(k.Bytes())
-}
-
-// Equal returns true if the two keys are equal
-func (k *IndexDataStoreKey) Equal(other IndexDataStoreKey) bool {
- if k.CollectionID != other.CollectionID || k.IndexID != other.IndexID {
- return false
- }
-
- if len(k.Fields) != len(other.Fields) {
- return false
- }
-
- for i, field := range k.Fields {
- if !field.Value.Equal(other.Fields[i].Value) || field.Descending != other.Fields[i].Descending {
- return false
- }
- }
-
- return true
-}
-
-func (k PrimaryDataStoreKey) ToDataStoreKey() DataStoreKey {
- return DataStoreKey{
- CollectionRootID: k.CollectionRootID,
- DocID: k.DocID,
- }
-}
-
-func (k PrimaryDataStoreKey) Bytes() []byte {
- return []byte(k.ToString())
-}
-
-func (k PrimaryDataStoreKey) ToDS() ds.Key {
- return ds.NewKey(k.ToString())
-}
-
-func (k PrimaryDataStoreKey) ToString() string {
- result := ""
-
- if k.CollectionRootID != 0 {
- result = result + "/" + fmt.Sprint(k.CollectionRootID)
- }
- result = result + PRIMARY_KEY
- if k.DocID != "" {
- result = result + "/" + k.DocID
- }
-
- return result
-}
-
-func (k CollectionKey) ToString() string {
- return fmt.Sprintf("%s/%s", COLLECTION_ID, strconv.Itoa(int(k.CollectionID)))
-}
-
-func (k CollectionKey) Bytes() []byte {
- return []byte(k.ToString())
-}
-
-func (k CollectionKey) ToDS() ds.Key {
- return ds.NewKey(k.ToString())
-}
-
-func (k CollectionNameKey) ToString() string {
- return fmt.Sprintf("%s/%s", COLLECTION_NAME, k.Name)
-}
-
-func (k CollectionNameKey) Bytes() []byte {
- return []byte(k.ToString())
-}
-
-func (k CollectionNameKey) ToDS() ds.Key {
- return ds.NewKey(k.ToString())
-}
-
-func (k CollectionSchemaVersionKey) ToString() string {
- result := COLLECTION_SCHEMA_VERSION
-
- if k.SchemaVersionID != "" {
- result = result + "/" + k.SchemaVersionID
- }
-
- if k.CollectionID != 0 {
- result = fmt.Sprintf("%s/%s", result, strconv.Itoa(int(k.CollectionID)))
- }
-
- return result
-}
-
-func (k CollectionSchemaVersionKey) Bytes() []byte {
- return []byte(k.ToString())
-}
-
-func (k CollectionSchemaVersionKey) ToDS() ds.Key {
- return ds.NewKey(k.ToString())
-}
-
-func (k CollectionRootKey) ToString() string {
- result := COLLECTION_ROOT
-
- if k.RootID != 0 {
- result = fmt.Sprintf("%s/%s", result, strconv.Itoa(int(k.RootID)))
- }
-
- if k.CollectionID != 0 {
- result = fmt.Sprintf("%s/%s", result, strconv.Itoa(int(k.CollectionID)))
- }
-
- return result
-}
-
-func (k CollectionRootKey) Bytes() []byte {
- return []byte(k.ToString())
-}
-
-func (k CollectionRootKey) ToDS() ds.Key {
- return ds.NewKey(k.ToString())
-}
-
-func (k SchemaVersionKey) ToString() string {
- result := SCHEMA_VERSION
-
- if k.SchemaVersionID != "" {
- result = result + "/" + k.SchemaVersionID
- }
-
- return result
-}
-
-func (k SchemaVersionKey) Bytes() []byte {
- return []byte(k.ToString())
-}
-
-func (k SchemaVersionKey) ToDS() ds.Key {
- return ds.NewKey(k.ToString())
-}
-
-func (k SchemaRootKey) ToString() string {
- result := SCHEMA_VERSION_ROOT
-
- if k.SchemaRoot != "" {
- result = result + "/" + k.SchemaRoot
- }
-
- if k.SchemaVersionID != "" {
- result = result + "/" + k.SchemaVersionID
- }
-
- return result
-}
-
-func (k SchemaRootKey) Bytes() []byte {
- return []byte(k.ToString())
-}
-
-func (k SchemaRootKey) ToDS() ds.Key {
- return ds.NewKey(k.ToString())
-}
-
-func (k CollectionIDSequenceKey) ToString() string {
- return COLLECTION_SEQ
-}
-
-func (k CollectionIDSequenceKey) Bytes() []byte {
- return []byte(k.ToString())
-}
-
-func (k CollectionIDSequenceKey) ToDS() ds.Key {
- return ds.NewKey(k.ToString())
-}
-
-func (k IndexIDSequenceKey) ToString() string {
- return INDEX_ID_SEQ + "/" + strconv.Itoa(int(k.CollectionID))
-}
-
-func (k IndexIDSequenceKey) Bytes() []byte {
- return []byte(k.ToString())
-}
-
-func (k IndexIDSequenceKey) ToDS() ds.Key {
- return ds.NewKey(k.ToString())
-}
-
-func (k FieldIDSequenceKey) ToString() string {
- return FIELD_ID_SEQ + "/" + strconv.Itoa(int(k.CollectionRoot))
-}
-
-func (k FieldIDSequenceKey) Bytes() []byte {
- return []byte(k.ToString())
-}
-
-func (k FieldIDSequenceKey) ToDS() ds.Key {
- return ds.NewKey(k.ToString())
-}
-
-// New
-func NewP2PCollectionKey(collectionID string) P2PCollectionKey {
- return P2PCollectionKey{CollectionID: collectionID}
-}
-
-func NewP2PCollectionKeyFromString(key string) (P2PCollectionKey, error) {
- keyArr := strings.Split(key, "/")
- if len(keyArr) != 4 {
- return P2PCollectionKey{}, errors.WithStack(ErrInvalidKey, errors.NewKV("Key", key))
- }
- return NewP2PCollectionKey(keyArr[3]), nil
-}
-
-func (k P2PCollectionKey) ToString() string {
- result := P2P_COLLECTION
-
- if k.CollectionID != "" {
- result = result + "/" + k.CollectionID
- }
-
- return result
-}
-
-func (k P2PCollectionKey) Bytes() []byte {
- return []byte(k.ToString())
-}
-
-func (k P2PCollectionKey) ToDS() ds.Key {
- return ds.NewKey(k.ToString())
-}
-
-func NewReplicatorKey(id string) ReplicatorKey {
- return ReplicatorKey{ReplicatorID: id}
-}
-
-func (k ReplicatorKey) ToString() string {
- result := REPLICATOR
-
- if k.ReplicatorID != "" {
- result = result + "/" + k.ReplicatorID
- }
-
- return result
-}
-
-func (k ReplicatorKey) Bytes() []byte {
- return []byte(k.ToString())
-}
-
-func (k ReplicatorKey) ToDS() ds.Key {
- return ds.NewKey(k.ToString())
-}
-
-func (k HeadStoreKey) ToString() string {
- var result string
-
- if k.DocID != "" {
- result = result + "/" + k.DocID
- }
- if k.FieldID != "" {
- result = result + "/" + k.FieldID
- }
- if k.Cid.Defined() {
- result = result + "/" + k.Cid.String()
- }
-
- return result
-}
-
-func (k HeadStoreKey) Bytes() []byte {
- return []byte(k.ToString())
-}
-
-func (k HeadStoreKey) ToDS() ds.Key {
- return ds.NewKey(k.ToString())
-}
-
-// PrefixEnd determines the end key given key as a prefix, that is the key that sorts precisely
-// behind all keys starting with prefix: "1" is added to the final byte and the carry propagated.
-// The special cases of nil and KeyMin always returns KeyMax.
-func (k DataStoreKey) PrefixEnd() DataStoreKey {
- newKey := k
-
- if k.FieldID != "" {
- newKey.FieldID = string(bytesPrefixEnd([]byte(k.FieldID)))
- return newKey
- }
- if k.DocID != "" {
- newKey.DocID = string(bytesPrefixEnd([]byte(k.DocID)))
- return newKey
- }
- if k.InstanceType != "" {
- newKey.InstanceType = InstanceType(bytesPrefixEnd([]byte(k.InstanceType)))
- return newKey
- }
- if k.CollectionRootID != 0 {
- newKey.CollectionRootID = k.CollectionRootID + 1
- return newKey
- }
-
- return newKey
-}
-
-// FieldIDAsUint extracts the Field Identifier from the Key.
-// In a Primary index, the last key path is the FieldIDAsUint.
-// This may be different in Secondary Indexes.
-// An error is returned if it can't correct convert the field to a uint32.
-func (k DataStoreKey) FieldIDAsUint() (uint32, error) {
- fieldID, err := strconv.Atoi(k.FieldID)
- if err != nil {
- return 0, NewErrFailedToGetFieldIdOfKey(err)
- }
- return uint32(fieldID), nil
-}
-
-func bytesPrefixEnd(b []byte) []byte {
- end := make([]byte, len(b))
- copy(end, b)
- for i := len(end) - 1; i >= 0; i-- {
- end[i] = end[i] + 1
- if end[i] != 0 {
- return end[:i+1]
- }
- }
- // This statement will only be reached if the key is already a
- // maximal byte string (i.e. already \xff...).
- return b
-}
-
-type ReplicatorRetryIDKey struct {
- PeerID string
-}
-
-var _ Key = (*ReplicatorRetryIDKey)(nil)
-
-func NewReplicatorRetryIDKey(peerID string) ReplicatorRetryIDKey {
- return ReplicatorRetryIDKey{
- PeerID: peerID,
- }
-}
-
-// NewReplicatorRetryIDKeyFromString creates a new [ReplicatorRetryIDKey] from a string.
-//
-// It expects the input string to be in the format `/rep/retry/id/[PeerID]`.
-func NewReplicatorRetryIDKeyFromString(key string) (ReplicatorRetryIDKey, error) {
- peerID := strings.TrimPrefix(key, REPLICATOR_RETRY_ID+"/")
- if peerID == "" {
- return ReplicatorRetryIDKey{}, errors.WithStack(ErrInvalidKey, errors.NewKV("Key", key))
- }
- return NewReplicatorRetryIDKey(peerID), nil
-}
-
-func (k ReplicatorRetryIDKey) ToString() string {
- return REPLICATOR_RETRY_ID + "/" + k.PeerID
-}
-
-func (k ReplicatorRetryIDKey) Bytes() []byte {
- return []byte(k.ToString())
-}
-
-func (k ReplicatorRetryIDKey) ToDS() ds.Key {
- return ds.NewKey(k.ToString())
-}
-
-type ReplicatorRetryDocIDKey struct {
- PeerID string
- DocID string
-}
-
-var _ Key = (*ReplicatorRetryDocIDKey)(nil)
-
-func NewReplicatorRetryDocIDKey(peerID, docID string) ReplicatorRetryDocIDKey {
- return ReplicatorRetryDocIDKey{
- PeerID: peerID,
- DocID: docID,
- }
-}
-
-// NewReplicatorRetryDocIDKeyFromString creates a new [ReplicatorRetryDocIDKey] from a string.
-//
-// It expects the input string to be in the format `/rep/retry/doc/[PeerID]/[DocID]`.
-func NewReplicatorRetryDocIDKeyFromString(key string) (ReplicatorRetryDocIDKey, error) {
- trimmedKey := strings.TrimPrefix(key, REPLICATOR_RETRY_DOC+"/")
- keyArr := strings.Split(trimmedKey, "/")
- if len(keyArr) != 2 {
- return ReplicatorRetryDocIDKey{}, errors.WithStack(ErrInvalidKey, errors.NewKV("Key", key))
- }
- return NewReplicatorRetryDocIDKey(keyArr[0], keyArr[1]), nil
-}
-
-func (k ReplicatorRetryDocIDKey) ToString() string {
- keyString := REPLICATOR_RETRY_DOC + "/" + k.PeerID
- if k.DocID != "" {
- keyString += "/" + k.DocID
- }
- return keyString
-}
-
-func (k ReplicatorRetryDocIDKey) Bytes() []byte {
- return []byte(k.ToString())
-}
-
-func (k ReplicatorRetryDocIDKey) ToDS() ds.Key {
- return ds.NewKey(k.ToString())
-}
diff --git a/internal/db/base/collection_keys.go b/internal/db/base/collection_keys.go
index 8878d50b13..31cdeef18c 100644
--- a/internal/db/base/collection_keys.go
+++ b/internal/db/base/collection_keys.go
@@ -15,11 +15,12 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
// MakeDataStoreKeyWithCollectionDescription returns the datastore key for the given collection description.
-func MakeDataStoreKeyWithCollectionDescription(col client.CollectionDescription) core.DataStoreKey {
- return core.DataStoreKey{
+func MakeDataStoreKeyWithCollectionDescription(col client.CollectionDescription) keys.DataStoreKey {
+ return keys.DataStoreKey{
CollectionRootID: col.RootID,
}
}
@@ -28,8 +29,8 @@ func MakeDataStoreKeyWithCollectionDescription(col client.CollectionDescription)
func MakeDataStoreKeyWithCollectionAndDocID(
col client.CollectionDescription,
docID string,
-) core.DataStoreKey {
- return core.DataStoreKey{
+) keys.DataStoreKey {
+ return keys.DataStoreKey{
CollectionRootID: col.RootID,
DocID: docID,
}
@@ -38,9 +39,9 @@ func MakeDataStoreKeyWithCollectionAndDocID(
func MakePrimaryIndexKeyForCRDT(
c client.CollectionDefinition,
ctype client.CType,
- key core.DataStoreKey,
+ key keys.DataStoreKey,
fieldName string,
-) (core.DataStoreKey, error) {
+) (keys.DataStoreKey, error) {
switch ctype {
case client.COMPOSITE:
return MakeDataStoreKeyWithCollectionDescription(c.Description).
@@ -50,7 +51,7 @@ func MakePrimaryIndexKeyForCRDT(
case client.LWW_REGISTER, client.PN_COUNTER, client.P_COUNTER:
field, ok := c.GetFieldByName(fieldName)
if !ok {
- return core.DataStoreKey{}, client.NewErrFieldNotExist(fieldName)
+ return keys.DataStoreKey{}, client.NewErrFieldNotExist(fieldName)
}
return MakeDataStoreKeyWithCollectionDescription(c.Description).
@@ -58,5 +59,5 @@ func MakePrimaryIndexKeyForCRDT(
WithFieldID(fmt.Sprint(field.ID)),
nil
}
- return core.DataStoreKey{}, ErrInvalidCrdtType
+ return keys.DataStoreKey{}, ErrInvalidCrdtType
}
diff --git a/internal/db/collection.go b/internal/db/collection.go
index 39e8757598..af631701fc 100644
--- a/internal/db/collection.go
+++ b/internal/db/collection.go
@@ -34,6 +34,7 @@ import (
"github.com/sourcenetwork/defradb/internal/db/description"
"github.com/sourcenetwork/defradb/internal/db/fetcher"
"github.com/sourcenetwork/defradb/internal/encryption"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/lens"
merklecrdt "github.com/sourcenetwork/defradb/internal/merkle/crdt"
)
@@ -280,7 +281,7 @@ func (c *collection) getAllDocIDsChan(
ctx context.Context,
) (<-chan client.DocIDResult, error) {
txn := mustGetContextTxn(ctx)
- prefix := core.PrimaryDataStoreKey{ // empty path for all keys prefix
+ prefix := keys.PrimaryDataStoreKey{ // empty path for all keys prefix
CollectionRootID: c.Description().RootID,
}
q, err := txn.Datastore().Query(ctx, query.Query{
@@ -420,15 +421,15 @@ func (c *collection) CreateMany(
func (c *collection) getDocIDAndPrimaryKeyFromDoc(
doc *client.Document,
-) (client.DocID, core.PrimaryDataStoreKey, error) {
+) (client.DocID, keys.PrimaryDataStoreKey, error) {
docID, err := doc.GenerateDocID()
if err != nil {
- return client.DocID{}, core.PrimaryDataStoreKey{}, err
+ return client.DocID{}, keys.PrimaryDataStoreKey{}, err
}
primaryKey := c.getPrimaryKeyFromDocID(docID)
if primaryKey.DocID != doc.ID().String() {
- return client.DocID{}, core.PrimaryDataStoreKey{},
+ return client.DocID{}, keys.PrimaryDataStoreKey{},
NewErrDocVerification(doc.ID().String(), primaryKey.DocID)
}
return docID, primaryKey, nil
@@ -667,7 +668,7 @@ func (c *collection) save(
merkleCRDT, err := merklecrdt.InstanceWithStore(
txn,
- core.NewCollectionSchemaVersionKey(c.Schema().VersionID, c.ID()),
+ keys.NewCollectionSchemaVersionKey(c.Schema().VersionID, c.ID()),
val.Type(),
fieldDescription.Kind,
fieldKey,
@@ -860,7 +861,7 @@ func (c *collection) Exists(
// check if a document exists with the given primary key
func (c *collection) exists(
ctx context.Context,
- primaryKey core.PrimaryDataStoreKey,
+ primaryKey keys.PrimaryDataStoreKey,
) (exists bool, isDeleted bool, err error) {
canRead, err := c.checkAccessOfDocWithACP(
ctx,
@@ -894,7 +895,7 @@ func (c *collection) exists(
// Calling it elsewhere could cause the omission of acp checks.
func (c *collection) saveCompositeToMerkleCRDT(
ctx context.Context,
- dsKey core.DataStoreKey,
+ dsKey keys.DataStoreKey,
links []coreblock.DAGLink,
status client.DocumentStatus,
) (cidlink.Link, []byte, error) {
@@ -902,7 +903,7 @@ func (c *collection) saveCompositeToMerkleCRDT(
dsKey = dsKey.WithFieldID(core.COMPOSITE_NAMESPACE)
merkleCRDT := merklecrdt.NewMerkleCompositeDAG(
txn,
- core.NewCollectionSchemaVersionKey(c.Schema().VersionID, c.ID()),
+ keys.NewCollectionSchemaVersionKey(c.Schema().VersionID, c.ID()),
dsKey,
)
@@ -913,28 +914,28 @@ func (c *collection) saveCompositeToMerkleCRDT(
return merkleCRDT.Save(ctx, links)
}
-func (c *collection) getPrimaryKeyFromDocID(docID client.DocID) core.PrimaryDataStoreKey {
- return core.PrimaryDataStoreKey{
+func (c *collection) getPrimaryKeyFromDocID(docID client.DocID) keys.PrimaryDataStoreKey {
+ return keys.PrimaryDataStoreKey{
CollectionRootID: c.Description().RootID,
DocID: docID.String(),
}
}
-func (c *collection) getDataStoreKeyFromDocID(docID client.DocID) core.DataStoreKey {
- return core.DataStoreKey{
+func (c *collection) getDataStoreKeyFromDocID(docID client.DocID) keys.DataStoreKey {
+ return keys.DataStoreKey{
CollectionRootID: c.Description().RootID,
DocID: docID.String(),
- InstanceType: core.ValueKey,
+ InstanceType: keys.ValueKey,
}
}
-func (c *collection) tryGetFieldKey(primaryKey core.PrimaryDataStoreKey, fieldName string) (core.DataStoreKey, bool) {
+func (c *collection) tryGetFieldKey(primaryKey keys.PrimaryDataStoreKey, fieldName string) (keys.DataStoreKey, bool) {
fieldID, hasField := c.tryGetFieldID(fieldName)
if !hasField {
- return core.DataStoreKey{}, false
+ return keys.DataStoreKey{}, false
}
- return core.DataStoreKey{
+ return keys.DataStoreKey{
CollectionRootID: c.Description().RootID,
DocID: primaryKey.DocID,
FieldID: strconv.FormatUint(uint64(fieldID), 10),
diff --git a/internal/db/collection_delete.go b/internal/db/collection_delete.go
index 62d7c24e50..468095b54c 100644
--- a/internal/db/collection_delete.go
+++ b/internal/db/collection_delete.go
@@ -16,8 +16,8 @@ import (
"github.com/sourcenetwork/defradb/acp"
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/event"
- "github.com/sourcenetwork/defradb/internal/core"
coreblock "github.com/sourcenetwork/defradb/internal/core/block"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
// DeleteWithFilter deletes using a filter to target documents for delete.
@@ -87,7 +87,7 @@ func (c *collection) deleteWithFilter(
// Extract the docID in the string format from the document value.
docID := doc.GetID()
- primaryKey := core.PrimaryDataStoreKey{
+ primaryKey := keys.PrimaryDataStoreKey{
CollectionRootID: c.Description().RootID,
DocID: docID,
}
@@ -109,7 +109,7 @@ func (c *collection) deleteWithFilter(
func (c *collection) applyDelete(
ctx context.Context,
- primaryKey core.PrimaryDataStoreKey,
+ primaryKey keys.PrimaryDataStoreKey,
) error {
// Must also have read permission to delete, inorder to check if document exists.
found, isDeleted, err := c.exists(ctx, primaryKey)
diff --git a/internal/db/collection_get.go b/internal/db/collection_get.go
index 05e6d43308..f2db5f0f8c 100644
--- a/internal/db/collection_get.go
+++ b/internal/db/collection_get.go
@@ -18,6 +18,7 @@ import (
"github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/base"
"github.com/sourcenetwork/defradb/internal/db/fetcher"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
func (c *collection) Get(
@@ -55,7 +56,7 @@ func (c *collection) Get(
func (c *collection) get(
ctx context.Context,
- primaryKey core.PrimaryDataStoreKey,
+ primaryKey keys.PrimaryDataStoreKey,
fields []client.FieldDefinition,
showDeleted bool,
) (*client.Document, error) {
diff --git a/internal/db/collection_id.go b/internal/db/collection_id.go
index e635a4477f..84edcbb1c4 100644
--- a/internal/db/collection_id.go
+++ b/internal/db/collection_id.go
@@ -17,7 +17,7 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/client/request"
- "github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
// setCollectionIDs sets the IDs on a collection description, including field IDs, mutating the input set.
@@ -33,7 +33,7 @@ func (db *db) setCollectionIDs(ctx context.Context, newCollections []client.Coll
// setCollectionID sets the IDs directly on a collection description, excluding stuff like field IDs,
// mutating the input set.
func (db *db) setCollectionID(ctx context.Context, newCollections []client.CollectionDefinition) error {
- colSeq, err := db.getSequence(ctx, core.CollectionIDSequenceKey{})
+ colSeq, err := db.getSequence(ctx, keys.CollectionIDSequenceKey{})
if err != nil {
return err
}
@@ -75,7 +75,7 @@ func (db *db) setFieldIDs(ctx context.Context, definitions []client.CollectionDe
}
for i := range definitions {
- fieldSeq, err := db.getSequence(ctx, core.NewFieldIDSequenceKey(definitions[i].Description.RootID))
+ fieldSeq, err := db.getSequence(ctx, keys.NewFieldIDSequenceKey(definitions[i].Description.RootID))
if err != nil {
return err
}
diff --git a/internal/db/collection_index.go b/internal/db/collection_index.go
index eb2b1b8d4c..b1baad8369 100644
--- a/internal/db/collection_index.go
+++ b/internal/db/collection_index.go
@@ -28,6 +28,7 @@ import (
"github.com/sourcenetwork/defradb/internal/db/base"
"github.com/sourcenetwork/defradb/internal/db/description"
"github.com/sourcenetwork/defradb/internal/db/fetcher"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/request/graphql/schema"
)
@@ -61,9 +62,9 @@ func (db *db) getAllIndexDescriptions(
) (map[client.CollectionName][]client.IndexDescription, error) {
// callers of this function must set a context transaction
txn := mustGetContextTxn(ctx)
- prefix := core.NewCollectionIndexKey(immutable.None[uint32](), "")
+ prefix := keys.NewCollectionIndexKey(immutable.None[uint32](), "")
- keys, indexDescriptions, err := datastore.DeserializePrefix[client.IndexDescription](ctx,
+ indexKeys, indexDescriptions, err := datastore.DeserializePrefix[client.IndexDescription](ctx,
prefix.ToString(), txn.Systemstore())
if err != nil {
@@ -72,8 +73,8 @@ func (db *db) getAllIndexDescriptions(
indexes := make(map[client.CollectionName][]client.IndexDescription)
- for i := range keys {
- indexKey, err := core.NewCollectionIndexKeyFromString(keys[i])
+ for i := range indexKeys {
+ indexKey, err := keys.NewCollectionIndexKeyFromString(indexKeys[i])
if err != nil {
return nil, NewErrInvalidStoredIndexKey(indexKey.ToString())
}
@@ -98,7 +99,7 @@ func (db *db) fetchCollectionIndexDescriptions(
) ([]client.IndexDescription, error) {
// callers of this function must set a context transaction
txn := mustGetContextTxn(ctx)
- prefix := core.NewCollectionIndexKey(immutable.Some(colID), "")
+ prefix := keys.NewCollectionIndexKey(immutable.Some(colID), "")
_, indexDescriptions, err := datastore.DeserializePrefix[client.IndexDescription](
ctx,
prefix.ToString(),
@@ -257,7 +258,7 @@ func (c *collection) createIndex(
colSeq, err := c.db.getSequence(
ctx,
- core.NewIndexIDSequenceKey(c.ID()),
+ keys.NewIndexIDSequenceKey(c.ID()),
)
if err != nil {
return nil, err
@@ -411,7 +412,7 @@ func (c *collection) dropIndex(ctx context.Context, indexName string) error {
break
}
}
- key := core.NewCollectionIndexKey(immutable.Some(c.ID()), indexName)
+ key := keys.NewCollectionIndexKey(immutable.Some(c.ID()), indexName)
err = txn.Systemstore().Delete(ctx, key.ToDS())
if err != nil {
return err
@@ -423,7 +424,7 @@ func (c *collection) dropIndex(ctx context.Context, indexName string) error {
func (c *collection) dropAllIndexes(ctx context.Context) error {
// callers of this function must set a context transaction
txn := mustGetContextTxn(ctx)
- prefix := core.NewCollectionIndexKey(immutable.Some(c.ID()), "")
+ prefix := keys.NewCollectionIndexKey(immutable.Some(c.ID()), "")
keys, err := datastore.FetchKeysForPrefix(ctx, prefix.ToString(), txn.Systemstore())
if err != nil {
@@ -494,19 +495,19 @@ func (c *collection) checkExistingFieldsAndAdjustRelFieldNames(
func (c *collection) generateIndexNameIfNeededAndCreateKey(
ctx context.Context,
desc *client.IndexDescription,
-) (core.CollectionIndexKey, error) {
+) (keys.CollectionIndexKey, error) {
// callers of this function must set a context transaction
txn := mustGetContextTxn(ctx)
- var indexKey core.CollectionIndexKey
+ var indexKey keys.CollectionIndexKey
if desc.Name == "" {
nameIncrement := 1
for {
desc.Name = generateIndexName(c, desc.Fields, nameIncrement)
- indexKey = core.NewCollectionIndexKey(immutable.Some(c.ID()), desc.Name)
+ indexKey = keys.NewCollectionIndexKey(immutable.Some(c.ID()), desc.Name)
exists, err := txn.Systemstore().Has(ctx, indexKey.ToDS())
if err != nil {
- return core.CollectionIndexKey{}, err
+ return keys.CollectionIndexKey{}, err
}
if !exists {
break
@@ -514,13 +515,13 @@ func (c *collection) generateIndexNameIfNeededAndCreateKey(
nameIncrement++
}
} else {
- indexKey = core.NewCollectionIndexKey(immutable.Some(c.ID()), desc.Name)
+ indexKey = keys.NewCollectionIndexKey(immutable.Some(c.ID()), desc.Name)
exists, err := txn.Systemstore().Has(ctx, indexKey.ToDS())
if err != nil {
- return core.CollectionIndexKey{}, err
+ return keys.CollectionIndexKey{}, err
}
if exists {
- return core.CollectionIndexKey{}, NewErrIndexWithNameAlreadyExists(desc.Name)
+ return keys.CollectionIndexKey{}, NewErrIndexWithNameAlreadyExists(desc.Name)
}
}
return indexKey, nil
diff --git a/internal/db/db.go b/internal/db/db.go
index 2e5363b94b..f2782bbe3a 100644
--- a/internal/db/db.go
+++ b/internal/db/db.go
@@ -34,6 +34,7 @@ import (
"github.com/sourcenetwork/defradb/event"
"github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/permission"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/request/graphql"
)
@@ -354,7 +355,7 @@ func (db *db) initialize(ctx context.Context) error {
// init meta data
// collection sequence
- _, err = db.getSequence(ctx, core.CollectionIDSequenceKey{})
+ _, err = db.getSequence(ctx, keys.CollectionIDSequenceKey{})
if err != nil {
return err
}
diff --git a/internal/db/description/collection.go b/internal/db/description/collection.go
index 20f652888e..1c8c4667ef 100644
--- a/internal/db/description/collection.go
+++ b/internal/db/description/collection.go
@@ -21,7 +21,7 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/datastore"
- "github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
// SaveCollection saves the given collection to the system store overwriting any
@@ -41,14 +41,14 @@ func SaveCollection(
return client.CollectionDescription{}, err
}
- key := core.NewCollectionKey(desc.ID)
+ key := keys.NewCollectionKey(desc.ID)
err = txn.Systemstore().Put(ctx, key.ToDS(), buf)
if err != nil {
return client.CollectionDescription{}, err
}
if existing.Name.HasValue() && existing.Name != desc.Name {
- nameKey := core.NewCollectionNameKey(existing.Name.Value())
+ nameKey := keys.NewCollectionNameKey(existing.Name.Value())
idBuf, err := txn.Systemstore().Get(ctx, nameKey.ToDS())
nameIndexExsts := true
if err != nil {
@@ -82,7 +82,7 @@ func SaveCollection(
return client.CollectionDescription{}, err
}
- nameKey := core.NewCollectionNameKey(desc.Name.Value())
+ nameKey := keys.NewCollectionNameKey(desc.Name.Value())
err = txn.Systemstore().Put(ctx, nameKey.ToDS(), idBuf)
if err != nil {
return client.CollectionDescription{}, err
@@ -91,13 +91,13 @@ func SaveCollection(
// The need for this key is temporary, we should replace it with the global collection ID
// https://github.com/sourcenetwork/defradb/issues/1085
- schemaVersionKey := core.NewCollectionSchemaVersionKey(desc.SchemaVersionID, desc.ID)
+ schemaVersionKey := keys.NewCollectionSchemaVersionKey(desc.SchemaVersionID, desc.ID)
err = txn.Systemstore().Put(ctx, schemaVersionKey.ToDS(), []byte{})
if err != nil {
return client.CollectionDescription{}, err
}
- rootKey := core.NewCollectionRootKey(desc.RootID, desc.ID)
+ rootKey := keys.NewCollectionRootKey(desc.RootID, desc.ID)
err = txn.Systemstore().Put(ctx, rootKey.ToDS(), []byte{})
if err != nil {
return client.CollectionDescription{}, err
@@ -111,7 +111,7 @@ func GetCollectionByID(
txn datastore.Txn,
id uint32,
) (client.CollectionDescription, error) {
- key := core.NewCollectionKey(id)
+ key := keys.NewCollectionKey(id)
buf, err := txn.Systemstore().Get(ctx, key.ToDS())
if err != nil {
return client.CollectionDescription{}, err
@@ -134,7 +134,7 @@ func GetCollectionByName(
txn datastore.Txn,
name string,
) (client.CollectionDescription, error) {
- nameKey := core.NewCollectionNameKey(name)
+ nameKey := keys.NewCollectionNameKey(name)
idBuf, err := txn.Systemstore().Get(ctx, nameKey.ToDS())
if err != nil {
return client.CollectionDescription{}, err
@@ -154,7 +154,7 @@ func GetCollectionsByRoot(
txn datastore.Txn,
root uint32,
) ([]client.CollectionDescription, error) {
- rootKey := core.NewCollectionRootKey(root, 0)
+ rootKey := keys.NewCollectionRootKey(root, 0)
rootQuery, err := txn.Systemstore().Query(ctx, query.Query{
Prefix: rootKey.ToString(),
@@ -173,7 +173,7 @@ func GetCollectionsByRoot(
return nil, err
}
- rootKey, err := core.NewCollectionRootKeyFromString(string(res.Key))
+ rootKey, err := keys.NewCollectionRootKeyFromString(string(res.Key))
if err != nil {
if err := rootQuery.Close(); err != nil {
return nil, NewErrFailedToCloseSchemaQuery(err)
@@ -201,7 +201,7 @@ func GetCollectionsBySchemaVersionID(
txn datastore.Txn,
schemaVersionID string,
) ([]client.CollectionDescription, error) {
- schemaVersionKey := core.NewCollectionSchemaVersionKey(schemaVersionID, 0)
+ schemaVersionKey := keys.NewCollectionSchemaVersionKey(schemaVersionID, 0)
schemaVersionQuery, err := txn.Systemstore().Query(ctx, query.Query{
Prefix: schemaVersionKey.ToString(),
@@ -220,7 +220,7 @@ func GetCollectionsBySchemaVersionID(
return nil, err
}
- colSchemaVersionKey, err := core.NewCollectionSchemaVersionKeyFromString(string(res.Key))
+ colSchemaVersionKey, err := keys.NewCollectionSchemaVersionKeyFromString(string(res.Key))
if err != nil {
if err := schemaVersionQuery.Close(); err != nil {
return nil, NewErrFailedToCloseSchemaQuery(err)
@@ -233,7 +233,7 @@ func GetCollectionsBySchemaVersionID(
cols := make([]client.CollectionDescription, len(colIDs))
for i, colID := range colIDs {
- key := core.NewCollectionKey(colID)
+ key := keys.NewCollectionKey(colID)
buf, err := txn.Systemstore().Get(ctx, key.ToDS())
if err != nil {
return nil, err
@@ -286,7 +286,7 @@ func GetCollections(
txn datastore.Txn,
) ([]client.CollectionDescription, error) {
q, err := txn.Systemstore().Query(ctx, query.Query{
- Prefix: core.COLLECTION_ID,
+ Prefix: keys.COLLECTION_ID,
})
if err != nil {
return nil, NewErrFailedToCreateCollectionQuery(err)
@@ -322,7 +322,7 @@ func GetActiveCollections(
txn datastore.Txn,
) ([]client.CollectionDescription, error) {
q, err := txn.Systemstore().Query(ctx, query.Query{
- Prefix: core.NewCollectionNameKey("").ToString(),
+ Prefix: keys.NewCollectionNameKey("").ToString(),
})
if err != nil {
return nil, NewErrFailedToCreateCollectionQuery(err)
@@ -364,6 +364,6 @@ func HasCollectionByName(
txn datastore.Txn,
name string,
) (bool, error) {
- nameKey := core.NewCollectionNameKey(name)
+ nameKey := keys.NewCollectionNameKey(name)
return txn.Systemstore().Has(ctx, nameKey.ToDS())
}
diff --git a/internal/db/description/schema.go b/internal/db/description/schema.go
index f9d5935770..3df17f7e1e 100644
--- a/internal/db/description/schema.go
+++ b/internal/db/description/schema.go
@@ -18,7 +18,7 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/datastore"
- "github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
// CreateSchemaVersion creates and saves to the store a new schema version.
@@ -34,7 +34,7 @@ func CreateSchemaVersion(
return client.SchemaDescription{}, err
}
- key := core.NewSchemaVersionKey(desc.VersionID)
+ key := keys.NewSchemaVersionKey(desc.VersionID)
err = txn.Systemstore().Put(ctx, key.ToDS(), buf)
if err != nil {
return client.SchemaDescription{}, err
@@ -43,7 +43,7 @@ func CreateSchemaVersion(
isNew := desc.Root == desc.VersionID
if !isNew {
// We don't need to add a root key if this is the first version
- schemaVersionHistoryKey := core.NewSchemaRootKey(desc.Root, desc.VersionID)
+ schemaVersionHistoryKey := keys.NewSchemaRootKey(desc.Root, desc.VersionID)
err = txn.Systemstore().Put(ctx, schemaVersionHistoryKey.ToDS(), []byte{})
if err != nil {
return client.SchemaDescription{}, err
@@ -62,7 +62,7 @@ func GetSchemaVersion(
txn datastore.Txn,
versionID string,
) (client.SchemaDescription, error) {
- key := core.NewSchemaVersionKey(versionID)
+ key := keys.NewSchemaVersionKey(versionID)
buf, err := txn.Systemstore().Get(ctx, key.ToDS())
if err != nil {
@@ -135,7 +135,7 @@ func GetSchemas(
versionIDs = append(versionIDs, col.SchemaVersionID)
}
- schemaVersionPrefix := core.NewSchemaVersionKey("")
+ schemaVersionPrefix := keys.NewSchemaVersionKey("")
schemaVersionQuery, err := txn.Systemstore().Query(ctx, query.Query{
Prefix: schemaVersionPrefix.ToString(),
})
@@ -181,7 +181,7 @@ func GetAllSchemas(
ctx context.Context,
txn datastore.Txn,
) ([]client.SchemaDescription, error) {
- prefix := core.NewSchemaVersionKey("")
+ prefix := keys.NewSchemaVersionKey("")
q, err := txn.Systemstore().Query(ctx, query.Query{
Prefix: prefix.ToString(),
})
@@ -226,7 +226,7 @@ func GetSchemaVersionIDs(
// It is not present in the history prefix.
schemaVersions := []string{schemaRoot}
- prefix := core.NewSchemaRootKey(schemaRoot, "")
+ prefix := keys.NewSchemaRootKey(schemaRoot, "")
q, err := txn.Systemstore().Query(ctx, query.Query{
Prefix: prefix.ToString(),
KeysOnly: true,
@@ -243,7 +243,7 @@ func GetSchemaVersionIDs(
return nil, err
}
- key, err := core.NewSchemaRootKeyFromString(res.Key)
+ key, err := keys.NewSchemaRootKeyFromString(res.Key)
if err != nil {
return nil, err
}
diff --git a/internal/db/fetcher/dag.go b/internal/db/fetcher/dag.go
index 3d3a6dd85e..395354fc08 100644
--- a/internal/db/fetcher/dag.go
+++ b/internal/db/fetcher/dag.go
@@ -21,6 +21,7 @@ import (
"github.com/sourcenetwork/defradb/datastore"
"github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
// HeadFetcher is a utility to incrementally fetch all the MerkleCRDT heads of a given doc/field.
@@ -40,8 +41,8 @@ func (hf *HeadFetcher) Start(
if len(spans.Value) == 0 {
spans = core.NewSpans(
core.NewSpan(
- core.DataStoreKey{},
- core.DataStoreKey{}.PrefixEnd(),
+ keys.DataStoreKey{},
+ keys.DataStoreKey{}.PrefixEnd(),
),
)
}
@@ -87,7 +88,7 @@ func (hf *HeadFetcher) FetchNext() (*cid.Cid, error) {
return nil, nil
}
- headStoreKey, err := core.NewHeadStoreKey(res.Key)
+ headStoreKey, err := keys.NewHeadStoreKey(res.Key)
if err != nil {
return nil, err
}
diff --git a/internal/db/fetcher/fetcher.go b/internal/db/fetcher/fetcher.go
index 06e3255e8c..62a03a4d17 100644
--- a/internal/db/fetcher/fetcher.go
+++ b/internal/db/fetcher/fetcher.go
@@ -28,6 +28,7 @@ import (
"github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/base"
"github.com/sourcenetwork/defradb/internal/db/permission"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
"github.com/sourcenetwork/defradb/internal/request/graphql/parser"
)
@@ -78,7 +79,7 @@ type Fetcher interface {
// keyValue is a KV store response containing the resulting core.Key and byte array value.
type keyValue struct {
- Key core.DataStoreKey
+ Key keys.DataStoreKey
Value []byte
}
@@ -366,7 +367,7 @@ func (df *DocumentFetcher) nextKey(ctx context.Context, seekNext bool) (spanDone
}
}
- if df.kv != nil && (df.kv.Key.InstanceType != core.ValueKey && df.kv.Key.InstanceType != core.DeletedKey) {
+ if df.kv != nil && (df.kv.Key.InstanceType != keys.ValueKey && df.kv.Key.InstanceType != keys.DeletedKey) {
// We can only ready value values, if we escape the collection's value keys
// then we must be done and can stop reading
spanDone = true
@@ -450,19 +451,19 @@ func (df *DocumentFetcher) seekKV(key string) (bool, *keyValue, error) {
// - It directly interacts with the KVIterator.
// - Returns true if the entire iterator/span is exhausted
// - Returns a kv pair instead of internally updating
-func (df *DocumentFetcher) nextKVRaw() (bool, core.DataStoreKey, dsq.Result, error) {
+func (df *DocumentFetcher) nextKVRaw() (bool, keys.DataStoreKey, dsq.Result, error) {
res, available := df.kvResultsIter.NextSync()
if !available {
- return true, core.DataStoreKey{}, res, nil
+ return true, keys.DataStoreKey{}, res, nil
}
err := res.Error
if err != nil {
- return true, core.DataStoreKey{}, res, err
+ return true, keys.DataStoreKey{}, res, err
}
- dsKey, err := core.NewDataStoreKey(res.Key)
+ dsKey, err := keys.NewDataStoreKey(res.Key)
if err != nil {
- return true, core.DataStoreKey{}, res, err
+ return true, keys.DataStoreKey{}, res, err
}
return false, dsKey, res, nil
@@ -504,7 +505,7 @@ func (df *DocumentFetcher) processKV(kv *keyValue) error {
}
}
- if kv.Key.FieldID == core.DATASTORE_DOC_VERSION_FIELD_ID {
+ if kv.Key.FieldID == keys.DATASTORE_DOC_VERSION_FIELD_ID {
df.doc.schemaVersionID = string(kv.Value)
return nil
}
diff --git a/internal/db/fetcher/indexer_iterators.go b/internal/db/fetcher/indexer_iterators.go
index ecf964185d..5e3671d3aa 100644
--- a/internal/db/fetcher/indexer_iterators.go
+++ b/internal/db/fetcher/indexer_iterators.go
@@ -22,7 +22,7 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/datastore"
"github.com/sourcenetwork/defradb/internal/connor"
- "github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
"github.com/ipfs/go-datastore/query"
@@ -62,7 +62,7 @@ type indexIterator interface {
}
type indexIterResult struct {
- key core.IndexDataStoreKey
+ key keys.IndexDataStoreKey
foundKey bool
value []byte
}
@@ -71,7 +71,7 @@ type indexIterResult struct {
type indexPrefixIterator struct {
indexDesc client.IndexDescription
indexedFields []client.FieldDefinition
- indexKey core.IndexDataStoreKey
+ indexKey keys.IndexDataStoreKey
matchers []valueMatcher
execInfo *ExecInfo
resultIter query.Results
@@ -114,7 +114,7 @@ func (iter *indexPrefixIterator) nextResult() (indexIterResult, error) {
if !hasVal {
return indexIterResult{}, nil
}
- key, err := core.DecodeIndexDataStoreKey([]byte(res.Key), &iter.indexDesc, iter.indexedFields)
+ key, err := keys.DecodeIndexDataStoreKey([]byte(res.Key), &iter.indexDesc, iter.indexedFields)
if err != nil {
return indexIterResult{}, err
}
@@ -151,7 +151,7 @@ func (iter *indexPrefixIterator) Close() error {
}
type eqSingleIndexIterator struct {
- indexKey core.IndexDataStoreKey
+ indexKey keys.IndexDataStoreKey
execInfo *ExecInfo
ctx context.Context
@@ -305,7 +305,7 @@ func (iter *arrayIndexIterator) Close() error {
return iter.inner.Close()
}
-func executeValueMatchers(matchers []valueMatcher, fields []core.IndexedField) (bool, error) {
+func executeValueMatchers(matchers []valueMatcher, fields []keys.IndexedField) (bool, error) {
for i := range matchers {
res, err := matchers[i].Match(fields[i].Value)
if err != nil {
@@ -576,7 +576,7 @@ func (f *IndexFetcher) newPrefixIteratorFromConditions(
}
func (f *IndexFetcher) newPrefixIterator(
- indexKey core.IndexDataStoreKey,
+ indexKey keys.IndexDataStoreKey,
matchers []valueMatcher,
execInfo *ExecInfo,
) *indexPrefixIterator {
@@ -624,7 +624,7 @@ func (f *IndexFetcher) newInIndexIterator(
}
} else {
indexKey := f.newIndexDataStoreKey()
- indexKey.Fields = []core.IndexedField{{Descending: f.indexDesc.Fields[0].Descending}}
+ indexKey.Fields = []keys.IndexedField{{Descending: f.indexDesc.Fields[0].Descending}}
iter = f.newPrefixIterator(indexKey, matchers, &f.execInfo)
}
@@ -634,18 +634,18 @@ func (f *IndexFetcher) newInIndexIterator(
}, nil
}
-func (f *IndexFetcher) newIndexDataStoreKey() core.IndexDataStoreKey {
- key := core.IndexDataStoreKey{CollectionID: f.col.ID(), IndexID: f.indexDesc.ID}
+func (f *IndexFetcher) newIndexDataStoreKey() keys.IndexDataStoreKey {
+ key := keys.IndexDataStoreKey{CollectionID: f.col.ID(), IndexID: f.indexDesc.ID}
return key
}
-func (f *IndexFetcher) newIndexDataStoreKeyWithValues(values []client.NormalValue) core.IndexDataStoreKey {
- fields := make([]core.IndexedField, len(values))
+func (f *IndexFetcher) newIndexDataStoreKeyWithValues(values []client.NormalValue) keys.IndexDataStoreKey {
+ fields := make([]keys.IndexedField, len(values))
for i := range values {
fields[i].Value = values[i]
fields[i].Descending = f.indexDesc.Fields[i].Descending
}
- return core.NewIndexDataStoreKey(f.col.ID(), f.indexDesc.ID, fields)
+ return keys.NewIndexDataStoreKey(f.col.ID(), f.indexDesc.ID, fields)
}
func (f *IndexFetcher) createIndexIterator() (indexIterator, error) {
diff --git a/internal/db/fetcher/versioned.go b/internal/db/fetcher/versioned.go
index 2660664bcd..508b0ea406 100644
--- a/internal/db/fetcher/versioned.go
+++ b/internal/db/fetcher/versioned.go
@@ -28,6 +28,7 @@ import (
"github.com/sourcenetwork/defradb/internal/core"
coreblock "github.com/sourcenetwork/defradb/internal/core/block"
"github.com/sourcenetwork/defradb/internal/db/base"
+ "github.com/sourcenetwork/defradb/internal/keys"
merklecrdt "github.com/sourcenetwork/defradb/internal/merkle/crdt"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -89,7 +90,7 @@ type VersionedFetcher struct {
root datastore.Rootstore
store datastore.Txn
- dsKey core.DataStoreKey
+ dsKey keys.DataStoreKey
version cid.Cid
queuedCids *list.List
@@ -392,7 +393,7 @@ func (vf *VersionedFetcher) processBlock(
}
mcrdt, err = merklecrdt.InstanceWithStore(
vf.store,
- core.CollectionSchemaVersionKey{},
+ keys.CollectionSchemaVersionKey{},
ctype,
kind,
dsKey,
@@ -427,7 +428,7 @@ func (vf *VersionedFetcher) Close() error {
}
// NewVersionedSpan creates a new VersionedSpan from a DataStoreKey and a version CID.
-func NewVersionedSpan(dsKey core.DataStoreKey, version cid.Cid) core.Spans {
+func NewVersionedSpan(dsKey keys.DataStoreKey, version cid.Cid) core.Spans {
// Todo: Dont abuse DataStoreKey for version cid!
- return core.NewSpans(core.NewSpan(dsKey, core.DataStoreKey{DocID: version.String()}))
+ return core.NewSpans(core.NewSpan(dsKey, keys.DataStoreKey{DocID: version.String()}))
}
diff --git a/internal/db/index.go b/internal/db/index.go
index c3860dca5a..638f0b923b 100644
--- a/internal/db/index.go
+++ b/internal/db/index.go
@@ -16,7 +16,7 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/datastore"
"github.com/sourcenetwork/defradb/errors"
- "github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/utils/slice"
)
@@ -121,28 +121,28 @@ func (index *collectionBaseIndex) getDocFieldValues(doc *client.Document) ([]cli
func (index *collectionBaseIndex) getDocumentsIndexKey(
doc *client.Document,
appendDocID bool,
-) (core.IndexDataStoreKey, error) {
+) (keys.IndexDataStoreKey, error) {
fieldValues, err := index.getDocFieldValues(doc)
if err != nil {
- return core.IndexDataStoreKey{}, err
+ return keys.IndexDataStoreKey{}, err
}
- fields := make([]core.IndexedField, len(index.fieldsDescs))
+ fields := make([]keys.IndexedField, len(index.fieldsDescs))
for i := range index.fieldsDescs {
fields[i].Value = fieldValues[i]
fields[i].Descending = index.desc.Fields[i].Descending
}
if appendDocID {
- fields = append(fields, core.IndexedField{Value: client.NewNormalString(doc.ID().String())})
+ fields = append(fields, keys.IndexedField{Value: client.NewNormalString(doc.ID().String())})
}
- return core.NewIndexDataStoreKey(index.collection.ID(), index.desc.ID, fields), nil
+ return keys.NewIndexDataStoreKey(index.collection.ID(), index.desc.ID, fields), nil
}
func (index *collectionBaseIndex) deleteIndexKey(
ctx context.Context,
txn datastore.Txn,
- key core.IndexDataStoreKey,
+ key keys.IndexDataStoreKey,
) error {
exists, err := txn.Datastore().Has(ctx, key.ToDS())
if err != nil {
@@ -157,7 +157,7 @@ func (index *collectionBaseIndex) deleteIndexKey(
// RemoveAll remove all artifacts of the index from the storage, i.e. all index
// field values for all documents.
func (index *collectionBaseIndex) RemoveAll(ctx context.Context, txn datastore.Txn) error {
- prefixKey := core.IndexDataStoreKey{}
+ prefixKey := keys.IndexDataStoreKey{}
prefixKey.CollectionID = index.collection.ID()
prefixKey.IndexID = index.desc.ID
@@ -196,7 +196,7 @@ var _ CollectionIndex = (*collectionSimpleIndex)(nil)
func (index *collectionSimpleIndex) getDocumentsIndexKey(
doc *client.Document,
-) (core.IndexDataStoreKey, error) {
+) (keys.IndexDataStoreKey, error) {
// docID is appended, as it's part of the key for non-unique indexes
return index.collectionBaseIndex.getDocumentsIndexKey(doc, true)
}
@@ -252,7 +252,7 @@ func (index *collectionSimpleIndex) deleteDocIndex(
}
// hasIndexKeyNilField returns true if the index key has a field with nil value
-func hasIndexKeyNilField(key *core.IndexDataStoreKey) bool {
+func hasIndexKeyNilField(key *keys.IndexDataStoreKey) bool {
for i := range key.Fields {
if key.Fields[i].Value.IsNil() {
return true
@@ -270,7 +270,7 @@ var _ CollectionIndex = (*collectionUniqueIndex)(nil)
func (index *collectionUniqueIndex) save(
ctx context.Context,
txn datastore.Txn,
- key *core.IndexDataStoreKey,
+ key *keys.IndexDataStoreKey,
val []byte,
) error {
err := txn.Datastore().Put(ctx, key.ToDS(), val)
@@ -312,20 +312,20 @@ func newUniqueIndexError(doc *client.Document, fieldsDescs []client.SchemaFieldD
func (index *collectionBaseIndex) getDocumentsUniqueIndexRecord(
doc *client.Document,
-) (core.IndexDataStoreKey, []byte, error) {
+) (keys.IndexDataStoreKey, []byte, error) {
key, err := index.getDocumentsIndexKey(doc, false)
if err != nil {
- return core.IndexDataStoreKey{}, nil, err
+ return keys.IndexDataStoreKey{}, nil, err
}
return makeUniqueKeyValueRecord(key, doc)
}
func makeUniqueKeyValueRecord(
- key core.IndexDataStoreKey,
+ key keys.IndexDataStoreKey,
doc *client.Document,
-) (core.IndexDataStoreKey, []byte, error) {
+) (keys.IndexDataStoreKey, []byte, error) {
if hasIndexKeyNilField(&key) {
- key.Fields = append(key.Fields, core.IndexedField{Value: client.NewNormalString(doc.ID().String())})
+ key.Fields = append(key.Fields, keys.IndexedField{Value: client.NewNormalString(doc.ID().String())})
return key, []byte{}, nil
} else {
return key, []byte(doc.ID().String()), nil
@@ -336,10 +336,10 @@ func (index *collectionUniqueIndex) prepareUniqueIndexRecordToStore(
ctx context.Context,
txn datastore.Txn,
doc *client.Document,
-) (core.IndexDataStoreKey, []byte, error) {
+) (keys.IndexDataStoreKey, []byte, error) {
key, val, err := index.getDocumentsUniqueIndexRecord(doc)
if err != nil {
- return core.IndexDataStoreKey{}, nil, err
+ return keys.IndexDataStoreKey{}, nil, err
}
return key, val, validateUniqueKeyValue(ctx, txn, key, val, doc, index.fieldsDescs)
}
@@ -347,7 +347,7 @@ func (index *collectionUniqueIndex) prepareUniqueIndexRecordToStore(
func validateUniqueKeyValue(
ctx context.Context,
txn datastore.Txn,
- key core.IndexDataStoreKey,
+ key keys.IndexDataStoreKey,
val []byte,
doc *client.Document,
fieldsDescs []client.SchemaFieldDescription,
@@ -455,7 +455,7 @@ func newCollectionArrayBaseIndex(base collectionBaseIndex) collectionArrayBaseIn
func (index *collectionArrayBaseIndex) newIndexKeyGenerator(
doc *client.Document,
appendDocID bool,
-) (func() (core.IndexDataStoreKey, bool), error) {
+) (func() (keys.IndexDataStoreKey, bool), error) {
key, err := index.getDocumentsIndexKey(doc, appendDocID)
if err != nil {
return nil, err
@@ -478,15 +478,15 @@ func (index *collectionArrayBaseIndex) newIndexKeyGenerator(
// This function generates the next key by iterating through all possible combinations.
// It works pretty much like a digital clock that first iterates through seconds, then minutes, etc.
- return func() (core.IndexDataStoreKey, bool) {
+ return func() (keys.IndexDataStoreKey, bool) {
if done {
- return core.IndexDataStoreKey{}, false
+ return keys.IndexDataStoreKey{}, false
}
- resultKey := core.IndexDataStoreKey{
+ resultKey := keys.IndexDataStoreKey{
CollectionID: key.CollectionID,
IndexID: key.IndexID,
- Fields: make([]core.IndexedField, len(key.Fields)),
+ Fields: make([]keys.IndexedField, len(key.Fields)),
}
copy(resultKey.Fields, key.Fields)
@@ -520,12 +520,12 @@ func (index *collectionArrayBaseIndex) newIndexKeyGenerator(
func (index *collectionArrayBaseIndex) getAllKeys(
doc *client.Document,
appendDocID bool,
-) ([]core.IndexDataStoreKey, error) {
+) ([]keys.IndexDataStoreKey, error) {
getNextOldKey, err := index.newIndexKeyGenerator(doc, appendDocID)
if err != nil {
return nil, err
}
- keys := make([]core.IndexDataStoreKey, 0)
+ keys := make([]keys.IndexDataStoreKey, 0)
for {
key, ok := getNextOldKey()
if !ok {
@@ -542,7 +542,7 @@ func (index *collectionArrayBaseIndex) deleteRetiredKeysAndReturnNew(
oldDoc *client.Document,
newDoc *client.Document,
appendDocID bool,
-) ([]core.IndexDataStoreKey, error) {
+) ([]keys.IndexDataStoreKey, error) {
prevKeys, err := index.getAllKeys(oldDoc, appendDocID)
if err != nil {
return nil, err
@@ -553,7 +553,7 @@ func (index *collectionArrayBaseIndex) deleteRetiredKeysAndReturnNew(
}
for _, prevKey := range prevKeys {
- keyEqual := func(key core.IndexDataStoreKey) bool { return prevKey.Equal(key) }
+ keyEqual := func(key keys.IndexDataStoreKey) bool { return prevKey.Equal(key) }
rem, removedVal := slice.RemoveFirstIf(currentKeys, keyEqual)
// If a previous keys is not among the current keys, it should be retired
if !removedVal.HasValue() {
@@ -683,7 +683,7 @@ func (index *collectionArrayUniqueIndex) addNewUniqueKey(
ctx context.Context,
txn datastore.Txn,
doc *client.Document,
- key core.IndexDataStoreKey,
+ key keys.IndexDataStoreKey,
) error {
key, val, err := makeUniqueKeyValueRecord(key, doc)
if err != nil {
diff --git a/internal/db/index_test.go b/internal/db/index_test.go
index 779bcdff84..950f41c47f 100644
--- a/internal/db/index_test.go
+++ b/internal/db/index_test.go
@@ -29,7 +29,7 @@ import (
"github.com/sourcenetwork/defradb/datastore"
"github.com/sourcenetwork/defradb/datastore/mocks"
"github.com/sourcenetwork/defradb/errors"
- "github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/request/graphql/schema"
)
@@ -244,7 +244,7 @@ func (f *indexTestFixture) dropIndex(colName, indexName string) error {
}
func (f *indexTestFixture) countIndexPrefixes(indexName string) int {
- prefix := core.NewCollectionIndexKey(immutable.Some(f.users.ID()), indexName)
+ prefix := keys.NewCollectionIndexKey(immutable.Some(f.users.ID()), indexName)
q, err := f.txn.Systemstore().Query(f.ctx, query.Query{
Prefix: prefix.ToString(),
})
@@ -422,7 +422,7 @@ func TestCreateIndex_ShouldSaveToSystemStorage(t *testing.T) {
_, err := f.createCollectionIndex(desc)
assert.NoError(t, err)
- key := core.NewCollectionIndexKey(immutable.Some(f.users.ID()), name)
+ key := keys.NewCollectionIndexKey(immutable.Some(f.users.ID()), name)
data, err := f.txn.Systemstore().Get(f.ctx, key.ToDS())
assert.NoError(t, err)
var deserialized client.IndexDescription
@@ -474,7 +474,7 @@ func TestCreateIndex_WithMultipleCollectionsAndIndexes_AssignIncrementedIDPerCol
desc, err := f.createCollectionIndexFor(col.Name().Value(), makeIndex(fieldName))
require.NoError(t, err)
assert.Equal(t, expectedID, desc.ID)
- seqKey := core.NewIndexIDSequenceKey(col.ID())
+ seqKey := keys.NewIndexIDSequenceKey(col.ID())
storedSeqKey, err := f.txn.Systemstore().Get(f.ctx, seqKey.ToDS())
assert.NoError(t, err)
storedSeqVal := binary.BigEndian.Uint64(storedSeqKey)
@@ -563,7 +563,7 @@ func TestGetIndexes_IfInvalidIndexIsStored_ReturnError(t *testing.T) {
f := newIndexTestFixture(t)
defer f.db.Close()
- indexKey := core.NewCollectionIndexKey(immutable.Some(f.users.ID()), "users_name_index")
+ indexKey := keys.NewCollectionIndexKey(immutable.Some(f.users.ID()), "users_name_index")
err := f.txn.Systemstore().Put(f.ctx, indexKey.ToDS(), []byte("invalid"))
assert.NoError(t, err)
@@ -575,7 +575,7 @@ func TestGetIndexes_IfInvalidIndexKeyIsStored_ReturnError(t *testing.T) {
f := newIndexTestFixture(t)
defer f.db.Close()
- indexKey := core.NewCollectionIndexKey(immutable.Some(f.users.ID()), "users_name_index")
+ indexKey := keys.NewCollectionIndexKey(immutable.Some(f.users.ID()), "users_name_index")
key := ds.NewKey(indexKey.ToString() + "/invalid")
desc := client.IndexDescription{
Name: "some_index_name",
@@ -741,7 +741,7 @@ func TestGetCollectionIndexes_IfInvalidIndexIsStored_ReturnError(t *testing.T) {
f := newIndexTestFixture(t)
defer f.db.Close()
- indexKey := core.NewCollectionIndexKey(immutable.Some(f.users.ID()), "users_name_index")
+ indexKey := keys.NewCollectionIndexKey(immutable.Some(f.users.ID()), "users_name_index")
err := f.txn.Systemstore().Put(f.ctx, indexKey.ToDS(), []byte("invalid"))
assert.NoError(t, err)
@@ -998,7 +998,7 @@ func TestDropIndex_ShouldDeleteIndex(t *testing.T) {
err := f.dropIndex(usersColName, desc.Name)
assert.NoError(t, err)
- indexKey := core.NewCollectionIndexKey(immutable.Some(f.users.ID()), desc.Name)
+ indexKey := keys.NewCollectionIndexKey(immutable.Some(f.users.ID()), desc.Name)
_, err = f.txn.Systemstore().Get(f.ctx, indexKey.ToDS())
assert.Error(t, err)
}
diff --git a/internal/db/indexed_docs_test.go b/internal/db/indexed_docs_test.go
index 4cd591a536..2c6ce0af53 100644
--- a/internal/db/indexed_docs_test.go
+++ b/internal/db/indexed_docs_test.go
@@ -31,6 +31,7 @@ import (
"github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/fetcher"
fetcherMocks "github.com/sourcenetwork/defradb/internal/db/fetcher/mocks"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -154,8 +155,8 @@ func (b *indexKeyBuilder) Unique() *indexKeyBuilder {
return b
}
-func (b *indexKeyBuilder) Build() core.IndexDataStoreKey {
- key := core.IndexDataStoreKey{}
+func (b *indexKeyBuilder) Build() keys.IndexDataStoreKey {
+ key := keys.IndexDataStoreKey{}
if b.colName == "" {
return key
@@ -238,11 +239,11 @@ indexLoop:
if i < len(b.descendingFields) {
descending = b.descendingFields[i]
}
- key.Fields = append(key.Fields, core.IndexedField{Value: val, Descending: descending})
+ key.Fields = append(key.Fields, keys.IndexedField{Value: val, Descending: descending})
}
if !b.isUnique || hasNilValue {
- key.Fields = append(key.Fields, core.IndexedField{Value: client.NewNormalString(b.doc.ID().String())})
+ key.Fields = append(key.Fields, keys.IndexedField{Value: client.NewNormalString(b.doc.ID().String())})
}
}
@@ -287,7 +288,7 @@ func (f *indexTestFixture) stubSystemStore(systemStoreOn *mocks.DSReaderWriter_E
indexOnNameDescData, err := json.Marshal(desc)
require.NoError(f.t, err)
- colIndexKey := core.NewCollectionIndexKey(immutable.Some(f.users.ID()), "")
+ colIndexKey := keys.NewCollectionIndexKey(immutable.Some(f.users.ID()), "")
matchPrefixFunc := func(q query.Query) bool {
return q.Prefix == colIndexKey.ToDS().String()
}
@@ -301,11 +302,11 @@ func (f *indexTestFixture) stubSystemStore(systemStoreOn *mocks.DSReaderWriter_E
systemStoreOn.Query(mock.Anything, mock.Anything).Maybe().
Return(mocks.NewQueryResultsWithValues(f.t), nil)
- colIndexOnNameKey := core.NewCollectionIndexKey(immutable.Some(f.users.ID()), testUsersColIndexName)
+ colIndexOnNameKey := keys.NewCollectionIndexKey(immutable.Some(f.users.ID()), testUsersColIndexName)
systemStoreOn.Get(mock.Anything, colIndexOnNameKey.ToDS()).Maybe().Return(indexOnNameDescData, nil)
if f.users != nil {
- sequenceKey := core.NewIndexIDSequenceKey(f.users.ID())
+ sequenceKey := keys.NewIndexIDSequenceKey(f.users.ID())
systemStoreOn.Get(mock.Anything, sequenceKey.ToDS()).Maybe().Return([]byte{0, 0, 0, 0, 0, 0, 0, 1}, nil)
}
@@ -684,7 +685,7 @@ func TestNonUniqueCreate_IfDatastoreFailsToStoreIndex_ReturnError(t *testing.T)
doc := f.newUserDoc("John", 21, f.users)
f.saveDocToCollection(doc, f.users)
- fieldKeyString := core.DataStoreKey{
+ fieldKeyString := keys.DataStoreKey{
CollectionRootID: f.users.Description().RootID,
}.WithDocID(doc.ID().String()).
WithFieldID("1").
diff --git a/internal/db/lens.go b/internal/db/lens.go
index bf0c9ce03a..0ad3d55994 100644
--- a/internal/db/lens.go
+++ b/internal/db/lens.go
@@ -18,8 +18,8 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/errors"
- "github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/description"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
func (db *db) setMigration(ctx context.Context, cfg client.LensConfig) error {
@@ -35,7 +35,7 @@ func (db *db) setMigration(ctx context.Context, cfg client.LensConfig) error {
return err
}
- colSeq, err := db.getSequence(ctx, core.CollectionIDSequenceKey{})
+ colSeq, err := db.getSequence(ctx, keys.CollectionIDSequenceKey{})
if err != nil {
return err
}
@@ -125,7 +125,7 @@ func (db *db) setMigration(ctx context.Context, cfg client.LensConfig) error {
}
if schemaFound {
- schemaRootKey := core.NewSchemaRootKey(schema.Root, cfg.DestinationSchemaVersionID)
+ schemaRootKey := keys.NewSchemaRootKey(schema.Root, cfg.DestinationSchemaVersionID)
err = txn.Systemstore().Put(ctx, schemaRootKey.ToDS(), []byte{})
if err != nil {
return err
diff --git a/internal/db/merge.go b/internal/db/merge.go
index 8b08b333e4..d1b96d5b77 100644
--- a/internal/db/merge.go
+++ b/internal/db/merge.go
@@ -30,6 +30,7 @@ import (
coreblock "github.com/sourcenetwork/defradb/internal/core/block"
"github.com/sourcenetwork/defradb/internal/db/base"
"github.com/sourcenetwork/defradb/internal/encryption"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/merkle/clock"
merklecrdt "github.com/sourcenetwork/defradb/internal/merkle/crdt"
)
@@ -133,7 +134,7 @@ type mergeProcessor struct {
encBlockLS linking.LinkSystem
mCRDTs map[string]merklecrdt.MerkleCRDT
col *collection
- dsKey core.DataStoreKey
+ dsKey keys.DataStoreKey
// composites is a list of composites that need to be merged.
composites *list.List
// missingEncryptionBlocks is a list of blocks that we failed to fetch
@@ -145,7 +146,7 @@ type mergeProcessor struct {
func (db *db) newMergeProcessor(
txn datastore.Txn,
col *collection,
- dsKey core.DataStoreKey,
+ dsKey keys.DataStoreKey,
) (*mergeProcessor, error) {
blockLS := cidlink.DefaultLinkSystem()
blockLS.SetReadStorage(txn.Blockstore().AsIPLDStorage())
@@ -433,7 +434,7 @@ func (mp *mergeProcessor) initCRDTForType(field string) (merklecrdt.MerkleCRDT,
return mcrdt, nil
}
- schemaVersionKey := core.CollectionSchemaVersionKey{
+ schemaVersionKey := keys.CollectionSchemaVersionKey{
SchemaVersionID: mp.col.Schema().VersionID,
CollectionID: mp.col.ID(),
}
@@ -490,7 +491,7 @@ func getCollectionFromRootSchema(ctx context.Context, db *db, rootSchema string)
// getHeadsAsMergeTarget retrieves the heads of the composite DAG for the given document
// and returns them as a merge target.
-func getHeadsAsMergeTarget(ctx context.Context, txn datastore.Txn, dsKey core.DataStoreKey) (mergeTarget, error) {
+func getHeadsAsMergeTarget(ctx context.Context, txn datastore.Txn, dsKey keys.DataStoreKey) (mergeTarget, error) {
cids, err := getHeads(ctx, txn, dsKey)
if err != nil {
@@ -512,7 +513,7 @@ func getHeadsAsMergeTarget(ctx context.Context, txn datastore.Txn, dsKey core.Da
}
// getHeads retrieves the heads associated with the given datastore key.
-func getHeads(ctx context.Context, txn datastore.Txn, dsKey core.DataStoreKey) ([]cid.Cid, error) {
+func getHeads(ctx context.Context, txn datastore.Txn, dsKey keys.DataStoreKey) ([]cid.Cid, error) {
headset := clock.NewHeadSet(txn.Headstore(), dsKey.ToHeadStoreKey())
cids, _, err := headset.List(ctx)
diff --git a/internal/db/p2p_replicator.go b/internal/db/p2p_replicator.go
index 7764a6dec9..61c082d210 100644
--- a/internal/db/p2p_replicator.go
+++ b/internal/db/p2p_replicator.go
@@ -27,6 +27,7 @@ import (
"github.com/sourcenetwork/defradb/event"
"github.com/sourcenetwork/defradb/internal/core"
coreblock "github.com/sourcenetwork/defradb/internal/core/block"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/merkle/clock"
)
@@ -61,7 +62,7 @@ func (db *db) SetReplicator(ctx context.Context, rep client.ReplicatorParams) er
storedRep := client.Replicator{}
storedSchemas := make(map[string]struct{})
- repKey := core.NewReplicatorKey(rep.Info.ID.String())
+ repKey := keys.NewReplicatorKey(rep.Info.ID.String())
hasOldRep, err := txn.Peerstore().Has(ctx, repKey.ToDS())
if err != nil {
return err
@@ -171,7 +172,7 @@ func (db *db) getDocsHeads(
log.ErrorContextE(ctx, "Key channel error", docIDResult.Err)
continue
}
- docID := core.DataStoreKeyFromDocID(docIDResult.ID)
+ docID := keys.DataStoreKeyFromDocID(docIDResult.ID)
headset := clock.NewHeadSet(
txn.Headstore(),
docID.WithFieldID(core.COMPOSITE_NAMESPACE).ToHeadStoreKey(),
@@ -226,7 +227,7 @@ func (db *db) DeleteReplicator(ctx context.Context, rep client.ReplicatorParams)
storedRep := client.Replicator{}
storedSchemas := make(map[string]struct{})
- repKey := core.NewReplicatorKey(rep.Info.ID.String())
+ repKey := keys.NewReplicatorKey(rep.Info.ID.String())
hasOldRep, err := txn.Peerstore().Has(ctx, repKey.ToDS())
if err != nil {
return err
@@ -257,7 +258,7 @@ func (db *db) DeleteReplicator(ctx context.Context, rep client.ReplicatorParams)
collections = append(collections, col)
}
// make sure the replicator exists in the datastore
- key := core.NewReplicatorKey(rep.Info.ID.String())
+ key := keys.NewReplicatorKey(rep.Info.ID.String())
_, err = txn.Peerstore().Get(ctx, key.ToDS())
if err != nil {
return err
@@ -276,7 +277,7 @@ func (db *db) DeleteReplicator(ctx context.Context, rep client.ReplicatorParams)
}
// Persist the replicator to the store, deleting it if no schemas remain
- key := core.NewReplicatorKey(rep.Info.ID.String())
+ key := keys.NewReplicatorKey(rep.Info.ID.String())
if len(rep.Collections) == 0 {
err := txn.Peerstore().Delete(ctx, key.ToDS())
if err != nil {
@@ -312,7 +313,7 @@ func (db *db) GetAllReplicators(ctx context.Context) ([]client.Replicator, error
// create collection system prefix query
query := query.Query{
- Prefix: core.NewReplicatorKey("").ToString(),
+ Prefix: keys.NewReplicatorKey("").ToString(),
}
results, err := txn.Peerstore().Query(ctx, query)
if err != nil {
@@ -376,7 +377,7 @@ func (db *db) handleReplicatorFailure(ctx context.Context, peerID, docID string)
if err != nil {
return err
}
- docIDKey := core.NewReplicatorRetryDocIDKey(peerID, docID)
+ docIDKey := keys.NewReplicatorRetryDocIDKey(peerID, docID)
err = txn.Peerstore().Put(ctx, docIDKey.ToDS(), []byte{})
if err != nil {
return err
@@ -424,7 +425,7 @@ func updateReplicatorStatus(
peerID string,
active bool,
) error {
- key := core.NewReplicatorKey(peerID)
+ key := keys.NewReplicatorKey(peerID)
repBytes, err := txn.Peerstore().Get(ctx, key.ToDS())
if err != nil {
return err
@@ -465,7 +466,7 @@ func createIfNotExistsReplicatorRetry(
peerID string,
retryIntervals []time.Duration,
) error {
- key := core.NewReplicatorRetryIDKey(peerID)
+ key := keys.NewReplicatorRetryIDKey(peerID)
exists, err := txn.Peerstore().Has(ctx, key.ToDS())
if err != nil {
return err
@@ -490,7 +491,7 @@ func createIfNotExistsReplicatorRetry(
func (db *db) retryReplicators(ctx context.Context) {
q := query.Query{
- Prefix: core.REPLICATOR_RETRY_ID,
+ Prefix: keys.REPLICATOR_RETRY_ID,
}
results, err := db.Peerstore().Query(ctx, q)
if err != nil {
@@ -500,7 +501,7 @@ func (db *db) retryReplicators(ctx context.Context) {
defer closeQueryResults(results)
now := time.Now()
for result := range results.Next() {
- key, err := core.NewReplicatorRetryIDKeyFromString(result.Key)
+ key, err := keys.NewReplicatorRetryIDKeyFromString(result.Key)
if err != nil {
log.ErrorContextE(ctx, "Failed to parse replicator retry ID key", err)
continue
@@ -520,7 +521,7 @@ func (db *db) retryReplicators(ctx context.Context) {
if now.After(rInfo.NextRetry) && !rInfo.Retrying {
// The replicator might have been deleted by the time we reach this point.
// If it no longer exists, we delete the retry key and all retry docs.
- exists, err := db.Peerstore().Has(ctx, core.NewReplicatorKey(key.PeerID).ToDS())
+ exists, err := db.Peerstore().Has(ctx, keys.NewReplicatorKey(key.PeerID).ToDS())
if err != nil {
log.ErrorContextE(ctx, "Failed to check if replicator exists", err)
continue
@@ -543,7 +544,7 @@ func (db *db) retryReplicators(ctx context.Context) {
}
}
-func (db *db) setReplicatorAsRetrying(ctx context.Context, key core.ReplicatorRetryIDKey, rInfo retryInfo) error {
+func (db *db) setReplicatorAsRetrying(ctx context.Context, key keys.ReplicatorRetryIDKey, rInfo retryInfo) error {
rInfo.Retrying = true
rInfo.NumRetries++
b, err := cbor.Marshal(rInfo)
@@ -559,7 +560,7 @@ func setReplicatorNextRetry(
peerID string,
retryIntervals []time.Duration,
) error {
- key := core.NewReplicatorRetryIDKey(peerID)
+ key := keys.NewReplicatorRetryIDKey(peerID)
b, err := txn.Peerstore().Get(ctx, key.ToDS())
if err != nil {
return err
@@ -597,7 +598,7 @@ func setReplicatorNextRetry(
// would be a high chance of unnecessary transaction conflicts.
func (db *db) retryReplicator(ctx context.Context, peerID string) {
log.InfoContext(ctx, "Retrying replicator", corelog.String("PeerID", peerID))
- key := core.NewReplicatorRetryDocIDKey(peerID, "")
+ key := keys.NewReplicatorRetryDocIDKey(peerID, "")
q := query.Query{
Prefix: key.ToString(),
}
@@ -613,7 +614,7 @@ func (db *db) retryReplicator(ctx context.Context, peerID string) {
return
default:
}
- key, err := core.NewReplicatorRetryDocIDKeyFromString(result.Key)
+ key, err := keys.NewReplicatorRetryDocIDKeyFromString(result.Key)
if err != nil {
log.ErrorContextE(ctx, "Failed to parse retry doc key", err)
continue
@@ -645,7 +646,7 @@ func (db *db) retryDoc(ctx context.Context, docID string) error {
return err
}
defer txn.Discard(ctx)
- headStoreKey := core.HeadStoreKey{
+ headStoreKey := keys.HeadStoreKey{
DocID: docID,
FieldID: core.COMPOSITE_NAMESPACE,
}
@@ -706,7 +707,7 @@ func deleteReplicatorRetryIfNoMoreDocs(
txn datastore.Txn,
peerID string,
) (bool, error) {
- key := core.NewReplicatorRetryDocIDKey(peerID, "")
+ key := keys.NewReplicatorRetryDocIDKey(peerID, "")
q := query.Query{
Prefix: key.ToString(),
KeysOnly: true,
@@ -721,7 +722,7 @@ func deleteReplicatorRetryIfNoMoreDocs(
return false, err
}
if len(entries) == 0 {
- key := core.NewReplicatorRetryIDKey(peerID)
+ key := keys.NewReplicatorRetryIDKey(peerID)
return true, txn.Peerstore().Delete(ctx, key.ToDS())
}
return false, nil
@@ -729,12 +730,12 @@ func deleteReplicatorRetryIfNoMoreDocs(
// deleteReplicatorRetryAndDocs deletes the replicator retry and all retry docs.
func (db *db) deleteReplicatorRetryAndDocs(ctx context.Context, peerID string) error {
- key := core.NewReplicatorRetryIDKey(peerID)
+ key := keys.NewReplicatorRetryIDKey(peerID)
err := db.Peerstore().Delete(ctx, key.ToDS())
if err != nil {
return err
}
- docKey := core.NewReplicatorRetryDocIDKey(peerID, "")
+ docKey := keys.NewReplicatorRetryDocIDKey(peerID, "")
q := query.Query{
Prefix: docKey.ToString(),
KeysOnly: true,
@@ -745,7 +746,7 @@ func (db *db) deleteReplicatorRetryAndDocs(ctx context.Context, peerID string) e
}
defer closeQueryResults(results)
for result := range results.Next() {
- err = db.Peerstore().Delete(ctx, core.NewReplicatorRetryDocIDKey(peerID, result.Key).ToDS())
+ err = db.Peerstore().Delete(ctx, keys.NewReplicatorRetryDocIDKey(peerID, result.Key).ToDS())
if err != nil {
return err
}
diff --git a/internal/db/p2p_schema_root.go b/internal/db/p2p_schema_root.go
index 6f85ea682b..2df2dcc931 100644
--- a/internal/db/p2p_schema_root.go
+++ b/internal/db/p2p_schema_root.go
@@ -19,7 +19,7 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/event"
- "github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
const marker = byte(0xff)
@@ -64,7 +64,7 @@ func (db *db) AddP2PCollections(ctx context.Context, collectionIDs []string) err
// Ensure we can add all the collections to the store on the transaction
// before adding to topics.
for _, col := range storeCollections {
- key := core.NewP2PCollectionKey(col.SchemaRoot())
+ key := keys.NewP2PCollectionKey(col.SchemaRoot())
err = txn.Systemstore().Put(ctx, key.ToDS(), []byte{marker})
if err != nil {
return err
@@ -121,7 +121,7 @@ func (db *db) RemoveP2PCollections(ctx context.Context, collectionIDs []string)
// Ensure we can remove all the collections to the store on the transaction
// before adding to topics.
for _, col := range storeCollections {
- key := core.NewP2PCollectionKey(col.SchemaRoot())
+ key := keys.NewP2PCollectionKey(col.SchemaRoot())
err = txn.Systemstore().Delete(ctx, key.ToDS())
if err != nil {
return err
@@ -154,7 +154,7 @@ func (db *db) GetAllP2PCollections(ctx context.Context) ([]string, error) {
defer txn.Discard(ctx)
query := dsq.Query{
- Prefix: core.NewP2PCollectionKey("").ToString(),
+ Prefix: keys.NewP2PCollectionKey("").ToString(),
}
results, err := txn.Systemstore().Query(ctx, query)
if err != nil {
@@ -163,7 +163,7 @@ func (db *db) GetAllP2PCollections(ctx context.Context) ([]string, error) {
collectionIDs := []string{}
for result := range results.Next() {
- key, err := core.NewP2PCollectionKeyFromString(result.Key)
+ key, err := keys.NewP2PCollectionKeyFromString(result.Key)
if err != nil {
return nil, err
}
diff --git a/internal/db/sequence.go b/internal/db/sequence.go
index 06c2989b99..8a9facaa63 100644
--- a/internal/db/sequence.go
+++ b/internal/db/sequence.go
@@ -17,15 +17,15 @@ import (
ds "github.com/ipfs/go-datastore"
"github.com/sourcenetwork/defradb/errors"
- "github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
type sequence struct {
- key core.Key
+ key keys.Key
val uint64
}
-func (db *db) getSequence(ctx context.Context, key core.Key) (*sequence, error) {
+func (db *db) getSequence(ctx context.Context, key keys.Key) (*sequence, error) {
seq := &sequence{
key: key,
val: uint64(0),
diff --git a/internal/db/view.go b/internal/db/view.go
index 9c1e5eaafd..e9a4d0d31c 100644
--- a/internal/db/view.go
+++ b/internal/db/view.go
@@ -25,6 +25,7 @@ import (
"github.com/sourcenetwork/defradb/client/request"
"github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/description"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner"
)
@@ -210,7 +211,7 @@ func (db *db) buildViewCache(ctx context.Context, col client.CollectionDefinitio
return err
}
- itemKey := core.NewViewCacheKey(col.Description.RootID, itemID)
+ itemKey := keys.NewViewCacheKey(col.Description.RootID, itemID)
err = txn.Datastore().Put(ctx, itemKey.ToDS(), serializedItem)
if err != nil {
return err
@@ -227,7 +228,7 @@ func (db *db) buildViewCache(ctx context.Context, col client.CollectionDefinitio
func (db *db) clearViewCache(ctx context.Context, col client.CollectionDefinition) error {
txn := mustGetContextTxn(ctx)
- prefix := core.NewViewCacheColPrefix(col.Description.RootID)
+ prefix := keys.NewViewCacheColPrefix(col.Description.RootID)
q, err := txn.Datastore().Query(ctx, query.Query{
Prefix: prefix.ToString(),
diff --git a/internal/keys/datastore.go b/internal/keys/datastore.go
new file mode 100644
index 0000000000..7caaf64e94
--- /dev/null
+++ b/internal/keys/datastore.go
@@ -0,0 +1,15 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+const (
+ COLLECTION_VIEW_ITEMS = "/collection/vi"
+)
diff --git a/internal/keys/datastore_doc.go b/internal/keys/datastore_doc.go
new file mode 100644
index 0000000000..1665fb7ea3
--- /dev/null
+++ b/internal/keys/datastore_doc.go
@@ -0,0 +1,292 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import (
+ "strconv"
+
+ ds "github.com/ipfs/go-datastore"
+
+ "github.com/sourcenetwork/defradb/client"
+ "github.com/sourcenetwork/defradb/internal/encoding"
+)
+
+// InstanceType is a type that represents the type of instance.
+type InstanceType string
+
+const (
+ // ValueKey is a type that represents a value instance.
+ ValueKey = InstanceType("v")
+ // PriorityKey is a type that represents a priority instance.
+ PriorityKey = InstanceType("p")
+ // DeletedKey is a type that represents a deleted document.
+ DeletedKey = InstanceType("d")
+
+ DATASTORE_DOC_VERSION_FIELD_ID = "v"
+)
+
+// DataStoreKey is a type that represents a key in the database.
+type DataStoreKey struct {
+ CollectionRootID uint32
+ InstanceType InstanceType
+ DocID string
+ FieldID string
+}
+
+var _ Key = (*DataStoreKey)(nil)
+
+// Creates a new DataStoreKey from a string as best as it can,
+// splitting the input using '/' as a field delimiter. It assumes
+// that the input string is in the following format:
+//
+// /[CollectionRootId]/[InstanceType]/[DocID]/[FieldId]
+//
+// Any properties before the above (assuming a '/' deliminator) are ignored
+func NewDataStoreKey(key string) (DataStoreKey, error) {
+ return DecodeDataStoreKey([]byte(key))
+}
+
+func MustNewDataStoreKey(key string) DataStoreKey {
+ dsKey, err := NewDataStoreKey(key)
+ if err != nil {
+ panic(err)
+ }
+ return dsKey
+}
+
+func DataStoreKeyFromDocID(docID client.DocID) DataStoreKey {
+ return DataStoreKey{
+ DocID: docID.String(),
+ }
+}
+
+func (k DataStoreKey) WithValueFlag() DataStoreKey {
+ newKey := k
+ newKey.InstanceType = ValueKey
+ return newKey
+}
+
+func (k DataStoreKey) WithPriorityFlag() DataStoreKey {
+ newKey := k
+ newKey.InstanceType = PriorityKey
+ return newKey
+}
+
+func (k DataStoreKey) WithDeletedFlag() DataStoreKey {
+ newKey := k
+ newKey.InstanceType = DeletedKey
+ return newKey
+}
+
+func (k DataStoreKey) WithDocID(docID string) DataStoreKey {
+ newKey := k
+ newKey.DocID = docID
+ return newKey
+}
+
+func (k DataStoreKey) WithInstanceInfo(key DataStoreKey) DataStoreKey {
+ newKey := k
+ newKey.DocID = key.DocID
+ newKey.FieldID = key.FieldID
+ newKey.InstanceType = key.InstanceType
+ return newKey
+}
+
+func (k DataStoreKey) WithFieldID(fieldID string) DataStoreKey {
+ newKey := k
+ newKey.FieldID = fieldID
+ return newKey
+}
+
+func (k DataStoreKey) ToHeadStoreKey() HeadStoreKey {
+ return HeadStoreKey{
+ DocID: k.DocID,
+ FieldID: k.FieldID,
+ }
+}
+
+func (k DataStoreKey) ToString() string {
+ return string(k.Bytes())
+}
+
+func (k DataStoreKey) Bytes() []byte {
+ return EncodeDataStoreKey(&k)
+}
+
+func (k DataStoreKey) ToDS() ds.Key {
+ return ds.NewKey(k.ToString())
+}
+
+func (k DataStoreKey) PrettyPrint() string {
+ var result string
+
+ if k.CollectionRootID != 0 {
+ result = result + "/" + strconv.Itoa(int(k.CollectionRootID))
+ }
+ if k.InstanceType != "" {
+ result = result + "/" + string(k.InstanceType)
+ }
+ if k.DocID != "" {
+ result = result + "/" + k.DocID
+ }
+ if k.FieldID != "" {
+ result = result + "/" + k.FieldID
+ }
+
+ return result
+}
+
+func (k DataStoreKey) Equal(other DataStoreKey) bool {
+ return k.CollectionRootID == other.CollectionRootID &&
+ k.DocID == other.DocID &&
+ k.FieldID == other.FieldID &&
+ k.InstanceType == other.InstanceType
+}
+
+func (k DataStoreKey) ToPrimaryDataStoreKey() PrimaryDataStoreKey {
+ return PrimaryDataStoreKey{
+ CollectionRootID: k.CollectionRootID,
+ DocID: k.DocID,
+ }
+}
+
+// PrefixEnd determines the end key given key as a prefix, that is the key that sorts precisely
+// behind all keys starting with prefix: "1" is added to the final byte and the carry propagated.
+// The special cases of nil and KeyMin always returns KeyMax.
+func (k DataStoreKey) PrefixEnd() DataStoreKey {
+ newKey := k
+
+ if k.FieldID != "" {
+ newKey.FieldID = string(bytesPrefixEnd([]byte(k.FieldID)))
+ return newKey
+ }
+ if k.DocID != "" {
+ newKey.DocID = string(bytesPrefixEnd([]byte(k.DocID)))
+ return newKey
+ }
+ if k.InstanceType != "" {
+ newKey.InstanceType = InstanceType(bytesPrefixEnd([]byte(k.InstanceType)))
+ return newKey
+ }
+ if k.CollectionRootID != 0 {
+ newKey.CollectionRootID = k.CollectionRootID + 1
+ return newKey
+ }
+
+ return newKey
+}
+
+// FieldIDAsUint extracts the Field Identifier from the Key.
+// In a Primary index, the last key path is the FieldIDAsUint.
+// This may be different in Secondary Indexes.
+// An error is returned if it can't correctly convert the field to a uint32.
+func (k DataStoreKey) FieldIDAsUint() (uint32, error) {
+ fieldID, err := strconv.Atoi(k.FieldID)
+ if err != nil {
+ return 0, NewErrFailedToGetFieldIdOfKey(err)
+ }
+ return uint32(fieldID), nil
+}
+
+func bytesPrefixEnd(b []byte) []byte {
+ end := make([]byte, len(b))
+ copy(end, b)
+ for i := len(end) - 1; i >= 0; i-- {
+ end[i] = end[i] + 1
+ if end[i] != 0 {
+ return end[:i+1]
+ }
+ }
+ // This statement will only be reached if the key is already a
+ // maximal byte string (i.e. already \xff...).
+ return b
+}
+
+// DecodeDataStoreKey decodes a store key into a [DataStoreKey].
+func DecodeDataStoreKey(data []byte) (DataStoreKey, error) {
+ if len(data) == 0 {
+ return DataStoreKey{}, ErrEmptyKey
+ }
+
+ if data[0] != '/' {
+ return DataStoreKey{}, ErrInvalidKey
+ }
+ data = data[1:]
+
+ data, colRootID, err := encoding.DecodeUvarintAscending(data)
+ if err != nil {
+ return DataStoreKey{}, err
+ }
+
+ var instanceType InstanceType
+ if len(data) > 1 {
+ if data[0] == '/' {
+ data = data[1:]
+ }
+ instanceType = InstanceType(data[0])
+ data = data[1:]
+ }
+
+ const docKeyLength int = 40
+ var docID string
+ if len(data) > docKeyLength {
+ if data[0] == '/' {
+ data = data[1:]
+ }
+ docID = string(data[:docKeyLength])
+ data = data[docKeyLength:]
+ }
+
+ var fieldID string
+ if len(data) > 1 {
+ if data[0] == '/' {
+ data = data[1:]
+ }
+ // Todo: This should be encoded/decoded properly in
+ // https://github.com/sourcenetwork/defradb/issues/2818
+ fieldID = string(data)
+ }
+
+ return DataStoreKey{
+ CollectionRootID: uint32(colRootID),
+ InstanceType: (instanceType),
+ DocID: docID,
+ FieldID: fieldID,
+ }, nil
+}
+
+// EncodeDataStoreKey encodes a [*DataStoreKey] to a byte array suitable for sorting in the store.
+func EncodeDataStoreKey(key *DataStoreKey) []byte {
+ var result []byte
+
+ if key.CollectionRootID != 0 {
+ result = encoding.EncodeUvarintAscending([]byte{'/'}, uint64(key.CollectionRootID))
+ }
+
+ if key.InstanceType != "" {
+ result = append(result, '/')
+ result = append(result, []byte(string(key.InstanceType))...)
+ }
+
+ if key.DocID != "" {
+ result = append(result, '/')
+ result = append(result, []byte(key.DocID)...)
+ }
+
+ if key.FieldID != "" {
+ result = append(result, '/')
+ // Todo: This should be encoded/decoded properly in
+ // https://github.com/sourcenetwork/defradb/issues/2818
+ result = append(result, []byte(key.FieldID)...)
+ }
+
+ return result
+}
diff --git a/internal/keys/datastore_index.go b/internal/keys/datastore_index.go
new file mode 100644
index 0000000000..83ed77b364
--- /dev/null
+++ b/internal/keys/datastore_index.go
@@ -0,0 +1,191 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import (
+ ds "github.com/ipfs/go-datastore"
+
+ "github.com/sourcenetwork/defradb/client"
+ "github.com/sourcenetwork/defradb/internal/encoding"
+)
+
+// IndexedField contains information necessary for storing a single
+// value of a field in an index.
+type IndexedField struct {
+ // Value is the value of the field in the index
+ Value client.NormalValue
+ // Descending is true if the field is sorted in descending order
+ Descending bool
+}
+
+// IndexDataStoreKey is key of an indexed document in the database.
+type IndexDataStoreKey struct {
+ // CollectionID is the id of the collection
+ CollectionID uint32
+ // IndexID is the id of the index
+ IndexID uint32
+ // Fields is the values of the fields in the index
+ Fields []IndexedField
+}
+
+var _ Key = (*IndexDataStoreKey)(nil)
+
+// NewIndexDataStoreKey creates a new IndexDataStoreKey from a collection ID, index ID and fields.
+// It also validates values of the fields.
+func NewIndexDataStoreKey(collectionID, indexID uint32, fields []IndexedField) IndexDataStoreKey {
+ return IndexDataStoreKey{
+ CollectionID: collectionID,
+ IndexID: indexID,
+ Fields: fields,
+ }
+}
+
+// Bytes returns the byte representation of the key
+func (k *IndexDataStoreKey) Bytes() []byte {
+ return EncodeIndexDataStoreKey(k)
+}
+
+// ToDS returns the datastore key
+func (k *IndexDataStoreKey) ToDS() ds.Key {
+ return ds.NewKey(k.ToString())
+}
+
+// ToString returns the string representation of the key
+// It is in the following format:
+// /[CollectionID]/[IndexID]/[FieldValue](/[FieldValue]...)
+// If while composing the string from left to right, a component
+// is empty, the string is returned up to that point
+func (k *IndexDataStoreKey) ToString() string {
+ return string(k.Bytes())
+}
+
+// Equal returns true if the two keys are equal
+func (k *IndexDataStoreKey) Equal(other IndexDataStoreKey) bool {
+ if k.CollectionID != other.CollectionID || k.IndexID != other.IndexID {
+ return false
+ }
+
+ if len(k.Fields) != len(other.Fields) {
+ return false
+ }
+
+ for i, field := range k.Fields {
+ if !field.Value.Equal(other.Fields[i].Value) || field.Descending != other.Fields[i].Descending {
+ return false
+ }
+ }
+
+ return true
+}
+
+// DecodeIndexDataStoreKey decodes a IndexDataStoreKey from bytes.
+// It expects the input bytes is in the following format:
+//
+// /[CollectionID]/[IndexID]/[FieldValue](/[FieldValue]...)
+//
+// Where [CollectionID] and [IndexID] are integers
+//
+// All values of the fields are converted to standardized Defra Go type
+// according to fields description.
+func DecodeIndexDataStoreKey(
+ data []byte,
+ indexDesc *client.IndexDescription,
+ fields []client.FieldDefinition,
+) (IndexDataStoreKey, error) {
+ if len(data) == 0 {
+ return IndexDataStoreKey{}, ErrEmptyKey
+ }
+
+ if data[0] != '/' {
+ return IndexDataStoreKey{}, ErrInvalidKey
+ }
+ data = data[1:]
+
+ data, colID, err := encoding.DecodeUvarintAscending(data)
+ if err != nil {
+ return IndexDataStoreKey{}, err
+ }
+
+ key := IndexDataStoreKey{CollectionID: uint32(colID)}
+
+ if data[0] != '/' {
+ return IndexDataStoreKey{}, ErrInvalidKey
+ }
+ data = data[1:]
+
+ data, indID, err := encoding.DecodeUvarintAscending(data)
+ if err != nil {
+ return IndexDataStoreKey{}, err
+ }
+ key.IndexID = uint32(indID)
+
+ if len(data) == 0 {
+ return key, nil
+ }
+
+ for len(data) > 0 {
+ if data[0] != '/' {
+ return IndexDataStoreKey{}, ErrInvalidKey
+ }
+ data = data[1:]
+
+ i := len(key.Fields)
+ descending := false
+ var kind client.FieldKind = client.FieldKind_DocID
+ // If the key has more values encoded than fields on the index description, the last
+ // value must be the docID and we treat it as a string.
+ if i < len(indexDesc.Fields) {
+ descending = indexDesc.Fields[i].Descending
+ kind = fields[i].Kind
+ } else if i > len(indexDesc.Fields) {
+ return IndexDataStoreKey{}, ErrInvalidKey
+ }
+
+ if kind != nil && kind.IsArray() {
+ if arrKind, ok := kind.(client.ScalarArrayKind); ok {
+ kind = arrKind.SubKind()
+ }
+ }
+
+ var val client.NormalValue
+ data, val, err = encoding.DecodeFieldValue(data, descending, kind)
+ if err != nil {
+ return IndexDataStoreKey{}, err
+ }
+
+ key.Fields = append(key.Fields, IndexedField{Value: val, Descending: descending})
+ }
+
+ return key, nil
+}
+
+// EncodeIndexDataStoreKey encodes a IndexDataStoreKey to bytes to be stored as a key
+// for secondary indexes.
+func EncodeIndexDataStoreKey(key *IndexDataStoreKey) []byte {
+ if key.CollectionID == 0 {
+ return []byte{}
+ }
+
+ b := encoding.EncodeUvarintAscending([]byte{'/'}, uint64(key.CollectionID))
+
+ if key.IndexID == 0 {
+ return b
+ }
+ b = append(b, '/')
+ b = encoding.EncodeUvarintAscending(b, uint64(key.IndexID))
+
+ for _, field := range key.Fields {
+ b = append(b, '/')
+ b = encoding.EncodeFieldValue(b, field.Value, field.Descending)
+ }
+
+ return b
+}
diff --git a/internal/keys/datastore_primary_doc.go b/internal/keys/datastore_primary_doc.go
new file mode 100644
index 0000000000..6f531d3c7a
--- /dev/null
+++ b/internal/keys/datastore_primary_doc.go
@@ -0,0 +1,57 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import (
+ "fmt"
+
+ ds "github.com/ipfs/go-datastore"
+)
+
+const (
+ PRIMARY_KEY = "/pk"
+)
+
+type PrimaryDataStoreKey struct {
+ CollectionRootID uint32
+ DocID string
+}
+
+var _ Key = (*PrimaryDataStoreKey)(nil)
+
+func (k PrimaryDataStoreKey) ToDataStoreKey() DataStoreKey {
+ return DataStoreKey{
+ CollectionRootID: k.CollectionRootID,
+ DocID: k.DocID,
+ }
+}
+
+func (k PrimaryDataStoreKey) Bytes() []byte {
+ return []byte(k.ToString())
+}
+
+func (k PrimaryDataStoreKey) ToDS() ds.Key {
+ return ds.NewKey(k.ToString())
+}
+
+func (k PrimaryDataStoreKey) ToString() string {
+ result := ""
+
+ if k.CollectionRootID != 0 {
+ result = result + "/" + fmt.Sprint(k.CollectionRootID)
+ }
+ result = result + PRIMARY_KEY
+ if k.DocID != "" {
+ result = result + "/" + k.DocID
+ }
+
+ return result
+}
diff --git a/internal/keys/datastore_view_item.go b/internal/keys/datastore_view_item.go
new file mode 100644
index 0000000000..b1280db327
--- /dev/null
+++ b/internal/keys/datastore_view_item.go
@@ -0,0 +1,87 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import (
+ "strconv"
+
+ ds "github.com/ipfs/go-datastore"
+
+ "github.com/sourcenetwork/defradb/internal/encoding"
+)
+
+// ViewCacheKey is a trimmed down [DataStoreKey] used for caching the results
+// of View items.
+//
+// It is stored in the format `/collection/vi/[CollectionRootID]/[ItemID]`. It points to the
+// full serialized View item.
+type ViewCacheKey struct {
+ // CollectionRootID is the Root of the Collection that this item belongs to.
+ CollectionRootID uint32
+
+ // ItemID is the unique (to this CollectionRootID) ID of the View item.
+ //
+ // For now this is essentially just the index of the item in the result-set, however
+ // that is likely to change in the near future.
+ ItemID uint
+}
+
+var _ Key = (*ViewCacheKey)(nil)
+
+func NewViewCacheColPrefix(rootID uint32) ViewCacheKey {
+ return ViewCacheKey{
+ CollectionRootID: rootID,
+ }
+}
+
+func NewViewCacheKey(rootID uint32, itemID uint) ViewCacheKey {
+ return ViewCacheKey{
+ CollectionRootID: rootID,
+ ItemID: itemID,
+ }
+}
+
+func (k ViewCacheKey) ToString() string {
+ return string(k.Bytes())
+}
+
+func (k ViewCacheKey) Bytes() []byte {
+ result := []byte(COLLECTION_VIEW_ITEMS)
+
+ if k.CollectionRootID != 0 {
+ result = append(result, '/')
+ result = encoding.EncodeUvarintAscending(result, uint64(k.CollectionRootID))
+ }
+
+ if k.ItemID != 0 {
+ result = append(result, '/')
+ result = encoding.EncodeUvarintAscending(result, uint64(k.ItemID))
+ }
+
+ return result
+}
+
+func (k ViewCacheKey) ToDS() ds.Key {
+ return ds.NewKey(k.ToString())
+}
+
+func (k ViewCacheKey) PrettyPrint() string {
+ result := COLLECTION_VIEW_ITEMS
+
+ if k.CollectionRootID != 0 {
+ result = result + "/" + strconv.Itoa(int(k.CollectionRootID))
+ }
+ if k.ItemID != 0 {
+ result = result + "/" + strconv.Itoa(int(k.ItemID))
+ }
+
+ return result
+}
diff --git a/internal/keys/errors.go b/internal/keys/errors.go
new file mode 100644
index 0000000000..bd6bf7bb8e
--- /dev/null
+++ b/internal/keys/errors.go
@@ -0,0 +1,44 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import (
+ "github.com/sourcenetwork/defradb/errors"
+)
+
+const (
+ errFailedToGetFieldIdOfKey string = "failed to get FieldID of Key"
+ errInvalidFieldIndex string = "invalid field index"
+ errInvalidFieldValue string = "invalid field value"
+)
+
+var (
+ ErrFailedToGetFieldIdOfKey = errors.New(errFailedToGetFieldIdOfKey)
+ ErrEmptyKey = errors.New("received empty key string")
+ ErrInvalidKey = errors.New("invalid key string")
+ ErrInvalidFieldIndex = errors.New(errInvalidFieldIndex)
+ ErrInvalidFieldValue = errors.New(errInvalidFieldValue)
+)
+
+// NewErrFailedToGetFieldIdOfKey returns the error indicating failure to get FieldID of Key.
+func NewErrFailedToGetFieldIdOfKey(inner error) error {
+ return errors.Wrap(errFailedToGetFieldIdOfKey, inner)
+}
+
+// NewErrInvalidFieldIndex returns the error indicating invalid field index.
+func NewErrInvalidFieldIndex(i int) error {
+ return errors.New(errInvalidFieldIndex, errors.NewKV("index", i))
+}
+
+// NewErrInvalidFieldValue returns the error indicating invalid field value.
+func NewErrInvalidFieldValue(reason string) error {
+ return errors.New(errInvalidFieldValue, errors.NewKV("Reason", reason))
+}
diff --git a/internal/keys/headstore_doc.go b/internal/keys/headstore_doc.go
new file mode 100644
index 0000000000..5d3ec2306e
--- /dev/null
+++ b/internal/keys/headstore_doc.go
@@ -0,0 +1,94 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import (
+ "strings"
+
+ "github.com/ipfs/go-cid"
+ ds "github.com/ipfs/go-datastore"
+)
+
+type HeadStoreKey struct {
+ DocID string
+ FieldID string // can be 'C'
+ Cid cid.Cid
+}
+
+var _ Key = (*HeadStoreKey)(nil)
+
+// Creates a new HeadStoreKey from a string as best as it can,
+// splitting the input using '/' as a field delimiter. It assumes
+// that the input string is in the following format:
+//
+// /[DocID]/[FieldId]/[Cid]
+//
+// Any other format will result in an error.
+func NewHeadStoreKey(key string) (HeadStoreKey, error) {
+ elements := strings.Split(key, "/")
+ if len(elements) != 4 {
+ return HeadStoreKey{}, ErrInvalidKey
+ }
+
+ cid, err := cid.Decode(elements[3])
+ if err != nil {
+ return HeadStoreKey{}, err
+ }
+
+ return HeadStoreKey{
+ // elements[0] is empty (key has leading '/')
+ DocID: elements[1],
+ FieldID: elements[2],
+ Cid: cid,
+ }, nil
+}
+
+func (k HeadStoreKey) WithDocID(docID string) HeadStoreKey {
+ newKey := k
+ newKey.DocID = docID
+ return newKey
+}
+
+func (k HeadStoreKey) WithCid(c cid.Cid) HeadStoreKey {
+ newKey := k
+ newKey.Cid = c
+ return newKey
+}
+
+func (k HeadStoreKey) WithFieldID(fieldID string) HeadStoreKey {
+ newKey := k
+ newKey.FieldID = fieldID
+ return newKey
+}
+
+func (k HeadStoreKey) ToString() string {
+ var result string
+
+ if k.DocID != "" {
+ result = result + "/" + k.DocID
+ }
+ if k.FieldID != "" {
+ result = result + "/" + k.FieldID
+ }
+ if k.Cid.Defined() {
+ result = result + "/" + k.Cid.String()
+ }
+
+ return result
+}
+
+func (k HeadStoreKey) Bytes() []byte {
+ return []byte(k.ToString())
+}
+
+func (k HeadStoreKey) ToDS() ds.Key {
+ return ds.NewKey(k.ToString())
+}
diff --git a/internal/keys/key.go b/internal/keys/key.go
new file mode 100644
index 0000000000..893b9790b4
--- /dev/null
+++ b/internal/keys/key.go
@@ -0,0 +1,22 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import (
+ ds "github.com/ipfs/go-datastore"
+)
+
+// Key is an interface that represents a key in the database.
+type Key interface {
+ ToString() string
+ Bytes() []byte
+ ToDS() ds.Key
+}
diff --git a/internal/core/key_test.go b/internal/keys/key_test.go
similarity index 98%
rename from internal/core/key_test.go
rename to internal/keys/key_test.go
index 4cdb46b72d..37ce364183 100644
--- a/internal/core/key_test.go
+++ b/internal/keys/key_test.go
@@ -8,7 +8,7 @@
// by the Apache License, Version 2.0, included in the file
// licenses/APL.txt.
-package core
+package keys
import (
"fmt"
@@ -95,7 +95,7 @@ func encodeKey(colID, indexID uint32, fieldParts ...any) []byte {
}
for i := 0; i < len(fieldParts)/partSize; i++ {
b = append(b, '/')
- isDescending := fieldParts[i*partSize+1].(bool)
+ isDescending, _ := fieldParts[i*partSize+1].(bool)
if fieldParts[i*partSize] == nil {
if isDescending {
b = encoding.EncodeNullDescending(b)
@@ -103,10 +103,11 @@ func encodeKey(colID, indexID uint32, fieldParts ...any) []byte {
b = encoding.EncodeNullAscending(b)
}
} else {
+ v, _ := fieldParts[i*partSize].(int)
if isDescending {
- b = encoding.EncodeUvarintDescending(b, uint64(fieldParts[i*partSize].(int)))
+ b = encoding.EncodeUvarintDescending(b, uint64(v))
} else {
- b = encoding.EncodeUvarintAscending(b, uint64(fieldParts[i*partSize].(int)))
+ b = encoding.EncodeUvarintAscending(b, uint64(v))
}
}
}
diff --git a/internal/keys/peerstore.go b/internal/keys/peerstore.go
new file mode 100644
index 0000000000..7fa628c833
--- /dev/null
+++ b/internal/keys/peerstore.go
@@ -0,0 +1,17 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+const (
+ REPLICATOR = "/rep/id"
+ REPLICATOR_RETRY_ID = "/rep/retry/id"
+ REPLICATOR_RETRY_DOC = "/rep/retry/doc"
+)
diff --git a/internal/keys/peerstore_replicator.go b/internal/keys/peerstore_replicator.go
new file mode 100644
index 0000000000..c54113d6a5
--- /dev/null
+++ b/internal/keys/peerstore_replicator.go
@@ -0,0 +1,41 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import ds "github.com/ipfs/go-datastore"
+
+type ReplicatorKey struct {
+ ReplicatorID string
+}
+
+var _ Key = (*ReplicatorKey)(nil)
+
+func NewReplicatorKey(id string) ReplicatorKey {
+ return ReplicatorKey{ReplicatorID: id}
+}
+
+func (k ReplicatorKey) ToString() string {
+ result := REPLICATOR
+
+ if k.ReplicatorID != "" {
+ result = result + "/" + k.ReplicatorID
+ }
+
+ return result
+}
+
+func (k ReplicatorKey) Bytes() []byte {
+ return []byte(k.ToString())
+}
+
+func (k ReplicatorKey) ToDS() ds.Key {
+ return ds.NewKey(k.ToString())
+}
diff --git a/internal/keys/peerstore_replicator_retry.go b/internal/keys/peerstore_replicator_retry.go
new file mode 100644
index 0000000000..380676073a
--- /dev/null
+++ b/internal/keys/peerstore_replicator_retry.go
@@ -0,0 +1,54 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import (
+ "strings"
+
+ ds "github.com/ipfs/go-datastore"
+
+ "github.com/sourcenetwork/defradb/errors"
+)
+
+type ReplicatorRetryIDKey struct {
+ PeerID string
+}
+
+var _ Key = (*ReplicatorRetryIDKey)(nil)
+
+func NewReplicatorRetryIDKey(peerID string) ReplicatorRetryIDKey {
+ return ReplicatorRetryIDKey{
+ PeerID: peerID,
+ }
+}
+
+// NewReplicatorRetryIDKeyFromString creates a new [ReplicatorRetryIDKey] from a string.
+//
+// It expects the input string to be in the format `/rep/retry/id/[PeerID]`.
+func NewReplicatorRetryIDKeyFromString(key string) (ReplicatorRetryIDKey, error) {
+ peerID := strings.TrimPrefix(key, REPLICATOR_RETRY_ID+"/")
+ if peerID == "" {
+ return ReplicatorRetryIDKey{}, errors.WithStack(ErrInvalidKey, errors.NewKV("Key", key))
+ }
+ return NewReplicatorRetryIDKey(peerID), nil
+}
+
+func (k ReplicatorRetryIDKey) ToString() string {
+ return REPLICATOR_RETRY_ID + "/" + k.PeerID
+}
+
+func (k ReplicatorRetryIDKey) Bytes() []byte {
+ return []byte(k.ToString())
+}
+
+func (k ReplicatorRetryIDKey) ToDS() ds.Key {
+ return ds.NewKey(k.ToString())
+}
diff --git a/internal/keys/peerstore_replicator_retry_doc.go b/internal/keys/peerstore_replicator_retry_doc.go
new file mode 100644
index 0000000000..c77fc7617a
--- /dev/null
+++ b/internal/keys/peerstore_replicator_retry_doc.go
@@ -0,0 +1,61 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import (
+ "strings"
+
+ ds "github.com/ipfs/go-datastore"
+
+ "github.com/sourcenetwork/defradb/errors"
+)
+
+type ReplicatorRetryDocIDKey struct {
+ PeerID string
+ DocID string
+}
+
+var _ Key = (*ReplicatorRetryDocIDKey)(nil)
+
+func NewReplicatorRetryDocIDKey(peerID, docID string) ReplicatorRetryDocIDKey {
+ return ReplicatorRetryDocIDKey{
+ PeerID: peerID,
+ DocID: docID,
+ }
+}
+
+// NewReplicatorRetryDocIDKeyFromString creates a new [ReplicatorRetryDocIDKey] from a string.
+//
+// It expects the input string to be in the format `/rep/retry/doc/[PeerID]/[DocID]`.
+func NewReplicatorRetryDocIDKeyFromString(key string) (ReplicatorRetryDocIDKey, error) {
+ trimmedKey := strings.TrimPrefix(key, REPLICATOR_RETRY_DOC+"/")
+ keyArr := strings.Split(trimmedKey, "/")
+ if len(keyArr) != 2 {
+ return ReplicatorRetryDocIDKey{}, errors.WithStack(ErrInvalidKey, errors.NewKV("Key", key))
+ }
+ return NewReplicatorRetryDocIDKey(keyArr[0], keyArr[1]), nil
+}
+
+func (k ReplicatorRetryDocIDKey) ToString() string {
+ keyString := REPLICATOR_RETRY_DOC + "/" + k.PeerID
+ if k.DocID != "" {
+ keyString += "/" + k.DocID
+ }
+ return keyString
+}
+
+func (k ReplicatorRetryDocIDKey) Bytes() []byte {
+ return []byte(k.ToString())
+}
+
+func (k ReplicatorRetryDocIDKey) ToDS() ds.Key {
+ return ds.NewKey(k.ToString())
+}
diff --git a/internal/keys/systemstore.go b/internal/keys/systemstore.go
new file mode 100644
index 0000000000..d3f82a8af5
--- /dev/null
+++ b/internal/keys/systemstore.go
@@ -0,0 +1,26 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+const (
+ COLLECTION = "collection"
+ COLLECTION_ROOT = "/collection/root"
+ COLLECTION_INDEX = "/collection/index"
+ COLLECTION_NAME = "/collection/name"
+ COLLECTION_SCHEMA_VERSION = "/collection/version"
+ COLLECTION_ID = "/collection/id"
+ P2P_COLLECTION = "/p2p/collection"
+ SCHEMA_VERSION_ROOT = "/schema/version/r"
+ SCHEMA_VERSION = "/schema/version/v"
+ COLLECTION_SEQ = "/seq/collection"
+ INDEX_ID_SEQ = "/seq/index"
+ FIELD_ID_SEQ = "/seq/field"
+)
diff --git a/internal/keys/systemstore_collection.go b/internal/keys/systemstore_collection.go
new file mode 100644
index 0000000000..675ab74e76
--- /dev/null
+++ b/internal/keys/systemstore_collection.go
@@ -0,0 +1,44 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import (
+ "fmt"
+ "strconv"
+
+ ds "github.com/ipfs/go-datastore"
+)
+
+// CollectionKey points to the json serialized description of
+// the collection of the given ID.
+type CollectionKey struct {
+ CollectionID uint32
+}
+
+var _ Key = (*CollectionKey)(nil)
+
+// Returns a formatted collection key for the system data store.
+// The key is scoped by the given collection ID.
+func NewCollectionKey(id uint32) CollectionKey {
+ return CollectionKey{CollectionID: id}
+}
+
+func (k CollectionKey) ToString() string {
+ return fmt.Sprintf("%s/%s", COLLECTION_ID, strconv.Itoa(int(k.CollectionID)))
+}
+
+func (k CollectionKey) Bytes() []byte {
+ return []byte(k.ToString())
+}
+
+func (k CollectionKey) ToDS() ds.Key {
+ return ds.NewKey(k.ToString())
+}
diff --git a/internal/keys/systemstore_collection_index.go b/internal/keys/systemstore_collection_index.go
new file mode 100644
index 0000000000..34eed12482
--- /dev/null
+++ b/internal/keys/systemstore_collection_index.go
@@ -0,0 +1,86 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+
+ ds "github.com/ipfs/go-datastore"
+ "github.com/sourcenetwork/immutable"
+)
+
+// CollectionIndexKey points to a stored description of an index
+type CollectionIndexKey struct {
+ // CollectionID is the id of the collection that the index is on
+ CollectionID immutable.Option[uint32]
+ // IndexName is the name of the index
+ IndexName string
+}
+
+var _ Key = (*CollectionIndexKey)(nil)
+
+// NewCollectionIndexKey creates a new CollectionIndexKey from a collection ID and index name.
+func NewCollectionIndexKey(colID immutable.Option[uint32], indexName string) CollectionIndexKey {
+ return CollectionIndexKey{CollectionID: colID, IndexName: indexName}
+}
+
+// NewCollectionIndexKeyFromString creates a new CollectionIndexKey from a string.
+// It expects the input string is in the following format:
+//
+// /collection/index/[CollectionID]/[IndexName]
+//
+// Where [IndexName] might be omitted. Anything else will return an error.
+func NewCollectionIndexKeyFromString(key string) (CollectionIndexKey, error) {
+ keyArr := strings.Split(key, "/")
+ if len(keyArr) < 4 || len(keyArr) > 5 || keyArr[1] != COLLECTION || keyArr[2] != "index" {
+ return CollectionIndexKey{}, ErrInvalidKey
+ }
+
+ colID, err := strconv.Atoi(keyArr[3])
+ if err != nil {
+ return CollectionIndexKey{}, err
+ }
+
+ result := CollectionIndexKey{CollectionID: immutable.Some(uint32(colID))}
+ if len(keyArr) == 5 {
+ result.IndexName = keyArr[4]
+ }
+ return result, nil
+}
+
+// ToString returns the string representation of the key
+// It is in the following format:
+// /collection/index/[CollectionID]/[IndexName]
+// if [CollectionID] is empty, the rest is ignored
+func (k CollectionIndexKey) ToString() string {
+ result := COLLECTION_INDEX
+
+ if k.CollectionID.HasValue() {
+ result = result + "/" + fmt.Sprint(k.CollectionID.Value())
+ if k.IndexName != "" {
+ result = result + "/" + k.IndexName
+ }
+ }
+
+ return result
+}
+
+// Bytes returns the byte representation of the key
+func (k CollectionIndexKey) Bytes() []byte {
+ return []byte(k.ToString())
+}
+
+// ToDS returns the datastore key
+func (k CollectionIndexKey) ToDS() ds.Key {
+ return ds.NewKey(k.ToString())
+}
diff --git a/internal/keys/systemstore_collection_name.go b/internal/keys/systemstore_collection_name.go
new file mode 100644
index 0000000000..6d03a42e23
--- /dev/null
+++ b/internal/keys/systemstore_collection_name.go
@@ -0,0 +1,41 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import (
+ "fmt"
+
+ ds "github.com/ipfs/go-datastore"
+)
+
+// CollectionNameKey points to the ID of the collection of the given
+// name.
+type CollectionNameKey struct {
+ Name string
+}
+
+var _ Key = (*CollectionNameKey)(nil)
+
+func NewCollectionNameKey(name string) CollectionNameKey {
+ return CollectionNameKey{Name: name}
+}
+
+func (k CollectionNameKey) ToString() string {
+ return fmt.Sprintf("%s/%s", COLLECTION_NAME, k.Name)
+}
+
+func (k CollectionNameKey) Bytes() []byte {
+ return []byte(k.ToString())
+}
+
+func (k CollectionNameKey) ToDS() ds.Key {
+ return ds.NewKey(k.ToString())
+}
diff --git a/internal/keys/systemstore_collection_root.go b/internal/keys/systemstore_collection_root.go
new file mode 100644
index 0000000000..bef50e2ec2
--- /dev/null
+++ b/internal/keys/systemstore_collection_root.go
@@ -0,0 +1,83 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+
+ ds "github.com/ipfs/go-datastore"
+)
+
+// CollectionRootKey points to nil, but the keys/prefix can be used
+// to get collections that are of a given RootID.
+//
+// It is stored in the format `/collection/root/[RootID]/[CollectionID]`.
+type CollectionRootKey struct {
+ RootID uint32
+ CollectionID uint32
+}
+
+var _ Key = (*CollectionRootKey)(nil)
+
+func NewCollectionRootKey(rootID uint32, collectionID uint32) CollectionRootKey {
+ return CollectionRootKey{
+ RootID: rootID,
+ CollectionID: collectionID,
+ }
+}
+
+// NewCollectionRootKeyFromString creates a new [CollectionRootKey].
+//
+// It expects the key to be in the format `/collection/root/[RootID]/[CollectionID]`.
+func NewCollectionRootKeyFromString(key string) (CollectionRootKey, error) {
+ keyArr := strings.Split(key, "/")
+ if len(keyArr) != 5 || keyArr[1] != COLLECTION || keyArr[2] != "root" {
+ return CollectionRootKey{}, ErrInvalidKey
+ }
+ rootID, err := strconv.Atoi(keyArr[3])
+ if err != nil {
+ return CollectionRootKey{}, err
+ }
+
+ collectionID, err := strconv.Atoi(keyArr[4])
+ if err != nil {
+ return CollectionRootKey{}, err
+ }
+
+ return CollectionRootKey{
+ RootID: uint32(rootID),
+ CollectionID: uint32(collectionID),
+ }, nil
+}
+
+func (k CollectionRootKey) ToString() string {
+ result := COLLECTION_ROOT
+
+ if k.RootID != 0 {
+ result = fmt.Sprintf("%s/%s", result, strconv.Itoa(int(k.RootID)))
+ }
+
+ if k.CollectionID != 0 {
+ result = fmt.Sprintf("%s/%s", result, strconv.Itoa(int(k.CollectionID)))
+ }
+
+ return result
+}
+
+func (k CollectionRootKey) Bytes() []byte {
+ return []byte(k.ToString())
+}
+
+func (k CollectionRootKey) ToDS() ds.Key {
+ return ds.NewKey(k.ToString())
+}
diff --git a/internal/keys/systemstore_collection_schema.go b/internal/keys/systemstore_collection_schema.go
new file mode 100644
index 0000000000..eb84e95812
--- /dev/null
+++ b/internal/keys/systemstore_collection_schema.go
@@ -0,0 +1,75 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+
+ ds "github.com/ipfs/go-datastore"
+)
+
+// CollectionSchemaVersionKey points to nil, but the keys/prefix can be used
+// to get collections that are using, or have used a given schema version.
+//
+// If a collection is updated to a different schema version, the old entry(s)
+// of this key will be preserved.
+//
+// This key should be removed in https://github.com/sourcenetwork/defradb/issues/1085
+type CollectionSchemaVersionKey struct {
+ SchemaVersionID string
+ CollectionID uint32
+}
+
+var _ Key = (*CollectionSchemaVersionKey)(nil)
+
+func NewCollectionSchemaVersionKey(schemaVersionId string, collectionID uint32) CollectionSchemaVersionKey {
+ return CollectionSchemaVersionKey{
+ SchemaVersionID: schemaVersionId,
+ CollectionID: collectionID,
+ }
+}
+
+func NewCollectionSchemaVersionKeyFromString(key string) (CollectionSchemaVersionKey, error) {
+ elements := strings.Split(key, "/")
+ colID, err := strconv.Atoi(elements[len(elements)-1])
+ if err != nil {
+ return CollectionSchemaVersionKey{}, err
+ }
+
+ return CollectionSchemaVersionKey{
+ SchemaVersionID: elements[len(elements)-2],
+ CollectionID: uint32(colID),
+ }, nil
+}
+
+func (k CollectionSchemaVersionKey) ToString() string {
+ result := COLLECTION_SCHEMA_VERSION
+
+ if k.SchemaVersionID != "" {
+ result = result + "/" + k.SchemaVersionID
+ }
+
+ if k.CollectionID != 0 {
+ result = fmt.Sprintf("%s/%s", result, strconv.Itoa(int(k.CollectionID)))
+ }
+
+ return result
+}
+
+func (k CollectionSchemaVersionKey) Bytes() []byte {
+ return []byte(k.ToString())
+}
+
+func (k CollectionSchemaVersionKey) ToDS() ds.Key {
+ return ds.NewKey(k.ToString())
+}
diff --git a/internal/keys/systemstore_p2p_collection.go b/internal/keys/systemstore_p2p_collection.go
new file mode 100644
index 0000000000..129c3c27a5
--- /dev/null
+++ b/internal/keys/systemstore_p2p_collection.go
@@ -0,0 +1,56 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import (
+ "strings"
+
+ ds "github.com/ipfs/go-datastore"
+
+ "github.com/sourcenetwork/defradb/errors"
+)
+
+type P2PCollectionKey struct {
+ CollectionID string
+}
+
+var _ Key = (*P2PCollectionKey)(nil)
+
+// NewP2PCollectionKey returns a new P2PCollectionKey for the given collection ID.
+func NewP2PCollectionKey(collectionID string) P2PCollectionKey {
+ return P2PCollectionKey{CollectionID: collectionID}
+}
+
+func NewP2PCollectionKeyFromString(key string) (P2PCollectionKey, error) {
+ keyArr := strings.Split(key, "/")
+ if len(keyArr) != 4 {
+ return P2PCollectionKey{}, errors.WithStack(ErrInvalidKey, errors.NewKV("Key", key))
+ }
+ return NewP2PCollectionKey(keyArr[3]), nil
+}
+
+func (k P2PCollectionKey) ToString() string {
+ result := P2P_COLLECTION
+
+ if k.CollectionID != "" {
+ result = result + "/" + k.CollectionID
+ }
+
+ return result
+}
+
+func (k P2PCollectionKey) Bytes() []byte {
+ return []byte(k.ToString())
+}
+
+func (k P2PCollectionKey) ToDS() ds.Key {
+ return ds.NewKey(k.ToString())
+}
diff --git a/internal/keys/systemstore_schema_root.go b/internal/keys/systemstore_schema_root.go
new file mode 100644
index 0000000000..848e05c83c
--- /dev/null
+++ b/internal/keys/systemstore_schema_root.go
@@ -0,0 +1,69 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import (
+ "strings"
+
+ ds "github.com/ipfs/go-datastore"
+)
+
+// SchemaRootKey indexes schema version ids by their root schema id.
+//
+// The index is the key, there are no values stored against the key.
+type SchemaRootKey struct {
+ SchemaRoot string
+ SchemaVersionID string
+}
+
+var _ Key = (*SchemaRootKey)(nil)
+
+func NewSchemaRootKey(schemaRoot string, schemaVersionID string) SchemaRootKey {
+ return SchemaRootKey{
+ SchemaRoot: schemaRoot,
+ SchemaVersionID: schemaVersionID,
+ }
+}
+
+func NewSchemaRootKeyFromString(keyString string) (SchemaRootKey, error) {
+ keyString = strings.TrimPrefix(keyString, SCHEMA_VERSION_ROOT+"/")
+ elements := strings.Split(keyString, "/")
+ if len(elements) != 2 {
+ return SchemaRootKey{}, ErrInvalidKey
+ }
+
+ return SchemaRootKey{
+ SchemaRoot: elements[0],
+ SchemaVersionID: elements[1],
+ }, nil
+}
+
+func (k SchemaRootKey) ToString() string {
+ result := SCHEMA_VERSION_ROOT
+
+ if k.SchemaRoot != "" {
+ result = result + "/" + k.SchemaRoot
+ }
+
+ if k.SchemaVersionID != "" {
+ result = result + "/" + k.SchemaVersionID
+ }
+
+ return result
+}
+
+func (k SchemaRootKey) Bytes() []byte {
+ return []byte(k.ToString())
+}
+
+func (k SchemaRootKey) ToDS() ds.Key {
+ return ds.NewKey(k.ToString())
+}
diff --git a/internal/keys/systemstore_schema_version.go b/internal/keys/systemstore_schema_version.go
new file mode 100644
index 0000000000..d435aa7e4a
--- /dev/null
+++ b/internal/keys/systemstore_schema_version.go
@@ -0,0 +1,44 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import ds "github.com/ipfs/go-datastore"
+
+// SchemaVersionKey points to the json serialized schema at the specified version.
+//
+// Its corresponding value is immutable.
+type SchemaVersionKey struct {
+ SchemaVersionID string
+}
+
+var _ Key = (*SchemaVersionKey)(nil)
+
+func NewSchemaVersionKey(schemaVersionID string) SchemaVersionKey {
+ return SchemaVersionKey{SchemaVersionID: schemaVersionID}
+}
+
+func (k SchemaVersionKey) ToString() string {
+ result := SCHEMA_VERSION
+
+ if k.SchemaVersionID != "" {
+ result = result + "/" + k.SchemaVersionID
+ }
+
+ return result
+}
+
+func (k SchemaVersionKey) Bytes() []byte {
+ return []byte(k.ToString())
+}
+
+func (k SchemaVersionKey) ToDS() ds.Key {
+ return ds.NewKey(k.ToString())
+}
diff --git a/internal/keys/systemstore_seq_collection_id.go b/internal/keys/systemstore_seq_collection_id.go
new file mode 100644
index 0000000000..a589e64cce
--- /dev/null
+++ b/internal/keys/systemstore_seq_collection_id.go
@@ -0,0 +1,30 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import ds "github.com/ipfs/go-datastore"
+
+// CollectionIDSequenceKey is used to key the sequence used to generate collection ids.
+type CollectionIDSequenceKey struct{}
+
+var _ Key = (*CollectionIDSequenceKey)(nil)
+
+func (k CollectionIDSequenceKey) ToString() string {
+ return COLLECTION_SEQ
+}
+
+func (k CollectionIDSequenceKey) Bytes() []byte {
+ return []byte(k.ToString())
+}
+
+func (k CollectionIDSequenceKey) ToDS() ds.Key {
+ return ds.NewKey(k.ToString())
+}
diff --git a/internal/keys/systemstore_seq_field_id.go b/internal/keys/systemstore_seq_field_id.go
new file mode 100644
index 0000000000..7cd01e79d3
--- /dev/null
+++ b/internal/keys/systemstore_seq_field_id.go
@@ -0,0 +1,43 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import (
+ "strconv"
+
+ ds "github.com/ipfs/go-datastore"
+)
+
+// FieldIDSequenceKey is used to key the sequence used to generate field ids.
+//
+// The sequence is specific to each collection root. Multiple collections of the same root
+// must maintain consistent field ids.
+type FieldIDSequenceKey struct {
+ CollectionRoot uint32
+}
+
+var _ Key = (*FieldIDSequenceKey)(nil)
+
+func NewFieldIDSequenceKey(collectionRoot uint32) FieldIDSequenceKey {
+ return FieldIDSequenceKey{CollectionRoot: collectionRoot}
+}
+
+func (k FieldIDSequenceKey) ToString() string {
+ return FIELD_ID_SEQ + "/" + strconv.Itoa(int(k.CollectionRoot))
+}
+
+func (k FieldIDSequenceKey) Bytes() []byte {
+ return []byte(k.ToString())
+}
+
+func (k FieldIDSequenceKey) ToDS() ds.Key {
+ return ds.NewKey(k.ToString())
+}
diff --git a/internal/keys/systemstore_seq_index_id.go b/internal/keys/systemstore_seq_index_id.go
new file mode 100644
index 0000000000..5fbf09d145
--- /dev/null
+++ b/internal/keys/systemstore_seq_index_id.go
@@ -0,0 +1,42 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import (
+ "strconv"
+
+ ds "github.com/ipfs/go-datastore"
+)
+
+// IndexIDSequenceKey is used to key the sequence used to generate index ids.
+//
+// The sequence is specific to each collection version.
+type IndexIDSequenceKey struct {
+ CollectionID uint32
+}
+
+var _ Key = (*IndexIDSequenceKey)(nil)
+
+func NewIndexIDSequenceKey(collectionID uint32) IndexIDSequenceKey {
+ return IndexIDSequenceKey{CollectionID: collectionID}
+}
+
+func (k IndexIDSequenceKey) ToString() string {
+ return INDEX_ID_SEQ + "/" + strconv.Itoa(int(k.CollectionID))
+}
+
+func (k IndexIDSequenceKey) Bytes() []byte {
+ return []byte(k.ToString())
+}
+
+func (k IndexIDSequenceKey) ToDS() ds.Key {
+ return ds.NewKey(k.ToString())
+}
diff --git a/internal/lens/fetcher.go b/internal/lens/fetcher.go
index bbe0c45a0d..db9e418afa 100644
--- a/internal/lens/fetcher.go
+++ b/internal/lens/fetcher.go
@@ -25,6 +25,7 @@ import (
"github.com/sourcenetwork/defradb/datastore"
"github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/fetcher"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -294,10 +295,10 @@ func (f *lensedFetcher) updateDataStore(ctx context.Context, original map[string
return core.ErrInvalidKey
}
- datastoreKeyBase := core.DataStoreKey{
+ datastoreKeyBase := keys.DataStoreKey{
CollectionRootID: f.col.Description().RootID,
DocID: docID,
- InstanceType: core.ValueKey,
+ InstanceType: keys.ValueKey,
}
for fieldName, value := range modifiedFieldValuesByName {
@@ -320,7 +321,7 @@ func (f *lensedFetcher) updateDataStore(ctx context.Context, original map[string
}
}
- versionKey := datastoreKeyBase.WithFieldID(core.DATASTORE_DOC_VERSION_FIELD_ID)
+ versionKey := datastoreKeyBase.WithFieldID(keys.DATASTORE_DOC_VERSION_FIELD_ID)
err := f.txn.Datastore().Put(ctx, versionKey.ToDS(), []byte(f.targetVersionID))
if err != nil {
return err
diff --git a/internal/merkle/clock/clock.go b/internal/merkle/clock/clock.go
index 9f0f6e77fb..94180f2144 100644
--- a/internal/merkle/clock/clock.go
+++ b/internal/merkle/clock/clock.go
@@ -27,6 +27,7 @@ import (
"github.com/sourcenetwork/defradb/internal/core"
coreblock "github.com/sourcenetwork/defradb/internal/core/block"
"github.com/sourcenetwork/defradb/internal/encryption"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
var (
@@ -47,7 +48,7 @@ func NewMerkleClock(
headstore datastore.DSReaderWriter,
blockstore datastore.Blockstore,
encstore datastore.Blockstore,
- namespace core.HeadStoreKey,
+ namespace keys.HeadStoreKey,
crdt core.ReplicatedData,
) *MerkleClock {
return &MerkleClock{
diff --git a/internal/merkle/clock/clock_test.go b/internal/merkle/clock/clock_test.go
index f3f2f6e155..c0f169c0a5 100644
--- a/internal/merkle/clock/clock_test.go
+++ b/internal/merkle/clock/clock_test.go
@@ -19,10 +19,10 @@ import (
"github.com/sourcenetwork/defradb/client/request"
"github.com/sourcenetwork/defradb/datastore"
- "github.com/sourcenetwork/defradb/internal/core"
coreblock "github.com/sourcenetwork/defradb/internal/core/block"
ccid "github.com/sourcenetwork/defradb/internal/core/cid"
"github.com/sourcenetwork/defradb/internal/core/crdt"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
func newDS() ds.Datastore {
@@ -33,12 +33,12 @@ func newTestMerkleClock() *MerkleClock {
s := newDS()
multistore := datastore.MultiStoreFrom(s)
- reg := crdt.NewLWWRegister(multistore.Rootstore(), core.CollectionSchemaVersionKey{}, core.DataStoreKey{}, "")
+ reg := crdt.NewLWWRegister(multistore.Rootstore(), keys.CollectionSchemaVersionKey{}, keys.DataStoreKey{}, "")
return NewMerkleClock(
multistore.Headstore(),
multistore.Blockstore(),
multistore.Encstore(),
- core.HeadStoreKey{DocID: request.DocIDArgName, FieldID: "1"},
+ keys.HeadStoreKey{DocID: request.DocIDArgName, FieldID: "1"},
reg,
)
}
@@ -46,8 +46,8 @@ func newTestMerkleClock() *MerkleClock {
func TestNewMerkleClock(t *testing.T) {
s := newDS()
multistore := datastore.MultiStoreFrom(s)
- reg := crdt.NewLWWRegister(multistore.Rootstore(), core.CollectionSchemaVersionKey{}, core.DataStoreKey{}, "")
- clk := NewMerkleClock(multistore.Headstore(), multistore.Blockstore(), multistore.Encstore(), core.HeadStoreKey{}, reg)
+ reg := crdt.NewLWWRegister(multistore.Rootstore(), keys.CollectionSchemaVersionKey{}, keys.DataStoreKey{}, "")
+ clk := NewMerkleClock(multistore.Headstore(), multistore.Blockstore(), multistore.Encstore(), keys.HeadStoreKey{}, reg)
if clk.headstore != multistore.Headstore() {
t.Error("MerkleClock store not correctly set")
diff --git a/internal/merkle/clock/heads.go b/internal/merkle/clock/heads.go
index 9b1fad43dd..0dcf2a8f99 100644
--- a/internal/merkle/clock/heads.go
+++ b/internal/merkle/clock/heads.go
@@ -21,23 +21,23 @@ import (
"github.com/sourcenetwork/corelog"
"github.com/sourcenetwork/defradb/datastore"
- "github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
// heads manages the current Merkle-CRDT heads.
type heads struct {
store datastore.DSReaderWriter
- namespace core.HeadStoreKey
+ namespace keys.HeadStoreKey
}
-func NewHeadSet(store datastore.DSReaderWriter, namespace core.HeadStoreKey) *heads {
+func NewHeadSet(store datastore.DSReaderWriter, namespace keys.HeadStoreKey) *heads {
return &heads{
store: store,
namespace: namespace,
}
}
-func (hh *heads) key(c cid.Cid) core.HeadStoreKey {
+func (hh *heads) key(c cid.Cid) keys.HeadStoreKey {
return hh.namespace.WithCid(c)
}
@@ -102,7 +102,7 @@ func (hh *heads) List(ctx context.Context) ([]cid.Cid, uint64, error) {
return nil, 0, NewErrFailedToGetNextQResult(r.Error)
}
- headKey, err := core.NewHeadStoreKey(r.Key)
+ headKey, err := keys.NewHeadStoreKey(r.Key)
if err != nil {
return nil, 0, err
}
diff --git a/internal/merkle/clock/heads_test.go b/internal/merkle/clock/heads_test.go
index 0eb7acdd0e..cb8e1d1014 100644
--- a/internal/merkle/clock/heads_test.go
+++ b/internal/merkle/clock/heads_test.go
@@ -22,8 +22,8 @@ import (
"github.com/ipfs/go-cid"
"github.com/sourcenetwork/defradb/datastore"
- "github.com/sourcenetwork/defradb/internal/core"
ccid "github.com/sourcenetwork/defradb/internal/core/cid"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
func newRandomCID() cid.Cid {
@@ -45,7 +45,7 @@ func newHeadSet() *heads {
return NewHeadSet(
datastore.AsDSReaderWriter(s),
- core.HeadStoreKey{}.WithDocID("myDocID").WithFieldID("1"),
+ keys.HeadStoreKey{}.WithDocID("myDocID").WithFieldID("1"),
)
}
diff --git a/internal/merkle/crdt/composite.go b/internal/merkle/crdt/composite.go
index fe9c13a0f5..44df12f83f 100644
--- a/internal/merkle/crdt/composite.go
+++ b/internal/merkle/crdt/composite.go
@@ -16,9 +16,9 @@ import (
cidlink "github.com/ipld/go-ipld-prime/linking/cid"
"github.com/sourcenetwork/defradb/client"
- "github.com/sourcenetwork/defradb/internal/core"
coreblock "github.com/sourcenetwork/defradb/internal/core/block"
corecrdt "github.com/sourcenetwork/defradb/internal/core/crdt"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/merkle/clock"
)
@@ -35,8 +35,8 @@ var _ MerkleCRDT = (*MerkleCompositeDAG)(nil)
// backed by a CompositeDAG CRDT.
func NewMerkleCompositeDAG(
store Stores,
- schemaVersionKey core.CollectionSchemaVersionKey,
- key core.DataStoreKey,
+ schemaVersionKey keys.CollectionSchemaVersionKey,
+ key keys.DataStoreKey,
) *MerkleCompositeDAG {
compositeDag := corecrdt.NewCompositeDAG(
store.Datastore(),
diff --git a/internal/merkle/crdt/counter.go b/internal/merkle/crdt/counter.go
index c43a795294..50434ed7da 100644
--- a/internal/merkle/crdt/counter.go
+++ b/internal/merkle/crdt/counter.go
@@ -16,8 +16,8 @@ import (
cidlink "github.com/ipld/go-ipld-prime/linking/cid"
"github.com/sourcenetwork/defradb/client"
- "github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/core/crdt"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/merkle/clock"
)
@@ -33,8 +33,8 @@ var _ MerkleCRDT = (*MerkleCounter)(nil)
// backed by a Counter CRDT.
func NewMerkleCounter(
store Stores,
- schemaVersionKey core.CollectionSchemaVersionKey,
- key core.DataStoreKey,
+ schemaVersionKey keys.CollectionSchemaVersionKey,
+ key keys.DataStoreKey,
fieldName string,
allowDecrement bool,
kind client.ScalarKind,
diff --git a/internal/merkle/crdt/lwwreg.go b/internal/merkle/crdt/lwwreg.go
index d24c2a107e..18fc7ee35d 100644
--- a/internal/merkle/crdt/lwwreg.go
+++ b/internal/merkle/crdt/lwwreg.go
@@ -16,8 +16,8 @@ import (
cidlink "github.com/ipld/go-ipld-prime/linking/cid"
"github.com/sourcenetwork/defradb/client"
- "github.com/sourcenetwork/defradb/internal/core"
corecrdt "github.com/sourcenetwork/defradb/internal/core/crdt"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/merkle/clock"
)
@@ -33,8 +33,8 @@ var _ MerkleCRDT = (*MerkleLWWRegister)(nil)
// backed by a LWWRegister CRDT.
func NewMerkleLWWRegister(
store Stores,
- schemaVersionKey core.CollectionSchemaVersionKey,
- key core.DataStoreKey,
+ schemaVersionKey keys.CollectionSchemaVersionKey,
+ key keys.DataStoreKey,
fieldName string,
) *MerkleLWWRegister {
register := corecrdt.NewLWWRegister(store.Datastore(), schemaVersionKey, key, fieldName)
diff --git a/internal/merkle/crdt/merklecrdt.go b/internal/merkle/crdt/merklecrdt.go
index a5cc00a9e1..3dd47ad0dc 100644
--- a/internal/merkle/crdt/merklecrdt.go
+++ b/internal/merkle/crdt/merklecrdt.go
@@ -20,7 +20,7 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/datastore"
- "github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/merkle/clock"
)
@@ -43,10 +43,10 @@ type MerkleCRDT interface {
func InstanceWithStore(
store Stores,
- schemaVersionKey core.CollectionSchemaVersionKey,
+ schemaVersionKey keys.CollectionSchemaVersionKey,
cType client.CType,
kind client.FieldKind,
- key core.DataStoreKey,
+ key keys.DataStoreKey,
fieldName string,
) (MerkleCRDT, error) {
switch cType {
diff --git a/internal/planner/commit.go b/internal/planner/commit.go
index d9551dc7ab..dc9a0ce3d7 100644
--- a/internal/planner/commit.go
+++ b/internal/planner/commit.go
@@ -20,6 +20,7 @@ import (
"github.com/sourcenetwork/defradb/internal/core"
coreblock "github.com/sourcenetwork/defradb/internal/core/block"
"github.com/sourcenetwork/defradb/internal/db/fetcher"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -68,7 +69,7 @@ func (n *dagScanNode) Kind() string {
func (n *dagScanNode) Init() error {
if len(n.spans.Value) == 0 {
if n.commitSelect.DocID.HasValue() {
- dsKey := core.DataStoreKey{}.WithDocID(n.commitSelect.DocID.Value())
+ dsKey := keys.DataStoreKey{}.WithDocID(n.commitSelect.DocID.Value())
if n.commitSelect.FieldID.HasValue() {
field := n.commitSelect.FieldID.Value()
@@ -112,7 +113,7 @@ func (n *dagScanNode) Spans(spans core.Spans) {
for i, span := range headSetSpans.Value {
if span.Start().FieldID != fieldID {
- headSetSpans.Value[i] = core.NewSpan(span.Start().WithFieldID(fieldID), core.DataStoreKey{})
+ headSetSpans.Value[i] = core.NewSpan(span.Start().WithFieldID(fieldID), keys.DataStoreKey{})
}
}
diff --git a/internal/planner/multi.go b/internal/planner/multi.go
index 4b82826118..c4c3278480 100644
--- a/internal/planner/multi.go
+++ b/internal/planner/multi.go
@@ -13,6 +13,7 @@ package planner
import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
/*
@@ -156,7 +157,7 @@ func (p *parallelNode) nextAppend(index int, plan planNode) (bool, error) {
}
// pass the doc key as a reference through the spans interface
- spans := core.NewSpans(core.NewSpan(core.DataStoreKey{DocID: key}, core.DataStoreKey{}))
+ spans := core.NewSpans(core.NewSpan(keys.DataStoreKey{DocID: key}, keys.DataStoreKey{}))
plan.Spans(spans)
err := plan.Init()
if err != nil {
diff --git a/internal/planner/select.go b/internal/planner/select.go
index 064f9b2fec..9393103e40 100644
--- a/internal/planner/select.go
+++ b/internal/planner/select.go
@@ -19,6 +19,7 @@ import (
"github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/base"
"github.com/sourcenetwork/defradb/internal/db/fetcher"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -264,7 +265,7 @@ func (n *selectNode) initSource() ([]aggregateNode, error) {
return nil, err
}
spans := fetcher.NewVersionedSpan(
- core.DataStoreKey{DocID: n.selectReq.DocIDs.Value()[0]},
+ keys.DataStoreKey{DocID: n.selectReq.DocIDs.Value()[0]},
c,
) // @todo check len
origScan.Spans(spans)
diff --git a/internal/planner/view.go b/internal/planner/view.go
index 0226a2c9c6..e5beef128b 100644
--- a/internal/planner/view.go
+++ b/internal/planner/view.go
@@ -15,6 +15,7 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -199,7 +200,7 @@ func (n *cachedViewFetcher) Init() error {
n.queryResults = nil
}
- prefix := core.NewViewCacheColPrefix(n.def.Description.RootID)
+ prefix := keys.NewViewCacheColPrefix(n.def.Description.RootID)
var err error
n.queryResults, err = n.p.txn.Datastore().Query(n.p.ctx, query.Query{
diff --git a/net/server_test.go b/net/server_test.go
index 4dc6428205..a2cda4c76b 100644
--- a/net/server_test.go
+++ b/net/server_test.go
@@ -22,6 +22,7 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/errors"
"github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
func TestNewServerSimple(t *testing.T) {
@@ -74,7 +75,7 @@ func TestGetHeadLog(t *testing.T) {
}
func getHead(ctx context.Context, db client.DB, docID client.DocID) (cid.Cid, error) {
- prefix := core.DataStoreKeyFromDocID(docID).ToHeadStoreKey().WithFieldID(core.COMPOSITE_NAMESPACE).ToString()
+ prefix := keys.DataStoreKeyFromDocID(docID).ToHeadStoreKey().WithFieldID(core.COMPOSITE_NAMESPACE).ToString()
results, err := db.Headstore().Query(ctx, query.Query{Prefix: prefix})
if err != nil {
return cid.Undef, err
@@ -85,7 +86,7 @@ func getHead(ctx context.Context, db client.DB, docID client.DocID) (cid.Cid, er
}
if len(entries) > 0 {
- hsKey, err := core.NewHeadStoreKey(entries[0].Key)
+ hsKey, err := keys.NewHeadStoreKey(entries[0].Key)
if err != nil {
return cid.Undef, err
}
From 2bd985782508634d493ae631bfadbc555e20b3ef Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
<41898282+github-actions[bot]@users.noreply.github.com>
Date: Mon, 4 Nov 2024 12:16:33 -0500
Subject: [PATCH 13/47] bot: Update dependencies (bulk dependabot PRs)
04-11-2024 (#3207)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
✅ This PR was created by combining the following PRs:
#3205 bot: Bump @typescript-eslint/parser from 8.11.0 to 8.12.2 in
/playground
#3204 bot: Bump eslint from 9.13.0 to 9.14.0 in /playground
#3202 bot: Bump github.com/multiformats/go-multiaddr from 0.13.0 to
0.14.0
⚠️ The following PRs were resolved manually due to merge conflicts:
#3203 bot: Bump @typescript-eslint/eslint-plugin from 8.11.0 to 8.12.2
in /playground
---------
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: Shahzad Lone
---
go.mod | 2 +-
go.sum | 4 +-
playground/package-lock.json | 416 ++++++++++++++++++++++++++++-------
playground/package.json | 6 +-
4 files changed, 345 insertions(+), 83 deletions(-)
diff --git a/go.mod b/go.mod
index 7b3bcff06b..bd49118689 100644
--- a/go.mod
+++ b/go.mod
@@ -38,7 +38,7 @@ require (
github.com/libp2p/go-libp2p-pubsub v0.12.0
github.com/libp2p/go-libp2p-record v0.2.0
github.com/mr-tron/base58 v1.2.0
- github.com/multiformats/go-multiaddr v0.13.0
+ github.com/multiformats/go-multiaddr v0.14.0
github.com/multiformats/go-multibase v0.2.0
github.com/multiformats/go-multicodec v0.9.0
github.com/multiformats/go-multihash v0.2.3
diff --git a/go.sum b/go.sum
index 39dbdab85d..76ed0cb1ce 100644
--- a/go.sum
+++ b/go.sum
@@ -1107,8 +1107,8 @@ github.com/multiformats/go-base32 v0.1.0/go.mod h1:Kj3tFY6zNr+ABYMqeUNeGvkIC/UYg
github.com/multiformats/go-base36 v0.2.0 h1:lFsAbNOGeKtuKozrtBsAkSVhv1p9D0/qedU9rQyccr0=
github.com/multiformats/go-base36 v0.2.0/go.mod h1:qvnKE++v+2MWCfePClUEjE78Z7P2a1UV0xHgWc0hkp4=
github.com/multiformats/go-multiaddr v0.1.1/go.mod h1:aMKBKNEYmzmDmxfX88/vz+J5IU55txyt0p4aiWVohjo=
-github.com/multiformats/go-multiaddr v0.13.0 h1:BCBzs61E3AGHcYYTv8dqRH43ZfyrqM8RXVPT8t13tLQ=
-github.com/multiformats/go-multiaddr v0.13.0/go.mod h1:sBXrNzucqkFJhvKOiwwLyqamGa/P5EIXNPLovyhQCII=
+github.com/multiformats/go-multiaddr v0.14.0 h1:bfrHrJhrRuh/NXH5mCnemjpbGjzRw/b+tJFOD41g2tU=
+github.com/multiformats/go-multiaddr v0.14.0/go.mod h1:6EkVAxtznq2yC3QT5CM1UTAwG0GTP3EWAIcjHuzQ+r4=
github.com/multiformats/go-multiaddr-dns v0.4.0 h1:P76EJ3qzBXpUXZ3twdCDx/kvagMsNo0LMFXpyms/zgU=
github.com/multiformats/go-multiaddr-dns v0.4.0/go.mod h1:7hfthtB4E4pQwirrz+J0CcDUfbWzTqEzVyYKKIKpgkc=
github.com/multiformats/go-multiaddr-fmt v0.1.0 h1:WLEFClPycPkp4fnIzoFoV9FVd49/eQsuaL3/CWe167E=
diff --git a/playground/package-lock.json b/playground/package-lock.json
index d986bf13eb..0c226c3c46 100644
--- a/playground/package-lock.json
+++ b/playground/package-lock.json
@@ -18,10 +18,10 @@
"@types/react": "^18.3.12",
"@types/react-dom": "^18.3.1",
"@types/swagger-ui-react": "^4.18.3",
- "@typescript-eslint/eslint-plugin": "^8.11.0",
- "@typescript-eslint/parser": "^8.11.0",
+ "@typescript-eslint/eslint-plugin": "^8.12.2",
+ "@typescript-eslint/parser": "^8.12.2",
"@vitejs/plugin-react-swc": "^3.7.1",
- "eslint": "^9.13.0",
+ "eslint": "^9.14.0",
"eslint-plugin-react-hooks": "^5.0.0",
"eslint-plugin-react-refresh": "^0.4.14",
"typescript": "^5.6.3",
@@ -513,11 +513,10 @@
}
},
"node_modules/@eslint-community/regexpp": {
- "version": "4.11.0",
- "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.11.0.tgz",
- "integrity": "sha512-G/M/tIiMrTAxEWRfLfQJMmGNX28IxBg4PBz8XqQhqUHLFI6TL2htpIB1iQCj144V5ee/JaKyT9/WZ0MGZWfA7A==",
+ "version": "4.12.1",
+ "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz",
+ "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==",
"dev": true,
- "license": "MIT",
"engines": {
"node": "^12.0.0 || ^14.0.0 || >=16.0.0"
}
@@ -616,9 +615,9 @@
}
},
"node_modules/@eslint/js": {
- "version": "9.13.0",
- "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.13.0.tgz",
- "integrity": "sha512-IFLyoY4d72Z5y/6o/BazFBezupzI/taV8sGumxTAVw3lXG9A6md1Dc34T9s1FoD/an9pJH8RHbAxsaEbBed9lA==",
+ "version": "9.14.0",
+ "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.14.0.tgz",
+ "integrity": "sha512-pFoEtFWCPyDOl+C6Ift+wC7Ro89otjigCf5vcuWqWgqNSQbRrpjSvdeE6ofLz4dHmyxD5f7gIdGT4+p36L6Twg==",
"dev": true,
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -742,27 +741,40 @@
}
},
"node_modules/@humanfs/core": {
- "version": "0.19.0",
- "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.0.tgz",
- "integrity": "sha512-2cbWIHbZVEweE853g8jymffCA+NCMiuqeECeBBLm8dg2oFdjuGJhgN4UAbI+6v0CKbbhvtXA4qV8YR5Ji86nmw==",
+ "version": "0.19.1",
+ "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz",
+ "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==",
"dev": true,
"engines": {
"node": ">=18.18.0"
}
},
"node_modules/@humanfs/node": {
- "version": "0.16.5",
- "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.5.tgz",
- "integrity": "sha512-KSPA4umqSG4LHYRodq31VDwKAvaTF4xmVlzM8Aeh4PlU1JQ3IG0wiA8C25d3RQ9nJyM3mBHyI53K06VVL/oFFg==",
+ "version": "0.16.6",
+ "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.6.tgz",
+ "integrity": "sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==",
"dev": true,
"dependencies": {
- "@humanfs/core": "^0.19.0",
+ "@humanfs/core": "^0.19.1",
"@humanwhocodes/retry": "^0.3.0"
},
"engines": {
"node": ">=18.18.0"
}
},
+ "node_modules/@humanfs/node/node_modules/@humanwhocodes/retry": {
+ "version": "0.3.1",
+ "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz",
+ "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==",
+ "dev": true,
+ "engines": {
+ "node": ">=18.18"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/nzakas"
+ }
+ },
"node_modules/@humanwhocodes/module-importer": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz",
@@ -778,9 +790,9 @@
}
},
"node_modules/@humanwhocodes/retry": {
- "version": "0.3.1",
- "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz",
- "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==",
+ "version": "0.4.0",
+ "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.0.tgz",
+ "integrity": "sha512-xnRgu9DxZbkWak/te3fcytNyp8MTbuiZIaueg2rgEvBuN55n04nwLYLU9TX/VVlusc9L2ZNXi99nUFNkHXtr5g==",
"dev": true,
"engines": {
"node": ">=18.18"
@@ -2524,17 +2536,16 @@
"license": "MIT"
},
"node_modules/@typescript-eslint/eslint-plugin": {
- "version": "8.11.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.11.0.tgz",
- "integrity": "sha512-KhGn2LjW1PJT2A/GfDpiyOfS4a8xHQv2myUagTM5+zsormOmBlYsnQ6pobJ8XxJmh6hnHwa2Mbe3fPrDJoDhbA==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.12.2.tgz",
+ "integrity": "sha512-gQxbxM8mcxBwaEmWdtLCIGLfixBMHhQjBqR8sVWNTPpcj45WlYL2IObS/DNMLH1DBP0n8qz+aiiLTGfopPEebw==",
"dev": true,
- "license": "MIT",
"dependencies": {
"@eslint-community/regexpp": "^4.10.0",
- "@typescript-eslint/scope-manager": "8.11.0",
- "@typescript-eslint/type-utils": "8.11.0",
- "@typescript-eslint/utils": "8.11.0",
- "@typescript-eslint/visitor-keys": "8.11.0",
+ "@typescript-eslint/scope-manager": "8.12.2",
+ "@typescript-eslint/type-utils": "8.12.2",
+ "@typescript-eslint/utils": "8.12.2",
+ "@typescript-eslint/visitor-keys": "8.12.2",
"graphemer": "^1.4.0",
"ignore": "^5.3.1",
"natural-compare": "^1.4.0",
@@ -2557,17 +2568,63 @@
}
}
},
+ "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": {
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.12.2.tgz",
+ "integrity": "sha512-gPLpLtrj9aMHOvxJkSbDBmbRuYdtiEbnvO25bCMza3DhMjTQw0u7Y1M+YR5JPbMsXXnSPuCf5hfq0nEkQDL/JQ==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.12.2",
+ "@typescript-eslint/visitor-keys": "8.12.2"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": {
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.12.2.tgz",
+ "integrity": "sha512-VwDwMF1SZ7wPBUZwmMdnDJ6sIFk4K4s+ALKLP6aIQsISkPv8jhiw65sAK6SuWODN/ix+m+HgbYDkH+zLjrzvOA==",
+ "dev": true,
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": {
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.12.2.tgz",
+ "integrity": "sha512-PChz8UaKQAVNHghsHcPyx1OMHoFRUEA7rJSK/mDhdq85bk+PLsUHUBqTQTFt18VJZbmxBovM65fezlheQRsSDA==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.12.2",
+ "eslint-visitor-keys": "^3.4.3"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
"node_modules/@typescript-eslint/parser": {
- "version": "8.11.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.11.0.tgz",
- "integrity": "sha512-lmt73NeHdy1Q/2ul295Qy3uninSqi6wQI18XwSpm8w0ZbQXUpjCAWP1Vlv/obudoBiIjJVjlztjQ+d/Md98Yxg==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.12.2.tgz",
+ "integrity": "sha512-MrvlXNfGPLH3Z+r7Tk+Z5moZAc0dzdVjTgUgwsdGweH7lydysQsnSww3nAmsq8blFuRD5VRlAr9YdEFw3e6PBw==",
"dev": true,
- "license": "BSD-2-Clause",
"dependencies": {
- "@typescript-eslint/scope-manager": "8.11.0",
- "@typescript-eslint/types": "8.11.0",
- "@typescript-eslint/typescript-estree": "8.11.0",
- "@typescript-eslint/visitor-keys": "8.11.0",
+ "@typescript-eslint/scope-manager": "8.12.2",
+ "@typescript-eslint/types": "8.12.2",
+ "@typescript-eslint/typescript-estree": "8.12.2",
+ "@typescript-eslint/visitor-keys": "8.12.2",
"debug": "^4.3.4"
},
"engines": {
@@ -2586,6 +2643,81 @@
}
}
},
+ "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": {
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.12.2.tgz",
+ "integrity": "sha512-gPLpLtrj9aMHOvxJkSbDBmbRuYdtiEbnvO25bCMza3DhMjTQw0u7Y1M+YR5JPbMsXXnSPuCf5hfq0nEkQDL/JQ==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.12.2",
+ "@typescript-eslint/visitor-keys": "8.12.2"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": {
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.12.2.tgz",
+ "integrity": "sha512-VwDwMF1SZ7wPBUZwmMdnDJ6sIFk4K4s+ALKLP6aIQsISkPv8jhiw65sAK6SuWODN/ix+m+HgbYDkH+zLjrzvOA==",
+ "dev": true,
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": {
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.12.2.tgz",
+ "integrity": "sha512-mME5MDwGe30Pq9zKPvyduyU86PH7aixwqYR2grTglAdB+AN8xXQ1vFGpYaUSJ5o5P/5znsSBeNcs5g5/2aQwow==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.12.2",
+ "@typescript-eslint/visitor-keys": "8.12.2",
+ "debug": "^4.3.4",
+ "fast-glob": "^3.3.2",
+ "is-glob": "^4.0.3",
+ "minimatch": "^9.0.4",
+ "semver": "^7.6.0",
+ "ts-api-utils": "^1.3.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependenciesMeta": {
+ "typescript": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": {
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.12.2.tgz",
+ "integrity": "sha512-PChz8UaKQAVNHghsHcPyx1OMHoFRUEA7rJSK/mDhdq85bk+PLsUHUBqTQTFt18VJZbmxBovM65fezlheQRsSDA==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.12.2",
+ "eslint-visitor-keys": "^3.4.3"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
"node_modules/@typescript-eslint/scope-manager": {
"version": "8.11.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.11.0.tgz",
@@ -2605,15 +2737,55 @@
}
},
"node_modules/@typescript-eslint/type-utils": {
- "version": "8.11.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.11.0.tgz",
- "integrity": "sha512-ItiMfJS6pQU0NIKAaybBKkuVzo6IdnAhPFZA/2Mba/uBjuPQPet/8+zh5GtLHwmuFRShZx+8lhIs7/QeDHflOg==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.12.2.tgz",
+ "integrity": "sha512-bwuU4TAogPI+1q/IJSKuD4shBLc/d2vGcRT588q+jzayQyjVK2X6v/fbR4InY2U2sgf8MEvVCqEWUzYzgBNcGQ==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/typescript-estree": "8.12.2",
+ "@typescript-eslint/utils": "8.12.2",
+ "debug": "^4.3.4",
+ "ts-api-utils": "^1.3.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependenciesMeta": {
+ "typescript": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": {
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.12.2.tgz",
+ "integrity": "sha512-VwDwMF1SZ7wPBUZwmMdnDJ6sIFk4K4s+ALKLP6aIQsISkPv8jhiw65sAK6SuWODN/ix+m+HgbYDkH+zLjrzvOA==",
+ "dev": true,
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": {
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.12.2.tgz",
+ "integrity": "sha512-mME5MDwGe30Pq9zKPvyduyU86PH7aixwqYR2grTglAdB+AN8xXQ1vFGpYaUSJ5o5P/5znsSBeNcs5g5/2aQwow==",
"dev": true,
- "license": "MIT",
"dependencies": {
- "@typescript-eslint/typescript-estree": "8.11.0",
- "@typescript-eslint/utils": "8.11.0",
+ "@typescript-eslint/types": "8.12.2",
+ "@typescript-eslint/visitor-keys": "8.12.2",
"debug": "^4.3.4",
+ "fast-glob": "^3.3.2",
+ "is-glob": "^4.0.3",
+ "minimatch": "^9.0.4",
+ "semver": "^7.6.0",
"ts-api-utils": "^1.3.0"
},
"engines": {
@@ -2629,6 +2801,23 @@
}
}
},
+ "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": {
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.12.2.tgz",
+ "integrity": "sha512-PChz8UaKQAVNHghsHcPyx1OMHoFRUEA7rJSK/mDhdq85bk+PLsUHUBqTQTFt18VJZbmxBovM65fezlheQRsSDA==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.12.2",
+ "eslint-visitor-keys": "^3.4.3"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
"node_modules/@typescript-eslint/types": {
"version": "8.11.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.11.0.tgz",
@@ -2673,16 +2862,15 @@
}
},
"node_modules/@typescript-eslint/utils": {
- "version": "8.11.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.11.0.tgz",
- "integrity": "sha512-CYiX6WZcbXNJV7UNB4PLDIBtSdRmRI/nb0FMyqHPTQD1rMjA0foPLaPUV39C/MxkTd/QKSeX+Gb34PPsDVC35g==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.12.2.tgz",
+ "integrity": "sha512-UTTuDIX3fkfAz6iSVa5rTuSfWIYZ6ATtEocQ/umkRSyC9O919lbZ8dcH7mysshrCdrAM03skJOEYaBugxN+M6A==",
"dev": true,
- "license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.4.0",
- "@typescript-eslint/scope-manager": "8.11.0",
- "@typescript-eslint/types": "8.11.0",
- "@typescript-eslint/typescript-estree": "8.11.0"
+ "@typescript-eslint/scope-manager": "8.12.2",
+ "@typescript-eslint/types": "8.12.2",
+ "@typescript-eslint/typescript-estree": "8.12.2"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2695,6 +2883,81 @@
"eslint": "^8.57.0 || ^9.0.0"
}
},
+ "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": {
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.12.2.tgz",
+ "integrity": "sha512-gPLpLtrj9aMHOvxJkSbDBmbRuYdtiEbnvO25bCMza3DhMjTQw0u7Y1M+YR5JPbMsXXnSPuCf5hfq0nEkQDL/JQ==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.12.2",
+ "@typescript-eslint/visitor-keys": "8.12.2"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": {
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.12.2.tgz",
+ "integrity": "sha512-VwDwMF1SZ7wPBUZwmMdnDJ6sIFk4K4s+ALKLP6aIQsISkPv8jhiw65sAK6SuWODN/ix+m+HgbYDkH+zLjrzvOA==",
+ "dev": true,
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": {
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.12.2.tgz",
+ "integrity": "sha512-mME5MDwGe30Pq9zKPvyduyU86PH7aixwqYR2grTglAdB+AN8xXQ1vFGpYaUSJ5o5P/5znsSBeNcs5g5/2aQwow==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.12.2",
+ "@typescript-eslint/visitor-keys": "8.12.2",
+ "debug": "^4.3.4",
+ "fast-glob": "^3.3.2",
+ "is-glob": "^4.0.3",
+ "minimatch": "^9.0.4",
+ "semver": "^7.6.0",
+ "ts-api-utils": "^1.3.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependenciesMeta": {
+ "typescript": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": {
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.12.2.tgz",
+ "integrity": "sha512-PChz8UaKQAVNHghsHcPyx1OMHoFRUEA7rJSK/mDhdq85bk+PLsUHUBqTQTFt18VJZbmxBovM65fezlheQRsSDA==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.12.2",
+ "eslint-visitor-keys": "^3.4.3"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
"node_modules/@typescript-eslint/visitor-keys": {
"version": "8.11.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.11.0.tgz",
@@ -2726,11 +2989,10 @@
}
},
"node_modules/acorn": {
- "version": "8.12.1",
- "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz",
- "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==",
+ "version": "8.14.0",
+ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz",
+ "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==",
"dev": true,
- "license": "MIT",
"bin": {
"acorn": "bin/acorn"
},
@@ -3308,21 +3570,21 @@
}
},
"node_modules/eslint": {
- "version": "9.13.0",
- "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.13.0.tgz",
- "integrity": "sha512-EYZK6SX6zjFHST/HRytOdA/zE72Cq/bfw45LSyuwrdvcclb/gqV8RRQxywOBEWO2+WDpva6UZa4CcDeJKzUCFA==",
+ "version": "9.14.0",
+ "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.14.0.tgz",
+ "integrity": "sha512-c2FHsVBr87lnUtjP4Yhvk4yEhKrQavGafRA/Se1ouse8PfbfC/Qh9Mxa00yWsZRlqeUB9raXip0aiiUZkgnr9g==",
"dev": true,
"dependencies": {
"@eslint-community/eslint-utils": "^4.2.0",
- "@eslint-community/regexpp": "^4.11.0",
+ "@eslint-community/regexpp": "^4.12.1",
"@eslint/config-array": "^0.18.0",
"@eslint/core": "^0.7.0",
"@eslint/eslintrc": "^3.1.0",
- "@eslint/js": "9.13.0",
+ "@eslint/js": "9.14.0",
"@eslint/plugin-kit": "^0.2.0",
- "@humanfs/node": "^0.16.5",
+ "@humanfs/node": "^0.16.6",
"@humanwhocodes/module-importer": "^1.0.1",
- "@humanwhocodes/retry": "^0.3.1",
+ "@humanwhocodes/retry": "^0.4.0",
"@types/estree": "^1.0.6",
"@types/json-schema": "^7.0.15",
"ajv": "^6.12.4",
@@ -3330,9 +3592,9 @@
"cross-spawn": "^7.0.2",
"debug": "^4.3.2",
"escape-string-regexp": "^4.0.0",
- "eslint-scope": "^8.1.0",
- "eslint-visitor-keys": "^4.1.0",
- "espree": "^10.2.0",
+ "eslint-scope": "^8.2.0",
+ "eslint-visitor-keys": "^4.2.0",
+ "espree": "^10.3.0",
"esquery": "^1.5.0",
"esutils": "^2.0.2",
"fast-deep-equal": "^3.1.3",
@@ -3389,9 +3651,9 @@
}
},
"node_modules/eslint-scope": {
- "version": "8.1.0",
- "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.1.0.tgz",
- "integrity": "sha512-14dSvlhaVhKKsa9Fx1l8A17s7ah7Ef7wCakJ10LYk6+GYmP9yDti2oq2SEwcyndt6knfcZyhyxwY3i9yL78EQw==",
+ "version": "8.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.2.0.tgz",
+ "integrity": "sha512-PHlWUfG6lvPc3yvP5A4PNyBL1W8fkDUccmI21JUu/+GKZBoH/W5u6usENXUrWFRsyoW5ACUjFGgAFQp5gUlb/A==",
"dev": true,
"dependencies": {
"esrecurse": "^4.3.0",
@@ -3435,9 +3697,9 @@
}
},
"node_modules/eslint/node_modules/eslint-visitor-keys": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.1.0.tgz",
- "integrity": "sha512-Q7lok0mqMUSf5a/AdAZkA5a/gHcO6snwQClVNNvFKCAVlxXucdU8pKydU5ZVZjBx5xr37vGbFFWtLQYreLzrZg==",
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz",
+ "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==",
"dev": true,
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -3460,14 +3722,14 @@
}
},
"node_modules/espree": {
- "version": "10.2.0",
- "resolved": "https://registry.npmjs.org/espree/-/espree-10.2.0.tgz",
- "integrity": "sha512-upbkBJbckcCNBDBDXEbuhjbP68n+scUd3k/U2EkyM9nw+I/jPiL4cLF/Al06CF96wRltFda16sxDFrxsI1v0/g==",
+ "version": "10.3.0",
+ "resolved": "https://registry.npmjs.org/espree/-/espree-10.3.0.tgz",
+ "integrity": "sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==",
"dev": true,
"dependencies": {
- "acorn": "^8.12.0",
+ "acorn": "^8.14.0",
"acorn-jsx": "^5.3.2",
- "eslint-visitor-keys": "^4.1.0"
+ "eslint-visitor-keys": "^4.2.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -3477,9 +3739,9 @@
}
},
"node_modules/espree/node_modules/eslint-visitor-keys": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.1.0.tgz",
- "integrity": "sha512-Q7lok0mqMUSf5a/AdAZkA5a/gHcO6snwQClVNNvFKCAVlxXucdU8pKydU5ZVZjBx5xr37vGbFFWtLQYreLzrZg==",
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz",
+ "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==",
"dev": true,
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
diff --git a/playground/package.json b/playground/package.json
index e63e8ab850..3f16d6cc6e 100644
--- a/playground/package.json
+++ b/playground/package.json
@@ -20,10 +20,10 @@
"@types/react": "^18.3.12",
"@types/react-dom": "^18.3.1",
"@types/swagger-ui-react": "^4.18.3",
- "@typescript-eslint/eslint-plugin": "^8.11.0",
- "@typescript-eslint/parser": "^8.11.0",
+ "@typescript-eslint/eslint-plugin": "^8.12.2",
+ "@typescript-eslint/parser": "^8.12.2",
"@vitejs/plugin-react-swc": "^3.7.1",
- "eslint": "^9.13.0",
+ "eslint": "^9.14.0",
"eslint-plugin-react-hooks": "^5.0.0",
"eslint-plugin-react-refresh": "^0.4.14",
"typescript": "^5.6.3",
From 2b0b8624ac7bf4149f927d2192a46ca5e290ddb9 Mon Sep 17 00:00:00 2001
From: AndrewSisley
Date: Mon, 4 Nov 2024 15:16:25 -0500
Subject: [PATCH 14/47] refactor: Simplify merkle/crdt code (#3200)
## Relevant issue(s)
Resolves #3199
## Description
Simplifies merkle/crdt code via a handful of small changes.
---
internal/db/base/collection_keys.go | 29 ------------
internal/db/collection.go | 67 +++++++++-------------------
internal/db/collection_delete.go | 15 ++++---
internal/db/fetcher/versioned.go | 69 +++++++++++++----------------
internal/db/merge.go | 2 +-
internal/keys/datastore_doc.go | 6 +++
internal/merkle/crdt/composite.go | 16 +------
internal/merkle/crdt/counter.go | 10 ++---
internal/merkle/crdt/errors.go | 35 ---------------
internal/merkle/crdt/lwwreg.go | 11 ++---
internal/merkle/crdt/merklecrdt.go | 16 +++----
11 files changed, 82 insertions(+), 194 deletions(-)
delete mode 100644 internal/merkle/crdt/errors.go
diff --git a/internal/db/base/collection_keys.go b/internal/db/base/collection_keys.go
index 31cdeef18c..015a2aeee3 100644
--- a/internal/db/base/collection_keys.go
+++ b/internal/db/base/collection_keys.go
@@ -11,10 +11,7 @@
package base
import (
- "fmt"
-
"github.com/sourcenetwork/defradb/client"
- "github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/keys"
)
@@ -35,29 +32,3 @@ func MakeDataStoreKeyWithCollectionAndDocID(
DocID: docID,
}
}
-
-func MakePrimaryIndexKeyForCRDT(
- c client.CollectionDefinition,
- ctype client.CType,
- key keys.DataStoreKey,
- fieldName string,
-) (keys.DataStoreKey, error) {
- switch ctype {
- case client.COMPOSITE:
- return MakeDataStoreKeyWithCollectionDescription(c.Description).
- WithInstanceInfo(key).
- WithFieldID(core.COMPOSITE_NAMESPACE),
- nil
- case client.LWW_REGISTER, client.PN_COUNTER, client.P_COUNTER:
- field, ok := c.GetFieldByName(fieldName)
- if !ok {
- return keys.DataStoreKey{}, client.NewErrFieldNotExist(fieldName)
- }
-
- return MakeDataStoreKeyWithCollectionDescription(c.Description).
- WithInstanceInfo(key).
- WithFieldID(fmt.Sprint(field.ID)),
- nil
- }
- return keys.DataStoreKey{}, ErrInvalidCrdtType
-}
diff --git a/internal/db/collection.go b/internal/db/collection.go
index af631701fc..8d71c7aff6 100644
--- a/internal/db/collection.go
+++ b/internal/db/collection.go
@@ -17,10 +17,8 @@ import (
"strconv"
"strings"
- "github.com/ipfs/go-cid"
ds "github.com/ipfs/go-datastore"
"github.com/ipfs/go-datastore/query"
- cidlink "github.com/ipld/go-ipld-prime/linking/cid"
"github.com/sourcenetwork/immutable"
"github.com/sourcenetwork/defradb/acp"
@@ -467,7 +465,7 @@ func (c *collection) create(
}
// write data to DB via MerkleClock/CRDT
- _, err = c.save(ctx, doc, true)
+ err = c.save(ctx, doc, true)
if err != nil {
return err
}
@@ -535,7 +533,7 @@ func (c *collection) update(
return client.ErrDocumentNotFoundOrNotAuthorized
}
- _, err = c.save(ctx, doc, false)
+ err = c.save(ctx, doc, false)
if err != nil {
return err
}
@@ -605,15 +603,15 @@ func (c *collection) save(
ctx context.Context,
doc *client.Document,
isCreate bool,
-) (cid.Cid, error) {
+) error {
if err := c.validateEncryptedFields(ctx); err != nil {
- return cid.Undef, err
+ return err
}
if !isCreate {
err := c.updateIndexedDoc(ctx, doc)
if err != nil {
- return cid.Undef, err
+ return err
}
}
txn := mustGetContextTxn(ctx)
@@ -637,19 +635,19 @@ func (c *collection) save(
for k, v := range doc.Fields() {
val, err := doc.GetValueWithField(v)
if err != nil {
- return cid.Undef, err
+ return err
}
if val.IsDirty() {
fieldKey, fieldExists := c.tryGetFieldKey(primaryKey, k)
if !fieldExists {
- return cid.Undef, client.NewErrFieldNotExist(k)
+ return client.NewErrFieldNotExist(k)
}
fieldDescription, valid := c.Definition().GetFieldByName(k)
if !valid {
- return cid.Undef, client.NewErrFieldNotExist(k)
+ return client.NewErrFieldNotExist(k)
}
// by default the type will have been set to LWW_REGISTER. We need to ensure
@@ -663,10 +661,10 @@ func (c *collection) save(
val.Value(),
)
if err != nil {
- return cid.Undef, err
+ return err
}
- merkleCRDT, err := merklecrdt.InstanceWithStore(
+ merkleCRDT, err := merklecrdt.FieldLevelCRDTWithStore(
txn,
keys.NewCollectionSchemaVersionKey(c.Schema().VersionID, c.ID()),
val.Type(),
@@ -675,26 +673,27 @@ func (c *collection) save(
fieldDescription.Name,
)
if err != nil {
- return cid.Undef, err
+ return err
}
link, _, err := merkleCRDT.Save(ctx, merklecrdt.NewDocField(primaryKey.DocID, k, val))
if err != nil {
- return cid.Undef, err
+ return err
}
links = append(links, coreblock.NewDAGLink(k, link))
}
}
- link, headNode, err := c.saveCompositeToMerkleCRDT(
- ctx,
- primaryKey.ToDataStoreKey(),
- links,
- client.Active,
+ merkleCRDT := merklecrdt.NewMerkleCompositeDAG(
+ txn,
+ keys.NewCollectionSchemaVersionKey(c.Schema().VersionID, c.ID()),
+ primaryKey.ToDataStoreKey().WithFieldID(core.COMPOSITE_NAMESPACE),
)
+
+ link, headNode, err := merkleCRDT.Save(ctx, links)
if err != nil {
- return cid.Undef, err
+ return err
}
// publish an update event when the txn succeeds
@@ -712,7 +711,7 @@ func (c *collection) save(
doc.SetHead(link.Cid)
})
- return link.Cid, nil
+ return nil
}
func (c *collection) validateOneToOneLinkDoesntAlreadyExist(
@@ -888,32 +887,6 @@ func (c *collection) exists(
return true, false, nil
}
-// saveCompositeToMerkleCRDT saves the composite to the merkle CRDT.
-// It returns the CID of the block and the encoded block.
-// saveCompositeToMerkleCRDT MUST not be called outside the `c.save`
-// and `c.applyDelete` methods as we wrap the acp logic around those methods.
-// Calling it elsewhere could cause the omission of acp checks.
-func (c *collection) saveCompositeToMerkleCRDT(
- ctx context.Context,
- dsKey keys.DataStoreKey,
- links []coreblock.DAGLink,
- status client.DocumentStatus,
-) (cidlink.Link, []byte, error) {
- txn := mustGetContextTxn(ctx)
- dsKey = dsKey.WithFieldID(core.COMPOSITE_NAMESPACE)
- merkleCRDT := merklecrdt.NewMerkleCompositeDAG(
- txn,
- keys.NewCollectionSchemaVersionKey(c.Schema().VersionID, c.ID()),
- dsKey,
- )
-
- if status.IsDeleted() {
- return merkleCRDT.Delete(ctx, links)
- }
-
- return merkleCRDT.Save(ctx, links)
-}
-
func (c *collection) getPrimaryKeyFromDocID(docID client.DocID) keys.PrimaryDataStoreKey {
return keys.PrimaryDataStoreKey{
CollectionRootID: c.Description().RootID,
diff --git a/internal/db/collection_delete.go b/internal/db/collection_delete.go
index 468095b54c..b0bf933dda 100644
--- a/internal/db/collection_delete.go
+++ b/internal/db/collection_delete.go
@@ -16,8 +16,9 @@ import (
"github.com/sourcenetwork/defradb/acp"
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/event"
- coreblock "github.com/sourcenetwork/defradb/internal/core/block"
+ "github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/keys"
+ merklecrdt "github.com/sourcenetwork/defradb/internal/merkle/crdt"
)
// DeleteWithFilter deletes using a filter to target documents for delete.
@@ -138,14 +139,14 @@ func (c *collection) applyDelete(
}
txn := mustGetContextTxn(ctx)
- dsKey := primaryKey.ToDataStoreKey()
- link, b, err := c.saveCompositeToMerkleCRDT(
- ctx,
- dsKey,
- []coreblock.DAGLink{},
- client.Deleted,
+ merkleCRDT := merklecrdt.NewMerkleCompositeDAG(
+ txn,
+ keys.NewCollectionSchemaVersionKey(c.Schema().VersionID, c.ID()),
+ primaryKey.ToDataStoreKey().WithFieldID(core.COMPOSITE_NAMESPACE),
)
+
+ link, b, err := merkleCRDT.Delete(ctx)
if err != nil {
return err
}
diff --git a/internal/db/fetcher/versioned.go b/internal/db/fetcher/versioned.go
index 508b0ea406..b10b6ca84f 100644
--- a/internal/db/fetcher/versioned.go
+++ b/internal/db/fetcher/versioned.go
@@ -13,10 +13,10 @@ package fetcher
import (
"container/list"
"context"
+ "fmt"
"github.com/ipfs/go-cid"
ds "github.com/ipfs/go-datastore"
- cidlink "github.com/ipld/go-ipld-prime/linking/cid"
"github.com/sourcenetwork/immutable"
@@ -27,7 +27,6 @@ import (
"github.com/sourcenetwork/defradb/datastore/memory"
"github.com/sourcenetwork/defradb/internal/core"
coreblock "github.com/sourcenetwork/defradb/internal/core/block"
- "github.com/sourcenetwork/defradb/internal/db/base"
"github.com/sourcenetwork/defradb/internal/keys"
merklecrdt "github.com/sourcenetwork/defradb/internal/merkle/crdt"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
@@ -99,7 +98,7 @@ type VersionedFetcher struct {
col client.Collection
// @todo index *client.IndexDescription
- mCRDTs map[uint32]merklecrdt.MerkleCRDT
+ mCRDTs map[client.FieldID]merklecrdt.MerkleCRDT
}
// Init initializes the VersionedFetcher.
@@ -118,7 +117,7 @@ func (vf *VersionedFetcher) Init(
vf.acp = acp
vf.col = col
vf.queuedCids = list.New()
- vf.mCRDTs = make(map[uint32]merklecrdt.MerkleCRDT)
+ vf.mCRDTs = make(map[client.FieldID]merklecrdt.MerkleCRDT)
vf.txn = txn
// create store
@@ -182,7 +181,7 @@ func (vf *VersionedFetcher) Start(ctx context.Context, spans core.Spans) error {
}
vf.ctx = ctx
- vf.dsKey = dk
+ vf.dsKey = dk.WithCollectionRoot(vf.col.Description().RootID)
vf.version = c
if err := vf.seekTo(vf.version); err != nil {
@@ -352,7 +351,17 @@ func (vf *VersionedFetcher) merge(c cid.Cid) error {
}
// first arg 0 is the index for the composite DAG in the mCRDTs cache
- if err := vf.processBlock(0, block, link, client.COMPOSITE, client.FieldKind_None, ""); err != nil {
+ mcrdt, exists := vf.mCRDTs[0]
+ if !exists {
+ mcrdt = merklecrdt.NewMerkleCompositeDAG(
+ vf.store,
+ keys.CollectionSchemaVersionKey{},
+ vf.dsKey.WithFieldID(core.COMPOSITE_NAMESPACE),
+ )
+ vf.mCRDTs[0] = mcrdt
+ }
+ err = mcrdt.Clock().ProcessBlock(vf.ctx, block, link)
+ if err != nil {
return err
}
@@ -368,44 +377,30 @@ func (vf *VersionedFetcher) merge(c cid.Cid) error {
if !ok {
return client.NewErrFieldNotExist(l.Name)
}
- if err := vf.processBlock(uint32(field.ID), subBlock, l.Link, field.Typ, field.Kind, l.Name); err != nil {
- return err
- }
- }
-
- return nil
-}
-func (vf *VersionedFetcher) processBlock(
- crdtIndex uint32,
- block *coreblock.Block,
- blockLink cidlink.Link,
- ctype client.CType,
- kind client.FieldKind,
- fieldName string,
-) (err error) {
- // handle CompositeDAG
- mcrdt, exists := vf.mCRDTs[crdtIndex]
- if !exists {
- dsKey, err := base.MakePrimaryIndexKeyForCRDT(vf.col.Definition(), ctype, vf.dsKey, fieldName)
- if err != nil {
- return err
+ mcrdt, exists := vf.mCRDTs[field.ID]
+ if !exists {
+ mcrdt, err = merklecrdt.FieldLevelCRDTWithStore(
+ vf.store,
+ keys.CollectionSchemaVersionKey{},
+ field.Typ,
+ field.Kind,
+ vf.dsKey.WithFieldID(fmt.Sprint(field.ID)),
+ field.Name,
+ )
+ if err != nil {
+ return err
+ }
+ vf.mCRDTs[field.ID] = mcrdt
}
- mcrdt, err = merklecrdt.InstanceWithStore(
- vf.store,
- keys.CollectionSchemaVersionKey{},
- ctype,
- kind,
- dsKey,
- fieldName,
- )
+
+ err = mcrdt.Clock().ProcessBlock(vf.ctx, subBlock, l.Link)
if err != nil {
return err
}
- vf.mCRDTs[crdtIndex] = mcrdt
}
- return mcrdt.Clock().ProcessBlock(vf.ctx, block, blockLink)
+ return nil
}
func (vf *VersionedFetcher) getDAGBlock(c cid.Cid) (*coreblock.Block, error) {
diff --git a/internal/db/merge.go b/internal/db/merge.go
index d1b96d5b77..74db1ad302 100644
--- a/internal/db/merge.go
+++ b/internal/db/merge.go
@@ -455,7 +455,7 @@ func (mp *mergeProcessor) initCRDTForType(field string) (merklecrdt.MerkleCRDT,
return nil, nil
}
- mcrdt, err := merklecrdt.InstanceWithStore(
+ mcrdt, err := merklecrdt.FieldLevelCRDTWithStore(
mp.txn,
schemaVersionKey,
fd.Typ,
diff --git a/internal/keys/datastore_doc.go b/internal/keys/datastore_doc.go
index 1665fb7ea3..cd8ac60917 100644
--- a/internal/keys/datastore_doc.go
+++ b/internal/keys/datastore_doc.go
@@ -86,6 +86,12 @@ func (k DataStoreKey) WithDeletedFlag() DataStoreKey {
return newKey
}
+func (k DataStoreKey) WithCollectionRoot(colRoot uint32) DataStoreKey {
+ newKey := k
+ newKey.CollectionRootID = colRoot
+ return newKey
+}
+
func (k DataStoreKey) WithDocID(docID string) DataStoreKey {
newKey := k
newKey.DocID = docID
diff --git a/internal/merkle/crdt/composite.go b/internal/merkle/crdt/composite.go
index 44df12f83f..862541bf8f 100644
--- a/internal/merkle/crdt/composite.go
+++ b/internal/merkle/crdt/composite.go
@@ -60,25 +60,13 @@ func (m *MerkleCompositeDAG) Clock() *clock.MerkleClock {
// Delete sets the values of CompositeDAG for a delete.
func (m *MerkleCompositeDAG) Delete(
ctx context.Context,
- links []coreblock.DAGLink,
) (cidlink.Link, []byte, error) {
delta := m.reg.Set(client.Deleted)
- link, b, err := m.clock.AddDelta(ctx, delta, links...)
- if err != nil {
- return cidlink.Link{}, nil, err
- }
-
- return link, b, nil
+ return m.clock.AddDelta(ctx, delta)
}
// Save the value of the composite CRDT to DAG.
-func (m *MerkleCompositeDAG) Save(ctx context.Context, data any) (cidlink.Link, []byte, error) {
- links, ok := data.([]coreblock.DAGLink)
- if !ok {
- return cidlink.Link{}, nil, NewErrUnexpectedValueType(client.COMPOSITE, []coreblock.DAGLink{}, data)
- }
-
+func (m *MerkleCompositeDAG) Save(ctx context.Context, links []coreblock.DAGLink) (cidlink.Link, []byte, error) {
delta := m.reg.Set(client.Active)
-
return m.clock.AddDelta(ctx, delta, links...)
}
diff --git a/internal/merkle/crdt/counter.go b/internal/merkle/crdt/counter.go
index 50434ed7da..b5c45c4121 100644
--- a/internal/merkle/crdt/counter.go
+++ b/internal/merkle/crdt/counter.go
@@ -27,7 +27,7 @@ type MerkleCounter struct {
reg crdt.Counter
}
-var _ MerkleCRDT = (*MerkleCounter)(nil)
+var _ FieldLevelMerkleCRDT = (*MerkleCounter)(nil)
// NewMerkleCounter creates a new instance (or loaded from DB) of a MerkleCRDT
// backed by a Counter CRDT.
@@ -53,12 +53,8 @@ func (m *MerkleCounter) Clock() *clock.MerkleClock {
}
// Save the value of the Counter to the DAG.
-func (m *MerkleCounter) Save(ctx context.Context, data any) (cidlink.Link, []byte, error) {
- value, ok := data.(*DocField)
- if !ok {
- return cidlink.Link{}, nil, NewErrUnexpectedValueType(m.reg.CType(), &client.FieldValue{}, data)
- }
- bytes, err := value.FieldValue.Bytes()
+func (m *MerkleCounter) Save(ctx context.Context, data *DocField) (cidlink.Link, []byte, error) {
+ bytes, err := data.FieldValue.Bytes()
if err != nil {
return cidlink.Link{}, nil, err
}
diff --git a/internal/merkle/crdt/errors.go b/internal/merkle/crdt/errors.go
deleted file mode 100644
index 58ee8b6bc4..0000000000
--- a/internal/merkle/crdt/errors.go
+++ /dev/null
@@ -1,35 +0,0 @@
-// Copyright 2024 Democratized Data Foundation
-//
-// Use of this software is governed by the Business Source License
-// included in the file licenses/BSL.txt.
-//
-// As of the Change Date specified in that file, in accordance with
-// the Business Source License, use of this software will be governed
-// by the Apache License, Version 2.0, included in the file
-// licenses/APL.txt.
-
-package merklecrdt
-
-import (
- "fmt"
-
- "github.com/sourcenetwork/defradb/client"
- "github.com/sourcenetwork/defradb/errors"
-)
-
-const (
- errUnexpectedValueType = "unexpected value type for merkle CRDT"
-)
-
-var (
- ErrUnexpectedValueType = errors.New(errUnexpectedValueType)
-)
-
-func NewErrUnexpectedValueType(cType client.CType, expected, actual any) error {
- return errors.New(
- errUnexpectedValueType,
- errors.NewKV("CRDT", cType.String()),
- errors.NewKV("expected", fmt.Sprintf("%T", expected)),
- errors.NewKV("actual", fmt.Sprintf("%T", actual)),
- )
-}
diff --git a/internal/merkle/crdt/lwwreg.go b/internal/merkle/crdt/lwwreg.go
index 18fc7ee35d..6dae0a1a31 100644
--- a/internal/merkle/crdt/lwwreg.go
+++ b/internal/merkle/crdt/lwwreg.go
@@ -15,7 +15,6 @@ import (
cidlink "github.com/ipld/go-ipld-prime/linking/cid"
- "github.com/sourcenetwork/defradb/client"
corecrdt "github.com/sourcenetwork/defradb/internal/core/crdt"
"github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/merkle/clock"
@@ -27,7 +26,7 @@ type MerkleLWWRegister struct {
reg corecrdt.LWWRegister
}
-var _ MerkleCRDT = (*MerkleLWWRegister)(nil)
+var _ FieldLevelMerkleCRDT = (*MerkleLWWRegister)(nil)
// NewMerkleLWWRegister creates a new instance (or loaded from DB) of a MerkleCRDT
// backed by a LWWRegister CRDT.
@@ -51,12 +50,8 @@ func (m *MerkleLWWRegister) Clock() *clock.MerkleClock {
}
// Save the value of the register to the DAG.
-func (m *MerkleLWWRegister) Save(ctx context.Context, data any) (cidlink.Link, []byte, error) {
- value, ok := data.(*DocField)
- if !ok {
- return cidlink.Link{}, nil, NewErrUnexpectedValueType(client.LWW_REGISTER, &client.FieldValue{}, data)
- }
- bytes, err := value.FieldValue.Bytes()
+func (m *MerkleLWWRegister) Save(ctx context.Context, data *DocField) (cidlink.Link, []byte, error) {
+ bytes, err := data.FieldValue.Bytes()
if err != nil {
return cidlink.Link{}, nil, err
}
diff --git a/internal/merkle/crdt/merklecrdt.go b/internal/merkle/crdt/merklecrdt.go
index 3dd47ad0dc..911a81c10c 100644
--- a/internal/merkle/crdt/merklecrdt.go
+++ b/internal/merkle/crdt/merklecrdt.go
@@ -38,17 +38,21 @@ type Stores interface {
// so it can be merged with any given semantics.
type MerkleCRDT interface {
Clock() *clock.MerkleClock
- Save(ctx context.Context, data any) (cidlink.Link, []byte, error)
}
-func InstanceWithStore(
+type FieldLevelMerkleCRDT interface {
+ MerkleCRDT
+ Save(ctx context.Context, data *DocField) (cidlink.Link, []byte, error)
+}
+
+func FieldLevelCRDTWithStore(
store Stores,
schemaVersionKey keys.CollectionSchemaVersionKey,
cType client.CType,
kind client.FieldKind,
key keys.DataStoreKey,
fieldName string,
-) (MerkleCRDT, error) {
+) (FieldLevelMerkleCRDT, error) {
switch cType {
case client.LWW_REGISTER:
return NewMerkleLWWRegister(
@@ -66,12 +70,6 @@ func InstanceWithStore(
cType == client.PN_COUNTER,
kind.(client.ScalarKind),
), nil
- case client.COMPOSITE:
- return NewMerkleCompositeDAG(
- store,
- schemaVersionKey,
- key,
- ), nil
}
return nil, client.NewErrUnknownCRDT(cType)
}
From c8fd3b15ef67e575fb064b8b79a64c9c790355fe Mon Sep 17 00:00:00 2001
From: Keenan Nemetz
Date: Wed, 6 Nov 2024 12:33:30 -0800
Subject: [PATCH 15/47] feat: Filter alias target (#3201)
## Relevant issue(s)
Resolves #3194
## Description
This PR adds alias targeting in filters.
**Aggregate targets are not included in this PR as they require more
changes.**
## Tasks
- [x] I made sure the code is well commented, particularly
hard-to-understand areas.
- [x] I made sure the repository-held documentation is changed
accordingly.
- [x] I made sure the pull request title adheres to the conventional
commit style (the subset used in the project can be found in
[tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)).
- [x] I made sure to discuss its limitations such as threats to
validity, vulnerability to mistake and misuse, robustness to
invalidation of assumptions, resource requirements, ...
## How has this been tested?
Added and updated integration tests.
Specify the platform(s) on which this was tested:
- MacOS
---
client/request/filter.go | 7 +-
internal/connor/connor.go | 9 +-
internal/connor/eq.go | 9 +-
internal/connor/key.go | 11 +-
internal/connor/not_test.go | 8 +-
internal/core/doc.go | 12 +
internal/planner/filter/copy_field.go | 14 +-
internal/planner/filter/copy_field_test.go | 10 +
internal/planner/mapper/errors.go | 5 +
internal/planner/mapper/mapper.go | 35 +-
internal/planner/mapper/targetable.go | 36 +-
internal/request/graphql/parser/filter.go | 12 +-
internal/request/graphql/schema/generate.go | 10 +-
.../query/one_to_many/with_count_test.go | 63 ++++
.../query/one_to_many/with_filter_test.go | 86 +++++
.../simple/with_filter/with_alias_test.go | 320 ++++++++++++++++++
.../schema/aggregates/inline_array_test.go | 6 +
tests/integration/schema/default_fields.go | 1 +
tests/integration/schema/filter_test.go | 21 ++
19 files changed, 610 insertions(+), 65 deletions(-)
create mode 100644 tests/integration/query/simple/with_filter/with_alias_test.go
diff --git a/client/request/filter.go b/client/request/filter.go
index aabfafb9b9..feacb02f2b 100644
--- a/client/request/filter.go
+++ b/client/request/filter.go
@@ -13,9 +13,10 @@ package request
import "github.com/sourcenetwork/immutable"
const (
- FilterOpOr = "_or"
- FilterOpAnd = "_and"
- FilterOpNot = "_not"
+ FilterOpOr = "_or"
+ FilterOpAnd = "_and"
+ FilterOpNot = "_not"
+ FilterOpAlias = "_alias"
)
// Filter contains the parsed condition map to be
diff --git a/internal/connor/connor.go b/internal/connor/connor.go
index da4f7f5b4d..cdb49d2973 100644
--- a/internal/connor/connor.go
+++ b/internal/connor/connor.go
@@ -9,9 +9,10 @@
package connor
const (
- AndOp = "_and"
- OrOp = "_or"
- NotOp = "_not"
+ AliasOp = "_alias"
+ AndOp = "_and"
+ OrOp = "_or"
+ NotOp = "_not"
AnyOp = "_any"
AllOp = "_all"
@@ -62,7 +63,7 @@ func matchWith(op string, conditions, data any) (bool, error) {
return anyOp(conditions, data)
case AllOp:
return all(conditions, data)
- case EqualOp:
+ case EqualOp, AliasOp:
return eq(conditions, data)
case GreaterOrEqualOp:
return ge(conditions, data)
diff --git a/internal/connor/eq.go b/internal/connor/eq.go
index 65c17356f0..1caa43e81b 100644
--- a/internal/connor/eq.go
+++ b/internal/connor/eq.go
@@ -34,10 +34,15 @@ func eq(condition, data any) (bool, error) {
switch cn := condition.(type) {
case map[FilterKey]any:
for prop, cond := range cn {
- m, err := matchWith(prop.GetOperatorOrDefault(EqualOp), cond, prop.GetProp(data))
+ d, op, err := prop.PropertyAndOperator(data, EqualOp)
if err != nil {
return false, err
- } else if !m {
+ }
+ m, err := matchWith(op, cond, d)
+ if err != nil {
+ return false, err
+ }
+ if !m {
return false, nil
}
}
diff --git a/internal/connor/key.go b/internal/connor/key.go
index b02769685c..98e8d747ea 100644
--- a/internal/connor/key.go
+++ b/internal/connor/key.go
@@ -3,12 +3,11 @@ package connor
// FilterKey represents a type that may be used as a map key
// in a filter.
type FilterKey interface {
- // GetProp returns the data that should be used with this key
- // from the given data.
- GetProp(data any) any
- // GetOperatorOrDefault returns either the operator that corresponds
- // to this key, or the given default.
- GetOperatorOrDefault(defaultOp string) string
+ // PropertyAndOperator returns the data and operator that should be used
+ // to filter the value matching this key.
+ //
+ // If the key does not have an operator the given defaultOp will be returned.
+ PropertyAndOperator(data any, defaultOp string) (any, string, error)
// Equal returns true if other is equal, otherwise returns false.
Equal(other FilterKey) bool
}
diff --git a/internal/connor/not_test.go b/internal/connor/not_test.go
index 1a1dd785dd..959ef04177 100644
--- a/internal/connor/not_test.go
+++ b/internal/connor/not_test.go
@@ -34,12 +34,8 @@ type operator struct {
Operation string
}
-func (k *operator) GetProp(data any) any {
- return data
-}
-
-func (k *operator) GetOperatorOrDefault(defaultOp string) string {
- return k.Operation
+func (k *operator) PropertyAndOperator(data any, defaultOp string) (any, string, error) {
+ return data, k.Operation, nil
}
func (k *operator) Equal(other FilterKey) bool {
diff --git a/internal/core/doc.go b/internal/core/doc.go
index 379ac79bf9..d8716346a0 100644
--- a/internal/core/doc.go
+++ b/internal/core/doc.go
@@ -298,3 +298,15 @@ func (mapping *DocumentMapping) TryToFindNameFromIndex(targetIndex int) (string,
return "", false
}
+
+// TryToFindIndexFromRenderKey returns the corresponding index of the given render key.
+//
+// Additionally, will also return true if the render key was found, and false otherwise.
+func (mapping *DocumentMapping) TryToFindIndexFromRenderKey(key string) (int, bool) {
+ for _, renderKey := range mapping.RenderKeys {
+ if renderKey.Key == key {
+ return renderKey.Index, true
+ }
+ }
+ return -1, false
+}
diff --git a/internal/planner/filter/copy_field.go b/internal/planner/filter/copy_field.go
index 838cdf4cf0..7254dc6cc3 100644
--- a/internal/planner/filter/copy_field.go
+++ b/internal/planner/filter/copy_field.go
@@ -62,10 +62,10 @@ func traverseFilterByProperty(
}
}
} else if opKey, isOpKey := targetKey.(*mapper.Operator); isOpKey {
- clauseArr, isArr := clause.([]any)
- if isArr {
+ switch t := clause.(type) {
+ case []any:
resultArr := make([]any, 0)
- for _, elementClause := range clauseArr {
+ for _, elementClause := range t {
elementMap, ok := elementClause.(map[connor.FilterKey]any)
if !ok {
continue
@@ -80,6 +80,14 @@ func traverseFilterByProperty(
} else if shouldDelete {
delete(result, opKey)
}
+
+ case map[connor.FilterKey]any:
+ resultMap := traverseFilterByProperty(keys, t, shouldDelete)
+ if len(resultMap) > 0 {
+ result[opKey] = resultMap
+ } else if shouldDelete {
+ delete(result, opKey)
+ }
}
}
}
diff --git a/internal/planner/filter/copy_field_test.go b/internal/planner/filter/copy_field_test.go
index d539e437e3..d86aa59531 100644
--- a/internal/planner/filter/copy_field_test.go
+++ b/internal/planner/filter/copy_field_test.go
@@ -46,6 +46,16 @@ func TestCopyField(t *testing.T) {
m("age", m("_gt", 55)),
),
},
+ {
+ name: "within _not",
+ inputFilter: m("_not",
+ m("age", m("_gt", 55)),
+ ),
+ inputField: []mapper.Field{{Index: authorAgeInd}},
+ expectedFilter: m("_not",
+ m("age", m("_gt", 55)),
+ ),
+ },
{
name: "within _or and _and",
inputFilter: r("_and",
diff --git a/internal/planner/mapper/errors.go b/internal/planner/mapper/errors.go
index 43f7f56a7a..6aa03758bd 100644
--- a/internal/planner/mapper/errors.go
+++ b/internal/planner/mapper/errors.go
@@ -15,6 +15,7 @@ import "github.com/sourcenetwork/defradb/errors"
const (
errInvalidFieldToGroupBy string = "invalid field value to groupBy"
errTypeNotFound string = "type not found"
+ errFieldOrAliasNotFound string = "field or alias not found"
)
var (
@@ -33,3 +34,7 @@ func NewErrInvalidFieldToGroupBy(field string) error {
func NewErrTypeNotFound(name string) error {
return errors.New(errTypeNotFound, errors.NewKV("Type", name))
}
+
+func NewErrFieldOrAliasNotFound(name string) error {
+ return errors.New(errFieldOrAliasNotFound, errors.NewKV("Name", name))
+}
diff --git a/internal/planner/mapper/mapper.go b/internal/planner/mapper/mapper.go
index 4717b7cba0..15014fb9f4 100644
--- a/internal/planner/mapper/mapper.go
+++ b/internal/planner/mapper/mapper.go
@@ -992,6 +992,12 @@ func resolveInnerFilterDependencies(
newFields := []Requestable{}
for key, value := range source {
+ // alias fields are guaranteed to be resolved
+ // because they refer to existing fields
+ if key == request.FilterOpAlias {
+ continue
+ }
+
if key == request.FilterOpAnd || key == request.FilterOpOr {
if value == nil {
continue
@@ -1335,8 +1341,27 @@ func toFilterKeyValue(
sourceClause any,
mapping *core.DocumentMapping,
) (connor.FilterKey, any) {
+ var propIndex = -1
+ if mapping != nil {
+ // if we have a mapping available check if the
+ // source key is a field or alias (render key)
+ if indexes, ok := mapping.IndexesByName[sourceKey]; ok {
+ // If there are multiple properties of the same name we can just take the first as
+ // we have no other reasonable way of identifying which property they mean if multiple
+ // consumer specified requestables are available. Aggregate dependencies should not
+ // impact this as they are added after selects.
+ propIndex = indexes[0]
+ } else if index, ok := mapping.TryToFindIndexFromRenderKey(sourceKey); ok {
+ propIndex = index
+ }
+ }
+
var returnKey connor.FilterKey
- if strings.HasPrefix(sourceKey, "_") && sourceKey != request.DocIDFieldName {
+ if propIndex >= 0 {
+ returnKey = &PropertyIndex{
+ Index: propIndex,
+ }
+ } else if strings.HasPrefix(sourceKey, "_") {
returnKey = &Operator{
Operation: sourceKey,
}
@@ -1345,14 +1370,6 @@ func toFilterKeyValue(
if connor.IsOpSimple(sourceKey) {
return returnKey, sourceClause
}
- } else if mapping != nil && len(mapping.IndexesByName[sourceKey]) > 0 {
- // If there are multiple properties of the same name we can just take the first as
- // we have no other reasonable way of identifying which property they mean if multiple
- // consumer specified requestables are available. Aggregate dependencies should not
- // impact this as they are added after selects.
- returnKey = &PropertyIndex{
- Index: mapping.FirstIndexOfName(sourceKey),
- }
} else {
returnKey = &ObjectProperty{
Name: sourceKey,
diff --git a/internal/planner/mapper/targetable.go b/internal/planner/mapper/targetable.go
index 2611d297dc..55bc256327 100644
--- a/internal/planner/mapper/targetable.go
+++ b/internal/planner/mapper/targetable.go
@@ -30,16 +30,11 @@ type PropertyIndex struct {
Index int
}
-func (k *PropertyIndex) GetProp(data any) any {
+func (k *PropertyIndex) PropertyAndOperator(data any, defaultOp string) (any, string, error) {
if data == nil {
- return nil
+ return nil, defaultOp, nil
}
-
- return data.(core.Doc).Fields[k.Index]
-}
-
-func (k *PropertyIndex) GetOperatorOrDefault(defaultOp string) string {
- return defaultOp
+ return data.(core.Doc).Fields[k.Index], defaultOp, nil
}
func (k *PropertyIndex) Equal(other connor.FilterKey) bool {
@@ -57,12 +52,8 @@ type Operator struct {
Operation string
}
-func (k *Operator) GetProp(data any) any {
- return data
-}
-
-func (k *Operator) GetOperatorOrDefault(defaultOp string) string {
- return k.Operation
+func (k *Operator) PropertyAndOperator(data any, defaultOp string) (any, string, error) {
+ return data, k.Operation, nil
}
func (k *Operator) Equal(other connor.FilterKey) bool {
@@ -81,16 +72,15 @@ type ObjectProperty struct {
Name string
}
-func (k *ObjectProperty) GetProp(data any) any {
+func (k *ObjectProperty) PropertyAndOperator(data any, defaultOp string) (any, string, error) {
if data == nil {
- return nil
+ return nil, defaultOp, nil
}
- object := data.(map[string]any)
- return object[k.Name]
-}
-
-func (k *ObjectProperty) GetOperatorOrDefault(defaultOp string) string {
- return defaultOp
+ docMap, ok := data.(map[string]any)
+ if !ok {
+ return nil, defaultOp, NewErrFieldOrAliasNotFound(k.Name)
+ }
+ return docMap[k.Name], defaultOp, nil
}
func (k *ObjectProperty) Equal(other connor.FilterKey) bool {
@@ -165,7 +155,7 @@ func filterObjectToMap(mapping *core.DocumentMapping, obj map[connor.FilterKey]a
logicMapEntries[i] = filterObjectToMap(mapping, itemMap)
}
outmap[keyType.Operation] = logicMapEntries
- case request.FilterOpNot:
+ case request.FilterOpNot, request.FilterOpAlias:
itemMap, ok := v.(map[connor.FilterKey]any)
if ok {
outmap[keyType.Operation] = filterObjectToMap(mapping, itemMap)
diff --git a/internal/request/graphql/parser/filter.go b/internal/request/graphql/parser/filter.go
index 40d4a798f4..1995eeb58b 100644
--- a/internal/request/graphql/parser/filter.go
+++ b/internal/request/graphql/parser/filter.go
@@ -93,20 +93,20 @@ func parseFilterFieldsForDescriptionMap(
fields := make([]client.FieldDefinition, 0)
for k, v := range conditions {
switch k {
- case "_or", "_and":
+ case request.FilterOpOr, request.FilterOpAnd:
conds := v.([]any)
- parsedFileds, err := parseFilterFieldsForDescriptionSlice(conds, col)
+ parsedFields, err := parseFilterFieldsForDescriptionSlice(conds, col)
if err != nil {
return nil, err
}
- fields = append(fields, parsedFileds...)
- case "_not":
+ fields = append(fields, parsedFields...)
+ case request.FilterOpNot, request.FilterOpAlias:
conds := v.(map[string]any)
- parsedFileds, err := parseFilterFieldsForDescriptionMap(conds, col)
+ parsedFields, err := parseFilterFieldsForDescriptionMap(conds, col)
if err != nil {
return nil, err
}
- fields = append(fields, parsedFileds...)
+ fields = append(fields, parsedFields...)
default:
f, found := col.GetFieldByName(k)
if !found || f.Kind.IsObject() {
diff --git a/internal/request/graphql/schema/generate.go b/internal/request/graphql/schema/generate.go
index 254fae6e7d..608c83e381 100644
--- a/internal/request/graphql/schema/generate.go
+++ b/internal/request/graphql/schema/generate.go
@@ -1188,18 +1188,22 @@ func (g *Generator) genTypeFilterArgInput(obj *gql.Object) *gql.InputObject {
func() (gql.InputObjectConfigFieldMap, error) {
fields := gql.InputObjectConfigFieldMap{}
- fields["_and"] = &gql.InputObjectFieldConfig{
+ fields[request.FilterOpAnd] = &gql.InputObjectFieldConfig{
Description: schemaTypes.AndOperatorDescription,
Type: gql.NewList(gql.NewNonNull(selfRefType)),
}
- fields["_or"] = &gql.InputObjectFieldConfig{
+ fields[request.FilterOpOr] = &gql.InputObjectFieldConfig{
Description: schemaTypes.OrOperatorDescription,
Type: gql.NewList(gql.NewNonNull(selfRefType)),
}
- fields["_not"] = &gql.InputObjectFieldConfig{
+ fields[request.FilterOpNot] = &gql.InputObjectFieldConfig{
Description: schemaTypes.NotOperatorDescription,
Type: selfRefType,
}
+ fields[request.FilterOpAlias] = &gql.InputObjectFieldConfig{
+ Description: "The alias operator allows filters to target aliased fields.",
+ Type: schemaTypes.JSONScalarType(),
+ }
// generate basic filter operator blocks
for f, field := range obj.Fields() {
diff --git a/tests/integration/query/one_to_many/with_count_test.go b/tests/integration/query/one_to_many/with_count_test.go
index 2b4e8a5fbe..77d4e754f3 100644
--- a/tests/integration/query/one_to_many/with_count_test.go
+++ b/tests/integration/query/one_to_many/with_count_test.go
@@ -118,3 +118,66 @@ func TestQueryOneToManyWithCount(t *testing.T) {
executeTestCase(t, test)
}
}
+
+// This test documents the behavior of aggregate alias targeting which is not yet implemented.
+// https://github.com/sourcenetwork/defradb/issues/3195
+func TestQueryOneToMany_WithCountAliasFilter_ShouldFilterAll(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "One-to-many relation query from many side with count",
+ Actions: []any{
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "John Grisham",
+ "age": 65,
+ "verified": true
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "Cornelia Funke",
+ "age": 62,
+ "verified": false
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Painted House",
+ "rating": 4.9,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "A Time for Mercy",
+ "rating": 4.5,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Theif Lord",
+ "rating": 4.8,
+ "author_id": testUtils.NewDocIndex(1, 1),
+ },
+ },
+ testUtils.Request{
+ Request: `query {
+ Author(filter: {_alias: {publishedCount: {_gt: 0}}}) {
+ name
+ publishedCount: _count(published: {})
+ }
+ }`,
+ Results: map[string]any{
+ "Author": []map[string]any{},
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
diff --git a/tests/integration/query/one_to_many/with_filter_test.go b/tests/integration/query/one_to_many/with_filter_test.go
index 317d89b2fd..e507b43a78 100644
--- a/tests/integration/query/one_to_many/with_filter_test.go
+++ b/tests/integration/query/one_to_many/with_filter_test.go
@@ -562,3 +562,89 @@ func TestQueryOneToMany_WithCompoundOperatorInFilterAndRelationAndCaseInsensitiv
}
testUtils.ExecuteTestCase(t, test)
}
+
+func TestQueryOneToMany_WithAliasFilterOnRelated_Succeeds(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "One-to-many relation query from the many side, alias filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: bookAuthorGQLSchema,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ // bae-be6d8024-4953-5a92-84b4-f042d25230c6
+ Doc: `{
+ "name": "Painted House",
+ "rating": 4.9,
+ "author_id": "bae-e1ea288f-09fa-55fa-b0b5-0ac8941ea35b"
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `{
+ "name": "A Time for Mercy",
+ "rating": 4.5,
+ "author_id": "bae-e1ea288f-09fa-55fa-b0b5-0ac8941ea35b"
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `{
+ "name": "Theif Lord",
+ "rating": 4.8,
+ "author_id": "bae-72e8c691-9f20-55e7-9228-8af1cf54cace"
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ // bae-e1ea288f-09fa-55fa-b0b5-0ac8941ea35b
+ Doc: `{
+ "name": "John Grisham",
+ "age": 65,
+ "verified": true
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ // bae-72e8c691-9f20-55e7-9228-8af1cf54cace
+ Doc: `{
+ "name": "Cornelia Funke",
+ "age": 62,
+ "verified": false
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Author(filter: {_alias: {books: {rating: {_gt: 4.8}}}}) {
+ name
+ age
+ books: published {
+ name
+ rating
+ }
+ }
+ }`,
+ Results: map[string]any{
+ "Author": []map[string]any{
+ {
+ "name": "John Grisham",
+ "age": int64(65),
+ "books": []map[string]any{
+ {
+ "name": "Painted House",
+ "rating": 4.9,
+ },
+ {
+ "name": "A Time for Mercy",
+ "rating": 4.5,
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/query/simple/with_filter/with_alias_test.go b/tests/integration/query/simple/with_filter/with_alias_test.go
new file mode 100644
index 0000000000..a3e2c920be
--- /dev/null
+++ b/tests/integration/query/simple/with_filter/with_alias_test.go
@@ -0,0 +1,320 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package simple
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQuerySimple_WithAliasEqualsFilterBlock_ShouldFilter(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with alias filter(age)",
+ Actions: []any{
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "John",
+ "Age": 21
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Age": 32
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(filter: {_alias: {UserAge: {_eq: 21}}}) {
+ Name
+ UserAge: Age
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "Name": "John",
+ "UserAge": int64(21),
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQuerySimple_WithEmptyAlias_ShouldNotFilter(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with empty alias filter",
+ Actions: []any{
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "John",
+ "Age": 21
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Age": 32
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(filter: {_alias: {}}) {
+ Name
+ Age
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "Name": "John",
+ "Age": int64(21),
+ },
+ {
+ "Name": "Bob",
+ "Age": int64(32),
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQuerySimple_WithNullAlias_ShouldFilterAll(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with null alias filter",
+ Actions: []any{
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "John",
+ "Age": 21
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Age": 32
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(filter: {_alias: null}) {
+ Name
+ Age
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{},
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQuerySimple_WithNonObjectAlias_ShouldFilterAll(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with non object alias filter",
+ Actions: []any{
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "John",
+ "Age": 21
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Age": 32
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(filter: {_alias: 1}) {
+ Name
+ Age
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{},
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQuerySimple_WithNonExistantAlias_ShouldReturnError(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with non existant alias filter",
+ Actions: []any{
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "John",
+ "Age": 21
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Age": 32
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(filter: {_alias: {UserAge: {_eq: 21}}}) {
+ Name
+ Age
+ }
+ }`,
+ ExpectedError: `field or alias not found. Name: UserAge`,
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQuerySimple_WithNonAliasedField_ShouldMatchFilter(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with non aliased filter",
+ Actions: []any{
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "John",
+ "Age": 21
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Age": 32
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(filter: {_alias: {Age: {_eq: 32}}}) {
+ Name
+ Age
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "Name": "Bob",
+ "Age": int64(32),
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQuerySimple_WithCompoundAlias_ShouldMatchFilter(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with compound alias filter",
+ Actions: []any{
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "John",
+ "Age": 21
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Age": 32
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(filter: {
+ _and: [
+ {_alias: {userAge: {_gt: 30}}},
+ {_alias: {userAge: {_lt: 40}}}
+ ]
+ }) {
+ Name
+ userAge: Age
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "Name": "Bob",
+ "userAge": int64(32),
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQuerySimple_WithAliasWithCompound_ShouldMatchFilter(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with alias with compound filter",
+ Actions: []any{
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "John",
+ "Age": 21
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Age": 32
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(filter: {
+ _alias: {
+ _and: [
+ {userAge: {_gt: 30}},
+ {userAge: {_lt: 40}}
+ ]
+ }
+ }) {
+ Name
+ userAge: Age
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "Name": "Bob",
+ "userAge": int64(32),
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
diff --git a/tests/integration/schema/aggregates/inline_array_test.go b/tests/integration/schema/aggregates/inline_array_test.go
index 067f17c6ed..03ad59aa75 100644
--- a/tests/integration/schema/aggregates/inline_array_test.go
+++ b/tests/integration/schema/aggregates/inline_array_test.go
@@ -405,6 +405,12 @@ func aggregateGroupArg(fieldType string) map[string]any {
"name": fieldType + "ListOperatorBlock",
},
},
+ map[string]any{
+ "name": "_alias",
+ "type": map[string]any{
+ "name": "JSON",
+ },
+ },
map[string]any{
"name": "_and",
"type": map[string]any{
diff --git a/tests/integration/schema/default_fields.go b/tests/integration/schema/default_fields.go
index 18c09975a2..1f71f6bc2f 100644
--- a/tests/integration/schema/default_fields.go
+++ b/tests/integration/schema/default_fields.go
@@ -228,6 +228,7 @@ func buildFilterArg(objectName string, fields []argDef) Field {
filterArgName := objectName + "FilterArg"
inputFields := []any{
+ makeInputObject("_alias", "JSON", nil),
makeInputObject("_and", nil, map[string]any{
"kind": "NON_NULL",
"name": nil,
diff --git a/tests/integration/schema/filter_test.go b/tests/integration/schema/filter_test.go
index a48ac2e296..5b6fcb2c74 100644
--- a/tests/integration/schema/filter_test.go
+++ b/tests/integration/schema/filter_test.go
@@ -66,6 +66,13 @@ func TestFilterForSimpleSchema(t *testing.T) {
"type": map[string]any{
"name": "UsersFilterArg",
"inputFields": []any{
+ map[string]any{
+ "name": "_alias",
+ "type": map[string]any{
+ "name": "JSON",
+ "ofType": nil,
+ },
+ },
map[string]any{
"name": "_and",
"type": map[string]any{
@@ -198,6 +205,13 @@ func TestFilterForOneToOneSchema(t *testing.T) {
"type": map[string]any{
"name": "BookFilterArg",
"inputFields": []any{
+ map[string]any{
+ "name": "_alias",
+ "type": map[string]any{
+ "name": "JSON",
+ "ofType": nil,
+ },
+ },
map[string]any{
"name": "_and",
"type": map[string]any{
@@ -356,6 +370,13 @@ func TestSchemaFilterInputs_WithJSONField_Succeeds(t *testing.T) {
"type": map[string]any{
"name": "UsersFilterArg",
"inputFields": []any{
+ map[string]any{
+ "name": "_alias",
+ "type": map[string]any{
+ "name": "JSON",
+ "ofType": nil,
+ },
+ },
map[string]any{
"name": "_and",
"type": map[string]any{
From 6d6c9f2523f90a796f98c75d9bedaa6750639a15 Mon Sep 17 00:00:00 2001
From: AndrewSisley
Date: Wed, 6 Nov 2024 18:05:53 -0500
Subject: [PATCH 16/47] refactor: Rework core.Spans (#3210)
## Relevant issue(s)
Resolves #3209
## Description
Simplifies core.Spans a little bit, before removing some old hacky code
allowing a nicer way of handling headstore keys within the
planner/fetcher system.
---
internal/core/data.go | 80 +++----
internal/core/data_test.go | 200 +++++++++---------
internal/db/collection_get.go | 2 +-
internal/db/collection_index.go | 2 +-
internal/db/fetcher/dag.go | 28 +--
internal/db/fetcher/errors.go | 63 +++---
internal/db/fetcher/fetcher.go | 39 ++--
internal/db/fetcher/indexer.go | 8 +-
internal/db/fetcher/mocks/fetcher.go | 34 ++-
internal/db/fetcher/versioned.go | 48 ++---
internal/db/fetcher_test.go | 3 +-
internal/keys/datastore_doc.go | 4 +-
internal/keys/headstore_doc.go | 21 +-
internal/keys/key.go | 17 ++
internal/lens/fetcher.go | 4 +-
internal/planner/arbitrary_join.go | 2 +-
internal/planner/average.go | 10 +-
internal/planner/commit.go | 58 +++--
internal/planner/count.go | 2 +-
internal/planner/create.go | 6 +-
internal/planner/delete.go | 2 +-
internal/planner/group.go | 2 +-
internal/planner/lens.go | 2 +-
internal/planner/limit.go | 8 +-
internal/planner/max.go | 14 +-
internal/planner/min.go | 14 +-
internal/planner/multi.go | 4 +-
internal/planner/operation.go | 2 +-
internal/planner/order.go | 2 +-
internal/planner/pipe.go | 8 +-
internal/planner/planner.go | 2 +-
internal/planner/scan.go | 23 +-
internal/planner/select.go | 23 +-
internal/planner/sum.go | 2 +-
internal/planner/top.go | 2 +-
internal/planner/type_join.go | 6 +-
internal/planner/update.go | 2 +-
internal/planner/upsert.go | 2 +-
internal/planner/values.go | 6 +-
internal/planner/view.go | 4 +-
.../explain/default/delete_test.go | 7 +-
41 files changed, 379 insertions(+), 389 deletions(-)
diff --git a/internal/core/data.go b/internal/core/data.go
index 122d255c5f..d84186826c 100644
--- a/internal/core/data.go
+++ b/internal/core/data.go
@@ -17,40 +17,22 @@ import (
)
// Span is a range of keys from [Start, End).
-type Span interface {
- // Start returns the starting key of the Span.
- Start() keys.DataStoreKey
- // End returns the ending key of the Span.
- End() keys.DataStoreKey
- // Compare returns -1 if the provided span is less, 0 if it is equal, and 1 if its greater.
- Compare(Span) SpanComparisonResult
-}
+type Span struct {
+ // Start represents the starting key of the Span.
+ Start keys.Walkable
-type span struct {
- start keys.DataStoreKey
- end keys.DataStoreKey
+ // End represents the ending key of the Span.
+ End keys.Walkable
}
-var _ Span = span{}
-
// NewSpan creates a new Span from the provided start and end keys.
-func NewSpan(start, end keys.DataStoreKey) Span {
- return span{
- start: start,
- end: end,
+func NewSpan(start, end keys.Walkable) Span {
+ return Span{
+ Start: start,
+ End: end,
}
}
-// Start returns the starting key of the Span.
-func (s span) Start() keys.DataStoreKey {
- return s.start
-}
-
-// End returns the ending key of the Span.
-func (s span) End() keys.DataStoreKey {
- return s.end
-}
-
// SpanComparisonResult is the result of comparing two spans.
type SpanComparisonResult uint
@@ -73,18 +55,18 @@ const (
// Compares two spans returning how the compare to each other.
// If the end of one span is adjacent to the other (with no gap possible)
// then those ends are considered equal.
-func (this span) Compare(other Span) SpanComparisonResult {
+func (this Span) Compare(other Span) SpanComparisonResult {
if this == other {
return Equal
}
- thisStart := this.start.ToString()
- thisEnd := this.end.ToString()
- otherStart := other.Start().ToString()
- otherEnd := other.End().ToString()
+ thisStart := this.Start.ToString()
+ thisEnd := this.End.ToString()
+ otherStart := other.Start.ToString()
+ otherEnd := other.End.ToString()
if thisStart < otherStart {
- if thisEnd == otherStart || isAdjacent(this.end, other.Start()) {
+ if thisEnd == otherStart || isAdjacent(this.End, other.Start) {
return StartBeforeEndEqualToStart
}
@@ -133,33 +115,19 @@ func (this span) Compare(other Span) SpanComparisonResult {
}
}
- if thisStart == otherEnd || isAdjacent(this.start, other.End()) {
+ if thisStart == otherEnd || isAdjacent(this.Start, other.End) {
return StartEqualToEndEndAfter
}
return After
}
-func isAdjacent(this keys.DataStoreKey, other keys.DataStoreKey) bool {
+func isAdjacent(this keys.Walkable, other keys.Walkable) bool {
return len(this.ToString()) == len(other.ToString()) &&
(this.PrefixEnd().ToString() == other.ToString() ||
this.ToString() == other.PrefixEnd().ToString())
}
-// Spans is a collection of individual spans.
-type Spans struct {
- HasValue bool
- Value []Span
-}
-
-// NewSpans creates a new Spans from the provided spans.
-func NewSpans(spans ...Span) Spans {
- return Spans{
- HasValue: true,
- Value: spans,
- }
-}
-
// Merges an unordered, potentially overlapping and/or duplicated collection of Spans into
// a unique set in ascending order, where overlapping spans are merged into a single span.
// Will handle spans with keys of different lengths, where one might be a prefix of another.
@@ -186,7 +154,7 @@ func MergeAscending(spans []Span) []Span {
}
// Then we insert
- newArray[i] = NewSpan(span.Start(), span.End())
+ newArray[i] = NewSpan(span.Start, span.End)
// Move the values prior to the new one across
for j := 0; j < i; j++ {
@@ -197,12 +165,12 @@ func MergeAscending(spans []Span) []Span {
// Exit the unique-span loop, this span has been handled
i = len(uniqueSpans)
case StartBeforeEndEqualToStart, StartBeforeEndWithin, StartBeforeEndEqual:
- uniqueSpans[i] = NewSpan(span.Start(), uniqueSpan.End())
+ uniqueSpans[i] = NewSpan(span.Start, uniqueSpan.End)
uniqueSpanFound = true
i++
case StartBeforeEndAfter:
- uniqueSpans = removeBefore(uniqueSpans, i, span.End().ToString())
- uniqueSpans[i] = NewSpan(span.Start(), span.End())
+ uniqueSpans = removeBefore(uniqueSpans, i, span.End.ToString())
+ uniqueSpans[i] = NewSpan(span.Start, span.End)
uniqueSpanFound = true
// Exit the unique-span loop, this span has been handled
i = len(uniqueSpans)
@@ -211,8 +179,8 @@ func MergeAscending(spans []Span) []Span {
// Do nothing, span is contained within an existing unique-span
i = len(uniqueSpans)
case StartEqualEndAfter, StartWithinEndAfter, StartEqualToEndEndAfter:
- uniqueSpans = removeBefore(uniqueSpans, i, span.End().ToString())
- uniqueSpans[i] = NewSpan(uniqueSpan.Start(), span.End())
+ uniqueSpans = removeBefore(uniqueSpans, i, span.End.ToString())
+ uniqueSpans[i] = NewSpan(uniqueSpan.Start, span.End)
uniqueSpanFound = true
// Exit the unique-span loop, this span has been handled
i = len(uniqueSpans)
@@ -234,7 +202,7 @@ func MergeAscending(spans []Span) []Span {
func removeBefore(spans []Span, startIndex int, end string) []Span {
indexOfLastMatchingItem := -1
for i := startIndex; i < len(spans); i++ {
- if spans[i].End().ToString() <= end {
+ if spans[i].End.ToString() <= end {
indexOfLastMatchingItem = i
}
}
diff --git a/internal/core/data_test.go b/internal/core/data_test.go
index d55851b795..154441819e 100644
--- a/internal/core/data_test.go
+++ b/internal/core/data_test.go
@@ -34,8 +34,8 @@ func TestMergeAscending_ReturnsSingle_GivenSingle(t *testing.T) {
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
}
func TestMergeAscending_ReturnsSecondBeforeFirst_GivenKeysInReverseOrder(t *testing.T) {
@@ -52,10 +52,10 @@ func TestMergeAscending_ReturnsSecondBeforeFirst_GivenKeysInReverseOrder(t *test
result := MergeAscending(input)
assert.Len(t, result, 2)
- assert.Equal(t, start2, result[0].Start())
- assert.Equal(t, end2, result[0].End())
- assert.Equal(t, start1, result[1].Start())
- assert.Equal(t, end1, result[1].End())
+ assert.Equal(t, start2, result[0].Start)
+ assert.Equal(t, end2, result[0].End)
+ assert.Equal(t, start1, result[1].Start)
+ assert.Equal(t, end1, result[1].End)
}
func TestMergeAscending_ReturnsItemsInOrder_GivenKeysInMixedOrder(t *testing.T) {
@@ -75,13 +75,13 @@ func TestMergeAscending_ReturnsItemsInOrder_GivenKeysInMixedOrder(t *testing.T)
result := MergeAscending(input)
assert.Len(t, result, 3)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
// Span 3 should be returned between one and two
- assert.Equal(t, start3, result[1].Start())
- assert.Equal(t, end3, result[1].End())
- assert.Equal(t, start2, result[2].Start())
- assert.Equal(t, end2, result[2].End())
+ assert.Equal(t, start3, result[1].Start)
+ assert.Equal(t, end3, result[1].End)
+ assert.Equal(t, start2, result[2].Start)
+ assert.Equal(t, end2, result[2].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndEqualToStart(t *testing.T) {
@@ -97,8 +97,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndEqualToStart(t *testing
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start2, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentToStart(t *testing.T) {
@@ -114,8 +114,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentToStart(t *test
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start2, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndWithin(t *testing.T) {
@@ -131,8 +131,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndWithin(t *testing.T) {
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start2, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndWithin(t *testing.T) {
@@ -148,8 +148,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndWithin(t *testing.T)
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start2, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndWithinEndPrefix(t *testing.T) {
@@ -165,8 +165,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndWithinEndPrefix(t *test
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start2, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndWithinEndPrefix(t *testing.T) {
@@ -182,8 +182,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndWithinEndPrefix(t *te
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start2, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndEqual(t *testing.T) {
@@ -199,8 +199,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndEqual(t *testing.T) {
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start2, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentAndBefore(t *testing.T) {
@@ -216,8 +216,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentAndBefore(t *te
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start2, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentAndGreater(t *testing.T) {
@@ -233,8 +233,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentAndGreater(t *t
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start())
- assert.Equal(t, end2, result[0].End())
+ assert.Equal(t, start2, result[0].Start)
+ assert.Equal(t, end2, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndEqual(t *testing.T) {
@@ -250,8 +250,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndEqual(t *testing.T) {
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start2, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndAdjacentAndBefore(t *testing.T) {
@@ -267,8 +267,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndAdjacentAndBefore(t *
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start2, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndAdjacentAndAfter(t *testing.T) {
@@ -284,8 +284,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndAdjacentAndAfter(t *t
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start())
- assert.Equal(t, end2, result[0].End())
+ assert.Equal(t, start2, result[0].Start)
+ assert.Equal(t, end2, result[0].End)
}
func TestMergeAscending_ReturnsMiddleSpansMerged_GivenSpanCoveringMiddleSpans(t *testing.T) {
@@ -310,13 +310,13 @@ func TestMergeAscending_ReturnsMiddleSpansMerged_GivenSpanCoveringMiddleSpans(t
result := MergeAscending(input)
assert.Len(t, result, 3)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
// Spans 2 and 3 are within span 5
- assert.Equal(t, start5, result[1].Start())
- assert.Equal(t, end5, result[1].End())
- assert.Equal(t, start4, result[2].Start())
- assert.Equal(t, end4, result[2].End())
+ assert.Equal(t, start5, result[1].Start)
+ assert.Equal(t, end5, result[1].End)
+ assert.Equal(t, start4, result[2].Start)
+ assert.Equal(t, end4, result[2].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartEqualEndWithin(t *testing.T) {
@@ -332,8 +332,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartEqualEndWithin(t *testing.T) {
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartEqualEndWithinEndPrefix(t *testing.T) {
@@ -349,8 +349,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartEqualEndWithinEndPrefix(t *testi
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenDuplicates(t *testing.T) {
@@ -364,8 +364,8 @@ func TestMergeAscending_ReturnsSingle_GivenDuplicates(t *testing.T) {
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartWithinEndWithin(t *testing.T) {
@@ -381,8 +381,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartWithinEndWithin(t *testing.T) {
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartWithinEndWithinEndPrefix(t *testing.T) {
@@ -398,8 +398,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartWithinEndWithinEndPrefix(t *test
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartWithinEndEqual(t *testing.T) {
@@ -415,8 +415,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartWithinEndEqual(t *testing.T) {
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartWithinEndAdjacentAndBefore(t *testing.T) {
@@ -432,8 +432,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartWithinEndAdjacentAndBefore(t *te
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartWithinEndAdjacentAndAfter(t *testing.T) {
@@ -449,8 +449,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartWithinEndAdjacentAndAfter(t *tes
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end2, result[0].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end2, result[0].End)
}
func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartEqualEndAfterSpanCoveringMiddleSpans(
@@ -477,13 +477,13 @@ func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartEqualEndAfterSpanCove
result := MergeAscending(input)
assert.Len(t, result, 3)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end1, result[0].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
// Spans 2 and 3 are within span 5
- assert.Equal(t, start5, result[1].Start())
- assert.Equal(t, end5, result[1].End())
- assert.Equal(t, start4, result[2].Start())
- assert.Equal(t, end4, result[2].End())
+ assert.Equal(t, start5, result[1].Start)
+ assert.Equal(t, end5, result[1].End)
+ assert.Equal(t, start4, result[2].Start)
+ assert.Equal(t, end4, result[2].End)
}
func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartWithinEndAfterSpanCoveringMiddleSpans(
@@ -510,12 +510,12 @@ func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartWithinEndAfterSpanCov
result := MergeAscending(input)
assert.Len(t, result, 3)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end1, result[0].End())
- assert.Equal(t, start2, result[1].Start())
- assert.Equal(t, end5, result[1].End())
- assert.Equal(t, start4, result[2].Start())
- assert.Equal(t, end4, result[2].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
+ assert.Equal(t, start2, result[1].Start)
+ assert.Equal(t, end5, result[1].End)
+ assert.Equal(t, start4, result[2].Start)
+ assert.Equal(t, end4, result[2].End)
}
func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartEqualToEndEndAfterSpanCoveringMiddleSpans(
@@ -542,12 +542,12 @@ func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartEqualToEndEndAfterSpa
result := MergeAscending(input)
assert.Len(t, result, 3)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end1, result[0].End())
- assert.Equal(t, start2, result[1].Start())
- assert.Equal(t, end5, result[1].End())
- assert.Equal(t, start4, result[2].Start())
- assert.Equal(t, end4, result[2].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
+ assert.Equal(t, start2, result[1].Start)
+ assert.Equal(t, end5, result[1].End)
+ assert.Equal(t, start4, result[2].Start)
+ assert.Equal(t, end4, result[2].End)
}
func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartAdjacentAndBeforeEndEndAfterSpanCoveringMiddleSpans(
@@ -574,12 +574,12 @@ func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartAdjacentAndBeforeEndE
result := MergeAscending(input)
assert.Len(t, result, 3)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end1, result[0].End())
- assert.Equal(t, start2, result[1].Start())
- assert.Equal(t, end5, result[1].End())
- assert.Equal(t, start4, result[2].Start())
- assert.Equal(t, end4, result[2].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
+ assert.Equal(t, start2, result[1].Start)
+ assert.Equal(t, end5, result[1].End)
+ assert.Equal(t, start4, result[2].Start)
+ assert.Equal(t, end4, result[2].End)
}
func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartAdjacentAndAfterEndEndAfterSpanCoveringMiddleSpans(
@@ -606,12 +606,12 @@ func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartAdjacentAndAfterEndEn
result := MergeAscending(input)
assert.Len(t, result, 3)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end1, result[0].End())
- assert.Equal(t, start2, result[1].Start())
- assert.Equal(t, end5, result[1].End())
- assert.Equal(t, start4, result[2].Start())
- assert.Equal(t, end4, result[2].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
+ assert.Equal(t, start2, result[1].Start)
+ assert.Equal(t, end5, result[1].End)
+ assert.Equal(t, start4, result[2].Start)
+ assert.Equal(t, end4, result[2].End)
}
func TestMergeAscending_ReturnsTwoItems_GivenSecondItemAfterFirst(t *testing.T) {
@@ -627,10 +627,10 @@ func TestMergeAscending_ReturnsTwoItems_GivenSecondItemAfterFirst(t *testing.T)
result := MergeAscending(input)
assert.Len(t, result, 2)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end1, result[0].End())
- assert.Equal(t, start2, result[1].Start())
- assert.Equal(t, end2, result[1].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end1, result[0].End)
+ assert.Equal(t, start2, result[1].Start)
+ assert.Equal(t, end2, result[1].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndEqual(t *testing.T) {
@@ -646,8 +646,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndEqual(t *
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end2, result[0].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end2, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndAdjacentAndAfter(
@@ -665,8 +665,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndAdjacentA
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end2, result[0].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end2, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndAfter(t *testing.T) {
@@ -682,8 +682,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndAfter(t *
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end2, result[0].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end2, result[0].End)
}
func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndAfterEndEndAfter(t *testing.T) {
@@ -699,6 +699,6 @@ func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndAfterEndEndAfter(t *t
result := MergeAscending(input)
assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start())
- assert.Equal(t, end2, result[0].End())
+ assert.Equal(t, start1, result[0].Start)
+ assert.Equal(t, end2, result[0].End)
}
diff --git a/internal/db/collection_get.go b/internal/db/collection_get.go
index f2db5f0f8c..e68df05df4 100644
--- a/internal/db/collection_get.go
+++ b/internal/db/collection_get.go
@@ -73,7 +73,7 @@ func (c *collection) get(
// construct target DS key from DocID.
targetKey := base.MakeDataStoreKeyWithCollectionAndDocID(c.Description(), primaryKey.DocID)
// run the doc fetcher
- err = df.Start(ctx, core.NewSpans(core.NewSpan(targetKey, targetKey.PrefixEnd())))
+ err = df.Start(ctx, core.NewSpan(targetKey, targetKey.PrefixEnd()))
if err != nil {
_ = df.Close()
return nil, err
diff --git a/internal/db/collection_index.go b/internal/db/collection_index.go
index b1baad8369..f268e14f2e 100644
--- a/internal/db/collection_index.go
+++ b/internal/db/collection_index.go
@@ -317,7 +317,7 @@ func (c *collection) iterateAllDocs(
return errors.Join(err, df.Close())
}
start := base.MakeDataStoreKeyWithCollectionDescription(c.Description())
- spans := core.NewSpans(core.NewSpan(start, start.PrefixEnd()))
+ spans := core.NewSpan(start, start.PrefixEnd())
err = df.Start(ctx, spans)
if err != nil {
diff --git a/internal/db/fetcher/dag.go b/internal/db/fetcher/dag.go
index 395354fc08..723b821a97 100644
--- a/internal/db/fetcher/dag.go
+++ b/internal/db/fetcher/dag.go
@@ -12,21 +12,17 @@ package fetcher
import (
"context"
- "sort"
- "strings"
"github.com/ipfs/go-cid"
dsq "github.com/ipfs/go-datastore/query"
"github.com/sourcenetwork/immutable"
"github.com/sourcenetwork/defradb/datastore"
- "github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/keys"
)
// HeadFetcher is a utility to incrementally fetch all the MerkleCRDT heads of a given doc/field.
type HeadFetcher struct {
- spans core.Spans
fieldId immutable.Option[string]
kvIter dsq.Results
@@ -35,33 +31,13 @@ type HeadFetcher struct {
func (hf *HeadFetcher) Start(
ctx context.Context,
txn datastore.Txn,
- spans core.Spans,
+ prefix keys.HeadStoreKey,
fieldId immutable.Option[string],
) error {
- if len(spans.Value) == 0 {
- spans = core.NewSpans(
- core.NewSpan(
- keys.DataStoreKey{},
- keys.DataStoreKey{}.PrefixEnd(),
- ),
- )
- }
-
- if len(spans.Value) > 1 {
- // if we have multiple spans, we need to sort them by their start position
- // so we can do a single iterative sweep
- sort.Slice(spans.Value, func(i, j int) bool {
- // compare by strings if i < j.
- // apply the '!= df.reverse' to reverse the sort
- // if we need to
- return (strings.Compare(spans.Value[i].Start().ToString(), spans.Value[j].Start().ToString()) < 0)
- })
- }
- hf.spans = spans
hf.fieldId = fieldId
q := dsq.Query{
- Prefix: hf.spans.Value[0].Start().ToString(),
+ Prefix: prefix.ToString(),
Orders: []dsq.Order{dsq.OrderByKey{}},
}
diff --git a/internal/db/fetcher/errors.go b/internal/db/fetcher/errors.go
index 22f0c8b182..0a8a7d21b4 100644
--- a/internal/db/fetcher/errors.go
+++ b/internal/db/fetcher/errors.go
@@ -18,39 +18,37 @@ import (
)
const (
- errFieldIdNotFound string = "unable to find SchemaFieldDescription for given FieldId"
- errFailedToDecodeCIDForVFetcher string = "failed to decode CID for VersionedFetcher"
- errFailedToSeek string = "seek failed"
- errFailedToMergeState string = "failed merging state"
- errVFetcherFailedToFindBlock string = "(version fetcher) failed to find block in blockstore"
- errVFetcherFailedToGetBlock string = "(version fetcher) failed to get block in blockstore"
- errVFetcherFailedToWriteBlock string = "(version fetcher) failed to write block to blockstore"
- errVFetcherFailedToDecodeNode string = "(version fetcher) failed to decode protobuf"
- errVFetcherFailedToGetDagLink string = "(version fetcher) failed to get node link from DAG"
- errFailedToGetDagNode string = "failed to get DAG Node"
- errMissingMapper string = "missing document mapper"
- errInvalidInOperatorValue string = "invalid _in/_nin value"
- errInvalidFilterOperator string = "invalid filter operator is provided"
- errNotSupportedKindByIndex string = "kind is not supported by index"
- errUnexpectedTypeValue string = "unexpected type value"
+ errFieldIdNotFound string = "unable to find SchemaFieldDescription for given FieldId"
+ errFailedToSeek string = "seek failed"
+ errFailedToMergeState string = "failed merging state"
+ errVFetcherFailedToFindBlock string = "(version fetcher) failed to find block in blockstore"
+ errVFetcherFailedToGetBlock string = "(version fetcher) failed to get block in blockstore"
+ errVFetcherFailedToWriteBlock string = "(version fetcher) failed to write block to blockstore"
+ errVFetcherFailedToDecodeNode string = "(version fetcher) failed to decode protobuf"
+ errVFetcherFailedToGetDagLink string = "(version fetcher) failed to get node link from DAG"
+ errFailedToGetDagNode string = "failed to get DAG Node"
+ errMissingMapper string = "missing document mapper"
+ errInvalidInOperatorValue string = "invalid _in/_nin value"
+ errInvalidFilterOperator string = "invalid filter operator is provided"
+ errNotSupportedKindByIndex string = "kind is not supported by index"
+ errUnexpectedTypeValue string = "unexpected type value"
)
var (
- ErrFieldIdNotFound = errors.New(errFieldIdNotFound)
- ErrFailedToDecodeCIDForVFetcher = errors.New(errFailedToDecodeCIDForVFetcher)
- ErrFailedToSeek = errors.New(errFailedToSeek)
- ErrFailedToMergeState = errors.New(errFailedToMergeState)
- ErrVFetcherFailedToFindBlock = errors.New(errVFetcherFailedToFindBlock)
- ErrVFetcherFailedToGetBlock = errors.New(errVFetcherFailedToGetBlock)
- ErrVFetcherFailedToWriteBlock = errors.New(errVFetcherFailedToWriteBlock)
- ErrVFetcherFailedToDecodeNode = errors.New(errVFetcherFailedToDecodeNode)
- ErrVFetcherFailedToGetDagLink = errors.New(errVFetcherFailedToGetDagLink)
- ErrFailedToGetDagNode = errors.New(errFailedToGetDagNode)
- ErrMissingMapper = errors.New(errMissingMapper)
- ErrSingleSpanOnly = errors.New("spans must contain only a single entry")
- ErrInvalidInOperatorValue = errors.New(errInvalidInOperatorValue)
- ErrInvalidFilterOperator = errors.New(errInvalidFilterOperator)
- ErrUnexpectedTypeValue = errors.New(errUnexpectedTypeValue)
+ ErrFieldIdNotFound = errors.New(errFieldIdNotFound)
+ ErrFailedToSeek = errors.New(errFailedToSeek)
+ ErrFailedToMergeState = errors.New(errFailedToMergeState)
+ ErrVFetcherFailedToFindBlock = errors.New(errVFetcherFailedToFindBlock)
+ ErrVFetcherFailedToGetBlock = errors.New(errVFetcherFailedToGetBlock)
+ ErrVFetcherFailedToWriteBlock = errors.New(errVFetcherFailedToWriteBlock)
+ ErrVFetcherFailedToDecodeNode = errors.New(errVFetcherFailedToDecodeNode)
+ ErrVFetcherFailedToGetDagLink = errors.New(errVFetcherFailedToGetDagLink)
+ ErrFailedToGetDagNode = errors.New(errFailedToGetDagNode)
+ ErrMissingMapper = errors.New(errMissingMapper)
+ ErrSingleSpanOnly = errors.New("spans must contain only a single entry")
+ ErrInvalidInOperatorValue = errors.New(errInvalidInOperatorValue)
+ ErrInvalidFilterOperator = errors.New(errInvalidFilterOperator)
+ ErrUnexpectedTypeValue = errors.New(errUnexpectedTypeValue)
)
// NewErrFieldIdNotFound returns an error indicating that the given FieldId was not found.
@@ -58,11 +56,6 @@ func NewErrFieldIdNotFound(fieldId uint32) error {
return errors.New(errFieldIdNotFound, errors.NewKV("FieldId", fieldId))
}
-// NewErrFailedToDecodeCIDForVFetcher returns an error indicating that the given CID could not be decoded.
-func NewErrFailedToDecodeCIDForVFetcher(inner error) error {
- return errors.Wrap(errFailedToDecodeCIDForVFetcher, inner)
-}
-
// NewErrFailedToSeek returns an error indicating that the given target could not be seeked to.
func NewErrFailedToSeek(target any, inner error) error {
return errors.Wrap(errFailedToSeek, inner, errors.NewKV("Target", target))
diff --git a/internal/db/fetcher/fetcher.go b/internal/db/fetcher/fetcher.go
index 62a03a4d17..0ca828c4b2 100644
--- a/internal/db/fetcher/fetcher.go
+++ b/internal/db/fetcher/fetcher.go
@@ -72,7 +72,7 @@ type Fetcher interface {
reverse bool,
showDeleted bool,
) error
- Start(ctx context.Context, spans core.Spans) error
+ Start(ctx context.Context, spans ...core.Span) error
FetchNext(ctx context.Context) (EncodedDocument, ExecInfo, error)
Close() error
}
@@ -98,7 +98,7 @@ type DocumentFetcher struct {
deletedDocs bool
txn datastore.Txn
- spans core.Spans
+ spans []core.Span
order []dsq.Order
curSpanIndex int
@@ -243,7 +243,7 @@ func (df *DocumentFetcher) init(
return nil
}
-func (df *DocumentFetcher) Start(ctx context.Context, spans core.Spans) error {
+func (df *DocumentFetcher) Start(ctx context.Context, spans ...core.Span) error {
err := df.start(ctx, spans, false)
if err != nil {
return err
@@ -257,7 +257,7 @@ func (df *DocumentFetcher) Start(ctx context.Context, spans core.Spans) error {
}
// Start implements DocumentFetcher.
-func (df *DocumentFetcher) start(ctx context.Context, spans core.Spans, withDeleted bool) error {
+func (df *DocumentFetcher) start(ctx context.Context, spans []core.Span, withDeleted bool) error {
if df.col == nil {
return client.NewErrUninitializeProperty("DocumentFetcher", "CollectionDescription")
}
@@ -267,22 +267,31 @@ func (df *DocumentFetcher) start(ctx context.Context, spans core.Spans, withDele
df.deletedDocs = withDeleted
- if !spans.HasValue { // no specified spans so create a prefix scan key for the entire collection
+ if len(spans) == 0 { // no specified spans so create a prefix scan key for the entire collection
start := base.MakeDataStoreKeyWithCollectionDescription(df.col.Description())
if withDeleted {
start = start.WithDeletedFlag()
} else {
start = start.WithValueFlag()
}
- df.spans = core.NewSpans(core.NewSpan(start, start.PrefixEnd()))
+ df.spans = []core.Span{core.NewSpan(start, start.PrefixEnd())}
} else {
- valueSpans := make([]core.Span, len(spans.Value))
- for i, span := range spans.Value {
- // We can only handle value keys, so here we ensure we only read value keys
+ valueSpans := make([]core.Span, len(spans))
+ for i, span := range spans {
if withDeleted {
- valueSpans[i] = core.NewSpan(span.Start().WithDeletedFlag(), span.End().WithDeletedFlag())
+ // DocumentFetcher only ever receives document keys
+ //nolint:forcetypeassert
+ valueSpans[i] = core.NewSpan(
+ span.Start.(keys.DataStoreKey).WithDeletedFlag(),
+ span.End.(keys.DataStoreKey).WithDeletedFlag(),
+ )
} else {
- valueSpans[i] = core.NewSpan(span.Start().WithValueFlag(), span.End().WithValueFlag())
+ // DocumentFetcher only ever receives document keys
+ //nolint:forcetypeassert
+ valueSpans[i] = core.NewSpan(
+ span.Start.(keys.DataStoreKey).WithValueFlag(),
+ span.End.(keys.DataStoreKey).WithValueFlag(),
+ )
}
}
@@ -292,7 +301,7 @@ func (df *DocumentFetcher) start(ctx context.Context, spans core.Spans, withDele
spans[i], spans[j] = spans[j], spans[i]
}
}
- df.spans = core.NewSpans(spans...)
+ df.spans = spans
}
df.curSpanIndex = -1
@@ -309,7 +318,7 @@ func (df *DocumentFetcher) start(ctx context.Context, spans core.Spans, withDele
func (df *DocumentFetcher) startNextSpan(ctx context.Context) (bool, error) {
nextSpanIndex := df.curSpanIndex + 1
- if nextSpanIndex >= len(df.spans.Value) {
+ if nextSpanIndex >= len(df.spans) {
return false, nil
}
@@ -330,8 +339,8 @@ func (df *DocumentFetcher) startNextSpan(ctx context.Context) (bool, error) {
}
}
- span := df.spans.Value[nextSpanIndex]
- df.kvResultsIter, err = df.kvIter.IteratePrefix(ctx, span.Start().ToDS(), span.End().ToDS())
+ span := df.spans[nextSpanIndex]
+ df.kvResultsIter, err = df.kvIter.IteratePrefix(ctx, span.Start.ToDS(), span.End.ToDS())
if err != nil {
return false, err
}
diff --git a/internal/db/fetcher/indexer.go b/internal/db/fetcher/indexer.go
index 4d370146ed..3f7b82b6e0 100644
--- a/internal/db/fetcher/indexer.go
+++ b/internal/db/fetcher/indexer.go
@@ -124,9 +124,9 @@ outer:
return err
}
-func (f *IndexFetcher) Start(ctx context.Context, spans core.Spans) error {
+func (f *IndexFetcher) Start(ctx context.Context, spans ...core.Span) error {
if f.indexIter == nil {
- return f.docFetcher.Start(ctx, spans)
+ return f.docFetcher.Start(ctx, spans...)
}
return f.indexIter.Init(ctx, f.txn.Datastore())
}
@@ -192,8 +192,8 @@ func (f *IndexFetcher) FetchNext(ctx context.Context) (EncodedDocument, ExecInfo
if len(f.docFields) > 0 {
targetKey := base.MakeDataStoreKeyWithCollectionAndDocID(f.col.Description(), string(f.doc.id))
- spans := core.NewSpans(core.NewSpan(targetKey, targetKey.PrefixEnd()))
- err := f.docFetcher.Start(ctx, spans)
+ span := core.NewSpan(targetKey, targetKey.PrefixEnd())
+ err := f.docFetcher.Start(ctx, span)
if err != nil {
return nil, ExecInfo{}, err
}
diff --git a/internal/db/fetcher/mocks/fetcher.go b/internal/db/fetcher/mocks/fetcher.go
index 2dac3b0079..396bf67345 100644
--- a/internal/db/fetcher/mocks/fetcher.go
+++ b/internal/db/fetcher/mocks/fetcher.go
@@ -202,16 +202,23 @@ func (_c *Fetcher_Init_Call) RunAndReturn(run func(context.Context, immutable.Op
}
// Start provides a mock function with given fields: ctx, spans
-func (_m *Fetcher) Start(ctx context.Context, spans core.Spans) error {
- ret := _m.Called(ctx, spans)
+func (_m *Fetcher) Start(ctx context.Context, spans ...core.Span) error {
+ _va := make([]interface{}, len(spans))
+ for _i := range spans {
+ _va[_i] = spans[_i]
+ }
+ var _ca []interface{}
+ _ca = append(_ca, ctx)
+ _ca = append(_ca, _va...)
+ ret := _m.Called(_ca...)
if len(ret) == 0 {
panic("no return value specified for Start")
}
var r0 error
- if rf, ok := ret.Get(0).(func(context.Context, core.Spans) error); ok {
- r0 = rf(ctx, spans)
+ if rf, ok := ret.Get(0).(func(context.Context, ...core.Span) error); ok {
+ r0 = rf(ctx, spans...)
} else {
r0 = ret.Error(0)
}
@@ -226,14 +233,21 @@ type Fetcher_Start_Call struct {
// Start is a helper method to define mock.On call
// - ctx context.Context
-// - spans core.Spans
-func (_e *Fetcher_Expecter) Start(ctx interface{}, spans interface{}) *Fetcher_Start_Call {
- return &Fetcher_Start_Call{Call: _e.mock.On("Start", ctx, spans)}
+// - spans ...core.Span
+func (_e *Fetcher_Expecter) Start(ctx interface{}, spans ...interface{}) *Fetcher_Start_Call {
+ return &Fetcher_Start_Call{Call: _e.mock.On("Start",
+ append([]interface{}{ctx}, spans...)...)}
}
-func (_c *Fetcher_Start_Call) Run(run func(ctx context.Context, spans core.Spans)) *Fetcher_Start_Call {
+func (_c *Fetcher_Start_Call) Run(run func(ctx context.Context, spans ...core.Span)) *Fetcher_Start_Call {
_c.Call.Run(func(args mock.Arguments) {
- run(args[0].(context.Context), args[1].(core.Spans))
+ variadicArgs := make([]core.Span, len(args)-1)
+ for i, a := range args[1:] {
+ if a != nil {
+ variadicArgs[i] = a.(core.Span)
+ }
+ }
+ run(args[0].(context.Context), variadicArgs...)
})
return _c
}
@@ -243,7 +257,7 @@ func (_c *Fetcher_Start_Call) Return(_a0 error) *Fetcher_Start_Call {
return _c
}
-func (_c *Fetcher_Start_Call) RunAndReturn(run func(context.Context, core.Spans) error) *Fetcher_Start_Call {
+func (_c *Fetcher_Start_Call) RunAndReturn(run func(context.Context, ...core.Span) error) *Fetcher_Start_Call {
_c.Call.Return(run)
return _c
}
diff --git a/internal/db/fetcher/versioned.go b/internal/db/fetcher/versioned.go
index b10b6ca84f..baa3acfcfb 100644
--- a/internal/db/fetcher/versioned.go
+++ b/internal/db/fetcher/versioned.go
@@ -89,8 +89,7 @@ type VersionedFetcher struct {
root datastore.Rootstore
store datastore.Txn
- dsKey keys.DataStoreKey
- version cid.Cid
+ dsKey keys.DataStoreKey
queuedCids *list.List
@@ -153,42 +152,37 @@ func (vf *VersionedFetcher) Init(
}
// Start serializes the correct state according to the Key and CID.
-func (vf *VersionedFetcher) Start(ctx context.Context, spans core.Spans) error {
+func (vf *VersionedFetcher) Start(ctx context.Context, spans ...core.Span) error {
if vf.col == nil {
return client.NewErrUninitializeProperty("VersionedFetcher", "CollectionDescription")
}
- if len(spans.Value) != 1 {
+ if len(spans) != 1 {
return ErrSingleSpanOnly
}
- // For the VersionedFetcher, the spans needs to be in the format
- // Span{Start: DocID, End: CID}
- dk := spans.Value[0].Start()
- cidRaw := spans.Value[0].End()
- if dk.DocID == "" {
+ // VersionedFetcher only ever receives a headstore key
+ //nolint:forcetypeassert
+ prefix := spans[0].Start.(keys.HeadStoreKey)
+ dk := prefix.DocID
+ cid := prefix.Cid
+ if dk == "" {
return client.NewErrUninitializeProperty("Spans", "DocID")
- } else if cidRaw.DocID == "" { // todo: dont abuse DataStoreKey/Span like this!
+ } else if !cid.Defined() {
return client.NewErrUninitializeProperty("Spans", "CID")
}
- // decode cidRaw from core.Key to cid.Cid
- // need to remove '/' prefix from the core.Key
-
- c, err := cid.Decode(cidRaw.DocID)
- if err != nil {
- return NewErrFailedToDecodeCIDForVFetcher(err)
- }
-
vf.ctx = ctx
- vf.dsKey = dk.WithCollectionRoot(vf.col.Description().RootID)
- vf.version = c
+ vf.dsKey = keys.DataStoreKey{
+ CollectionRootID: vf.col.Description().RootID,
+ DocID: dk,
+ }
- if err := vf.seekTo(vf.version); err != nil {
- return NewErrFailedToSeek(c, err)
+ if err := vf.seekTo(cid); err != nil {
+ return NewErrFailedToSeek(cid, err)
}
- return vf.DocumentFetcher.Start(ctx, core.Spans{})
+ return vf.DocumentFetcher.Start(ctx)
}
// Rootstore returns the rootstore of the VersionedFetcher.
@@ -217,7 +211,7 @@ func (vf *VersionedFetcher) SeekTo(ctx context.Context, c cid.Cid) error {
return err
}
- return vf.DocumentFetcher.Start(ctx, core.Spans{})
+ return vf.DocumentFetcher.Start(ctx)
}
// seekTo seeks to the given CID version by stepping through the CRDT state graph from the beginning
@@ -421,9 +415,3 @@ func (vf *VersionedFetcher) Close() error {
return vf.DocumentFetcher.Close()
}
-
-// NewVersionedSpan creates a new VersionedSpan from a DataStoreKey and a version CID.
-func NewVersionedSpan(dsKey keys.DataStoreKey, version cid.Cid) core.Spans {
- // Todo: Dont abuse DataStoreKey for version cid!
- return core.NewSpans(core.NewSpan(dsKey, keys.DataStoreKey{DocID: version.String()}))
-}
diff --git a/internal/db/fetcher_test.go b/internal/db/fetcher_test.go
index 01b5ff065f..48c159d3c0 100644
--- a/internal/db/fetcher_test.go
+++ b/internal/db/fetcher_test.go
@@ -16,13 +16,12 @@ import (
"github.com/stretchr/testify/assert"
- "github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/fetcher"
)
func TestFetcherStartWithoutInit(t *testing.T) {
ctx := context.Background()
df := new(fetcher.DocumentFetcher)
- err := df.Start(ctx, core.Spans{})
+ err := df.Start(ctx)
assert.Error(t, err)
}
diff --git a/internal/keys/datastore_doc.go b/internal/keys/datastore_doc.go
index cd8ac60917..cffa99f6fc 100644
--- a/internal/keys/datastore_doc.go
+++ b/internal/keys/datastore_doc.go
@@ -41,7 +41,7 @@ type DataStoreKey struct {
FieldID string
}
-var _ Key = (*DataStoreKey)(nil)
+var _ Walkable = (*DataStoreKey)(nil)
// Creates a new DataStoreKey from a string as best as it can,
// splitting the input using '/' as a field deliminator. It assumes
@@ -167,7 +167,7 @@ func (k DataStoreKey) ToPrimaryDataStoreKey() PrimaryDataStoreKey {
// PrefixEnd determines the end key given key as a prefix, that is the key that sorts precisely
// behind all keys starting with prefix: "1" is added to the final byte and the carry propagated.
// The special cases of nil and KeyMin always returns KeyMax.
-func (k DataStoreKey) PrefixEnd() DataStoreKey {
+func (k DataStoreKey) PrefixEnd() Walkable {
newKey := k
if k.FieldID != "" {
diff --git a/internal/keys/headstore_doc.go b/internal/keys/headstore_doc.go
index 5d3ec2306e..55809ab236 100644
--- a/internal/keys/headstore_doc.go
+++ b/internal/keys/headstore_doc.go
@@ -23,7 +23,7 @@ type HeadStoreKey struct {
Cid cid.Cid
}
-var _ Key = (*HeadStoreKey)(nil)
+var _ Walkable = (*HeadStoreKey)(nil)
// Creates a new HeadStoreKey from a string as best as it can,
// splitting the input using '/' as a field deliminator. It assumes
@@ -92,3 +92,22 @@ func (k HeadStoreKey) Bytes() []byte {
func (k HeadStoreKey) ToDS() ds.Key {
return ds.NewKey(k.ToString())
}
+
+func (k HeadStoreKey) PrefixEnd() Walkable {
+ newKey := k
+
+ if k.FieldID != "" {
+ newKey.FieldID = string(bytesPrefixEnd([]byte(k.FieldID)))
+ return newKey
+ }
+ if k.DocID != "" {
+ newKey.DocID = string(bytesPrefixEnd([]byte(k.DocID)))
+ return newKey
+ }
+ if k.Cid.Defined() {
+ newKey.Cid = cid.MustParse(bytesPrefixEnd(k.Cid.Bytes()))
+ return newKey
+ }
+
+ return newKey
+}
diff --git a/internal/keys/key.go b/internal/keys/key.go
index 893b9790b4..42e5935a1f 100644
--- a/internal/keys/key.go
+++ b/internal/keys/key.go
@@ -20,3 +20,20 @@ type Key interface {
Bytes() []byte
ToDS() ds.Key
}
+
+// Walkable represents a key in the database that can be 'walked along'
+// by prefixing the end of the key.
+type Walkable interface {
+ Key
+ PrefixEnd() Walkable
+}
+
+// PrettyPrint returns the human readable version of the given key.
+func PrettyPrint(k Key) string {
+ switch typed := k.(type) {
+ case DataStoreKey:
+ return typed.PrettyPrint()
+ default:
+ return typed.ToString()
+ }
+}
diff --git a/internal/lens/fetcher.go b/internal/lens/fetcher.go
index db9e418afa..a441c357bd 100644
--- a/internal/lens/fetcher.go
+++ b/internal/lens/fetcher.go
@@ -127,8 +127,8 @@ historyLoop:
)
}
-func (f *lensedFetcher) Start(ctx context.Context, spans core.Spans) error {
- return f.source.Start(ctx, spans)
+func (f *lensedFetcher) Start(ctx context.Context, spans ...core.Span) error {
+ return f.source.Start(ctx, spans...)
}
func (f *lensedFetcher) FetchNext(ctx context.Context) (fetcher.EncodedDocument, fetcher.ExecInfo, error) {
diff --git a/internal/planner/arbitrary_join.go b/internal/planner/arbitrary_join.go
index 978015298b..e668287028 100644
--- a/internal/planner/arbitrary_join.go
+++ b/internal/planner/arbitrary_join.go
@@ -79,7 +79,7 @@ func (n *dataSource) Start() error {
return nil
}
-func (n *dataSource) Spans(spans core.Spans) {
+func (n *dataSource) Spans(spans []core.Span) {
if n.parentSource != nil {
n.parentSource.Spans(spans)
}
diff --git a/internal/planner/average.go b/internal/planner/average.go
index 24ef567011..9fe8803bee 100644
--- a/internal/planner/average.go
+++ b/internal/planner/average.go
@@ -64,11 +64,11 @@ func (n *averageNode) Init() error {
return n.plan.Init()
}
-func (n *averageNode) Kind() string { return "averageNode" }
-func (n *averageNode) Start() error { return n.plan.Start() }
-func (n *averageNode) Spans(spans core.Spans) { n.plan.Spans(spans) }
-func (n *averageNode) Close() error { return n.plan.Close() }
-func (n *averageNode) Source() planNode { return n.plan }
+func (n *averageNode) Kind() string { return "averageNode" }
+func (n *averageNode) Start() error { return n.plan.Start() }
+func (n *averageNode) Spans(spans []core.Span) { n.plan.Spans(spans) }
+func (n *averageNode) Close() error { return n.plan.Close() }
+func (n *averageNode) Source() planNode { return n.plan }
func (n *averageNode) Next() (bool, error) {
n.execInfo.iterations++
diff --git a/internal/planner/commit.go b/internal/planner/commit.go
index dc9a0ce3d7..ceecfc46cd 100644
--- a/internal/planner/commit.go
+++ b/internal/planner/commit.go
@@ -36,7 +36,7 @@ type dagScanNode struct {
queuedCids []*cid.Cid
fetcher fetcher.HeadFetcher
- spans core.Spans
+ prefix keys.HeadStoreKey
commitSelect *mapper.CommitSelect
execInfo dagScanExecInfo
@@ -67,20 +67,21 @@ func (n *dagScanNode) Kind() string {
}
func (n *dagScanNode) Init() error {
- if len(n.spans.Value) == 0 {
+ undefined := keys.HeadStoreKey{}
+ if n.prefix == undefined {
if n.commitSelect.DocID.HasValue() {
- dsKey := keys.DataStoreKey{}.WithDocID(n.commitSelect.DocID.Value())
+ key := keys.HeadStoreKey{}.WithDocID(n.commitSelect.DocID.Value())
if n.commitSelect.FieldID.HasValue() {
field := n.commitSelect.FieldID.Value()
- dsKey = dsKey.WithFieldID(field)
+ key = key.WithFieldID(field)
}
- n.spans = core.NewSpans(core.NewSpan(dsKey, dsKey.PrefixEnd()))
+ n.prefix = key
}
}
- return n.fetcher.Start(n.planner.ctx, n.planner.txn, n.spans, n.commitSelect.FieldID)
+ return n.fetcher.Start(n.planner.ctx, n.planner.txn, n.prefix, n.commitSelect.FieldID)
}
func (n *dagScanNode) Start() error {
@@ -92,18 +93,11 @@ func (n *dagScanNode) Start() error {
// either a CID or a DocID.
// If its a CID, set the node CID val
// if its a DocID, set the node Key val (headset)
-func (n *dagScanNode) Spans(spans core.Spans) {
- if len(spans.Value) == 0 {
+func (n *dagScanNode) Spans(spans []core.Span) {
+ if len(spans) == 0 {
return
}
- // copy the input spans so that we may mutate freely
- headSetSpans := core.Spans{
- HasValue: spans.HasValue,
- Value: make([]core.Span, len(spans.Value)),
- }
- copy(headSetSpans.Value, spans.Value)
-
var fieldID string
if n.commitSelect.FieldID.HasValue() {
fieldID = n.commitSelect.FieldID.Value()
@@ -111,13 +105,18 @@ func (n *dagScanNode) Spans(spans core.Spans) {
fieldID = core.COMPOSITE_NAMESPACE
}
- for i, span := range headSetSpans.Value {
- if span.Start().FieldID != fieldID {
- headSetSpans.Value[i] = core.NewSpan(span.Start().WithFieldID(fieldID), keys.DataStoreKey{})
+ for _, span := range spans {
+ var start keys.HeadStoreKey
+ switch s := span.Start.(type) {
+ case keys.DataStoreKey:
+ start = s.ToHeadStoreKey()
+ case keys.HeadStoreKey:
+ start = s
}
- }
- n.spans = headSetSpans
+ n.prefix = start.WithFieldID(fieldID)
+ return
+ }
}
func (n *dagScanNode) Close() error {
@@ -145,17 +144,16 @@ func (n *dagScanNode) simpleExplain() (map[string]any, error) {
// Build the explanation of the spans attribute.
spansExplainer := []map[string]any{}
+ undefinedHsKey := keys.HeadStoreKey{}
// Note: n.headset is `nil` for single commit selection query, so must check for it.
- if n.spans.HasValue {
- for _, span := range n.spans.Value {
- spansExplainer = append(
- spansExplainer,
- map[string]any{
- "start": span.Start().ToString(),
- "end": span.End().ToString(),
- },
- )
- }
+ if n.prefix != undefinedHsKey {
+ spansExplainer = append(
+ spansExplainer,
+ map[string]any{
+ "start": n.prefix.ToString(),
+ "end": n.prefix.PrefixEnd().ToString(),
+ },
+ )
}
// Add the built spans attribute, if it was valid.
simpleExplainMap[spansLabel] = spansExplainer
diff --git a/internal/planner/count.go b/internal/planner/count.go
index d0c0642cae..efc2a20c36 100644
--- a/internal/planner/count.go
+++ b/internal/planner/count.go
@@ -62,7 +62,7 @@ func (n *countNode) Init() error {
func (n *countNode) Start() error { return n.plan.Start() }
-func (n *countNode) Spans(spans core.Spans) { n.plan.Spans(spans) }
+func (n *countNode) Spans(spans []core.Span) { n.plan.Spans(spans) }
func (n *countNode) Close() error { return n.plan.Close() }
diff --git a/internal/planner/create.go b/internal/planner/create.go
index 18365f966d..1b03857a13 100644
--- a/internal/planner/create.go
+++ b/internal/planner/create.go
@@ -56,13 +56,13 @@ func (n *createNode) Kind() string { return "createNode" }
func (n *createNode) Init() error { return nil }
-func docIDsToSpans(ids []string, desc client.CollectionDescription) core.Spans {
+func docIDsToSpans(ids []string, desc client.CollectionDescription) []core.Span {
spans := make([]core.Span, len(ids))
for i, id := range ids {
docID := base.MakeDataStoreKeyWithCollectionAndDocID(desc, id)
spans[i] = core.NewSpan(docID, docID.PrefixEnd())
}
- return core.NewSpans(spans...)
+ return spans
}
func documentsToDocIDs(docs ...*client.Document) []string {
@@ -115,7 +115,7 @@ func (n *createNode) Next() (bool, error) {
return next, err
}
-func (n *createNode) Spans(spans core.Spans) { /* no-op */ }
+func (n *createNode) Spans(spans []core.Span) { /* no-op */ }
func (n *createNode) Close() error {
return n.results.Close()
diff --git a/internal/planner/delete.go b/internal/planner/delete.go
index e470f45956..9142a76868 100644
--- a/internal/planner/delete.go
+++ b/internal/planner/delete.go
@@ -67,7 +67,7 @@ func (n *deleteNode) Next() (bool, error) {
return true, nil
}
-func (n *deleteNode) Spans(spans core.Spans) {
+func (n *deleteNode) Spans(spans []core.Span) {
n.source.Spans(spans)
}
diff --git a/internal/planner/group.go b/internal/planner/group.go
index 32a98c2330..2491740e81 100644
--- a/internal/planner/group.go
+++ b/internal/planner/group.go
@@ -127,7 +127,7 @@ func (n *groupNode) Start() error {
return nil
}
-func (n *groupNode) Spans(spans core.Spans) {
+func (n *groupNode) Spans(spans []core.Span) {
for _, dataSource := range n.dataSources {
dataSource.Spans(spans)
}
diff --git a/internal/planner/lens.go b/internal/planner/lens.go
index 816e973b09..618642b5df 100644
--- a/internal/planner/lens.go
+++ b/internal/planner/lens.go
@@ -61,7 +61,7 @@ func (n *lensNode) Start() error {
return n.source.Start()
}
-func (n *lensNode) Spans(spans core.Spans) {
+func (n *lensNode) Spans(spans []core.Span) {
n.source.Spans(spans)
}
diff --git a/internal/planner/limit.go b/internal/planner/limit.go
index 0da7a8b249..5281a7e215 100644
--- a/internal/planner/limit.go
+++ b/internal/planner/limit.go
@@ -59,10 +59,10 @@ func (n *limitNode) Init() error {
return n.plan.Init()
}
-func (n *limitNode) Start() error { return n.plan.Start() }
-func (n *limitNode) Spans(spans core.Spans) { n.plan.Spans(spans) }
-func (n *limitNode) Close() error { return n.plan.Close() }
-func (n *limitNode) Value() core.Doc { return n.plan.Value() }
+func (n *limitNode) Start() error { return n.plan.Start() }
+func (n *limitNode) Spans(spans []core.Span) { n.plan.Spans(spans) }
+func (n *limitNode) Close() error { return n.plan.Close() }
+func (n *limitNode) Value() core.Doc { return n.plan.Value() }
func (n *limitNode) Next() (bool, error) {
n.execInfo.iterations++
diff --git a/internal/planner/max.go b/internal/planner/max.go
index dbcc991268..e4db8fa526 100644
--- a/internal/planner/max.go
+++ b/internal/planner/max.go
@@ -54,13 +54,13 @@ func (p *Planner) Max(
}, nil
}
-func (n *maxNode) Kind() string { return "maxNode" }
-func (n *maxNode) Init() error { return n.plan.Init() }
-func (n *maxNode) Start() error { return n.plan.Start() }
-func (n *maxNode) Spans(spans core.Spans) { n.plan.Spans(spans) }
-func (n *maxNode) Close() error { return n.plan.Close() }
-func (n *maxNode) Source() planNode { return n.plan }
-func (n *maxNode) SetPlan(p planNode) { n.plan = p }
+func (n *maxNode) Kind() string { return "maxNode" }
+func (n *maxNode) Init() error { return n.plan.Init() }
+func (n *maxNode) Start() error { return n.plan.Start() }
+func (n *maxNode) Spans(spans []core.Span) { n.plan.Spans(spans) }
+func (n *maxNode) Close() error { return n.plan.Close() }
+func (n *maxNode) Source() planNode { return n.plan }
+func (n *maxNode) SetPlan(p planNode) { n.plan = p }
func (n *maxNode) simpleExplain() (map[string]any, error) {
sourceExplanations := make([]map[string]any, len(n.aggregateMapping))
diff --git a/internal/planner/min.go b/internal/planner/min.go
index 9be8ecd30a..163ca2894d 100644
--- a/internal/planner/min.go
+++ b/internal/planner/min.go
@@ -54,13 +54,13 @@ func (p *Planner) Min(
}, nil
}
-func (n *minNode) Kind() string { return "minNode" }
-func (n *minNode) Init() error { return n.plan.Init() }
-func (n *minNode) Start() error { return n.plan.Start() }
-func (n *minNode) Spans(spans core.Spans) { n.plan.Spans(spans) }
-func (n *minNode) Close() error { return n.plan.Close() }
-func (n *minNode) Source() planNode { return n.plan }
-func (n *minNode) SetPlan(p planNode) { n.plan = p }
+func (n *minNode) Kind() string { return "minNode" }
+func (n *minNode) Init() error { return n.plan.Init() }
+func (n *minNode) Start() error { return n.plan.Start() }
+func (n *minNode) Spans(spans []core.Span) { n.plan.Spans(spans) }
+func (n *minNode) Close() error { return n.plan.Close() }
+func (n *minNode) Source() planNode { return n.plan }
+func (n *minNode) SetPlan(p planNode) { n.plan = p }
func (n *minNode) simpleExplain() (map[string]any, error) {
sourceExplanations := make([]map[string]any, len(n.aggregateMapping))
diff --git a/internal/planner/multi.go b/internal/planner/multi.go
index c4c3278480..579f169344 100644
--- a/internal/planner/multi.go
+++ b/internal/planner/multi.go
@@ -91,7 +91,7 @@ func (p *parallelNode) Start() error {
})
}
-func (p *parallelNode) Spans(spans core.Spans) {
+func (p *parallelNode) Spans(spans []core.Span) {
_ = p.applyToPlans(func(n planNode) error {
n.Spans(spans)
return nil
@@ -157,7 +157,7 @@ func (p *parallelNode) nextAppend(index int, plan planNode) (bool, error) {
}
// pass the doc key as a reference through the spans interface
- spans := core.NewSpans(core.NewSpan(keys.DataStoreKey{DocID: key}, keys.DataStoreKey{}))
+ spans := []core.Span{core.NewSpan(keys.DataStoreKey{DocID: key}, keys.DataStoreKey{})}
plan.Spans(spans)
err := plan.Init()
if err != nil {
diff --git a/internal/planner/operation.go b/internal/planner/operation.go
index 934fe2d4b4..6f351f92a1 100644
--- a/internal/planner/operation.go
+++ b/internal/planner/operation.go
@@ -28,7 +28,7 @@ type operationNode struct {
isDone bool
}
-func (n *operationNode) Spans(spans core.Spans) {
+func (n *operationNode) Spans(spans []core.Span) {
for _, child := range n.children {
child.Spans(spans)
}
diff --git a/internal/planner/order.go b/internal/planner/order.go
index 7f30800f2e..0a69ba5453 100644
--- a/internal/planner/order.go
+++ b/internal/planner/order.go
@@ -98,7 +98,7 @@ func (n *orderNode) Init() error {
}
func (n *orderNode) Start() error { return n.plan.Start() }
-func (n *orderNode) Spans(spans core.Spans) { n.plan.Spans(spans) }
+func (n *orderNode) Spans(spans []core.Span) { n.plan.Spans(spans) }
func (n *orderNode) Value() core.Doc {
return n.valueIter.Value()
diff --git a/internal/planner/pipe.go b/internal/planner/pipe.go
index a14432bc12..b9331fce45 100644
--- a/internal/planner/pipe.go
+++ b/internal/planner/pipe.go
@@ -51,10 +51,10 @@ func (n *pipeNode) Init() error {
return n.source.Init()
}
-func (n *pipeNode) Start() error { return n.source.Start() }
-func (n *pipeNode) Spans(spans core.Spans) { n.source.Spans(spans) }
-func (n *pipeNode) Close() error { return n.source.Close() }
-func (n *pipeNode) Source() planNode { return n.source }
+func (n *pipeNode) Start() error { return n.source.Start() }
+func (n *pipeNode) Spans(spans []core.Span) { n.source.Spans(spans) }
+func (n *pipeNode) Close() error { return n.source.Close() }
+func (n *pipeNode) Source() planNode { return n.source }
func (n *pipeNode) Next() (bool, error) {
// we need to load all docs up until the requested point - this allows us to
diff --git a/internal/planner/planner.go b/internal/planner/planner.go
index fb5ce5812a..77dac1c7a2 100644
--- a/internal/planner/planner.go
+++ b/internal/planner/planner.go
@@ -36,7 +36,7 @@ type planNode interface {
// Spans sets the planNodes target spans. This is primarily only used for a scanNode,
// but based on the tree structure, may need to be propagated Eg. From a selectNode -> scanNode.
- Spans(core.Spans)
+ Spans([]core.Span)
// Next processes the next result doc from the request. Can only be called *after* Start().
// Can't be called again if any previous call returns false.
diff --git a/internal/planner/scan.go b/internal/planner/scan.go
index a5fe4a32e9..c00cda401c 100644
--- a/internal/planner/scan.go
+++ b/internal/planner/scan.go
@@ -18,6 +18,7 @@ import (
"github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/base"
"github.com/sourcenetwork/defradb/internal/db/fetcher"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/lens"
"github.com/sourcenetwork/defradb/internal/planner/filter"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
@@ -45,7 +46,7 @@ type scanNode struct {
showDeleted bool
- spans core.Spans
+ spans []core.Span
reverse bool
filter *mapper.Filter
@@ -201,12 +202,12 @@ func (n *scanNode) Start() error {
}
func (n *scanNode) initScan() error {
- if !n.spans.HasValue {
+ if len(n.spans) == 0 {
start := base.MakeDataStoreKeyWithCollectionDescription(n.col.Description())
- n.spans = core.NewSpans(core.NewSpan(start, start.PrefixEnd()))
+ n.spans = []core.Span{core.NewSpan(start, start.PrefixEnd())}
}
- err := n.fetcher.Start(n.p.ctx, n.spans)
+ err := n.fetcher.Start(n.p.ctx, n.spans...)
if err != nil {
return err
}
@@ -220,7 +221,7 @@ func (n *scanNode) initScan() error {
func (n *scanNode) Next() (bool, error) {
n.execInfo.iterations++
- if n.spans.HasValue && len(n.spans.Value) == 0 {
+ if len(n.spans) == 0 {
return false, nil
}
@@ -248,7 +249,7 @@ func (n *scanNode) Next() (bool, error) {
return true, nil
}
-func (n *scanNode) Spans(spans core.Spans) {
+func (n *scanNode) Spans(spans []core.Span) {
n.spans = spans
}
@@ -261,12 +262,10 @@ func (n *scanNode) Source() planNode { return nil }
// explainSpans explains the spans attribute.
func (n *scanNode) explainSpans() []map[string]any {
spansExplainer := []map[string]any{}
- for _, span := range n.spans.Value {
+ for _, span := range n.spans {
spanExplainer := map[string]any{
- // These must be pretty printed as the explain results need to be returnable
- // as json via some clients (e.g. http and cli)
- "start": span.Start().PrettyPrint(),
- "end": span.End().PrettyPrint(),
+ "start": keys.PrettyPrint(span.Start),
+ "end": keys.PrettyPrint(span.End),
}
spansExplainer = append(spansExplainer, spanExplainer)
@@ -419,7 +418,7 @@ func (n *multiScanNode) Value() core.Doc {
return n.scanNode.documentIterator.Value()
}
-func (n *multiScanNode) Spans(spans core.Spans) {
+func (n *multiScanNode) Spans(spans []core.Span) {
n.scanNode.Spans(spans)
}
diff --git a/internal/planner/select.go b/internal/planner/select.go
index 9393103e40..e5b53cd997 100644
--- a/internal/planner/select.go
+++ b/internal/planner/select.go
@@ -18,7 +18,6 @@ import (
"github.com/sourcenetwork/defradb/client/request"
"github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/base"
- "github.com/sourcenetwork/defradb/internal/db/fetcher"
"github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -70,7 +69,7 @@ func (n *selectTopNode) Start() error { return n.planNode.Start() }
func (n *selectTopNode) Next() (bool, error) { return n.planNode.Next() }
-func (n *selectTopNode) Spans(spans core.Spans) { n.planNode.Spans(spans) }
+func (n *selectTopNode) Spans(spans []core.Span) { n.planNode.Spans(spans) }
func (n *selectTopNode) Value() core.Doc { return n.planNode.Value() }
@@ -182,7 +181,7 @@ func (n *selectNode) Next() (bool, error) {
}
}
-func (n *selectNode) Spans(spans core.Spans) {
+func (n *selectNode) Spans(spans []core.Span) {
n.source.Spans(spans)
}
@@ -264,11 +263,17 @@ func (n *selectNode) initSource() ([]aggregateNode, error) {
if err != nil {
return nil, err
}
- spans := fetcher.NewVersionedSpan(
- keys.DataStoreKey{DocID: n.selectReq.DocIDs.Value()[0]},
- c,
- ) // @todo check len
- origScan.Spans(spans)
+ origScan.Spans(
+ []core.Span{
+ core.NewSpan(
+ keys.HeadStoreKey{
+ DocID: n.selectReq.DocIDs.Value()[0],
+ Cid: c,
+ },
+ keys.HeadStoreKey{},
+ ),
+ },
+ )
} else if n.selectReq.DocIDs.HasValue() {
// If we *just* have a DocID(s), run a FindByDocID(s) optimization
// if we have a FindByDocID filter, create a span for it
@@ -281,7 +286,7 @@ func (n *selectNode) initSource() ([]aggregateNode, error) {
docIDIndexKey := base.MakeDataStoreKeyWithCollectionAndDocID(sourcePlan.collection.Description(), docID)
spans[i] = core.NewSpan(docIDIndexKey, docIDIndexKey.PrefixEnd())
}
- origScan.Spans(core.NewSpans(spans...))
+ origScan.Spans(spans)
}
}
diff --git a/internal/planner/sum.go b/internal/planner/sum.go
index 177dd72e3a..da6010704d 100644
--- a/internal/planner/sum.go
+++ b/internal/planner/sum.go
@@ -149,7 +149,7 @@ func (n *sumNode) Init() error {
func (n *sumNode) Start() error { return n.plan.Start() }
-func (n *sumNode) Spans(spans core.Spans) { n.plan.Spans(spans) }
+func (n *sumNode) Spans(spans []core.Span) { n.plan.Spans(spans) }
func (n *sumNode) Close() error { return n.plan.Close() }
diff --git a/internal/planner/top.go b/internal/planner/top.go
index ce2ce4e6dc..518a96af50 100644
--- a/internal/planner/top.go
+++ b/internal/planner/top.go
@@ -35,7 +35,7 @@ type topLevelNode struct {
isInRecurse bool
}
-func (n *topLevelNode) Spans(spans core.Spans) {
+func (n *topLevelNode) Spans(spans []core.Span) {
if n.isInRecurse {
return
}
diff --git a/internal/planner/type_join.go b/internal/planner/type_join.go
index a6d726b801..a9063d07d6 100644
--- a/internal/planner/type_join.go
+++ b/internal/planner/type_join.go
@@ -114,7 +114,7 @@ func (n *typeIndexJoin) Start() error {
return n.joinPlan.Start()
}
-func (n *typeIndexJoin) Spans(spans core.Spans) {
+func (n *typeIndexJoin) Spans(spans []core.Span) {
n.joinPlan.Spans(spans)
}
@@ -444,7 +444,7 @@ func fetchDocWithID(node planNode, docID string) (bool, error) {
}
dsKey := base.MakeDataStoreKeyWithCollectionAndDocID(scan.col.Description(), docID)
- spans := core.NewSpans(core.NewSpan(dsKey, dsKey.PrefixEnd()))
+ spans := []core.Span{core.NewSpan(dsKey, dsKey.PrefixEnd())}
node.Spans(spans)
@@ -502,7 +502,7 @@ func (join *invertibleTypeJoin) Close() error {
return join.childSide.plan.Close()
}
-func (join *invertibleTypeJoin) Spans(spans core.Spans) {
+func (join *invertibleTypeJoin) Spans(spans []core.Span) {
join.parentSide.plan.Spans(spans)
}
diff --git a/internal/planner/update.go b/internal/planner/update.go
index e707065022..4340625bf8 100644
--- a/internal/planner/update.go
+++ b/internal/planner/update.go
@@ -107,7 +107,7 @@ func (n *updateNode) Next() (bool, error) {
func (n *updateNode) Kind() string { return "updateNode" }
-func (n *updateNode) Spans(spans core.Spans) { n.results.Spans(spans) }
+func (n *updateNode) Spans(spans []core.Span) { n.results.Spans(spans) }
func (n *updateNode) Init() error { return n.results.Init() }
diff --git a/internal/planner/upsert.go b/internal/planner/upsert.go
index 4f12395284..331d1e4171 100644
--- a/internal/planner/upsert.go
+++ b/internal/planner/upsert.go
@@ -96,7 +96,7 @@ func (n *upsertNode) Kind() string {
return "upsertNode"
}
-func (n *upsertNode) Spans(spans core.Spans) {
+func (n *upsertNode) Spans(spans []core.Span) {
n.source.Spans(spans)
}
diff --git a/internal/planner/values.go b/internal/planner/values.go
index 8053d2df1e..4028f52594 100644
--- a/internal/planner/values.go
+++ b/internal/planner/values.go
@@ -46,9 +46,9 @@ func (p *Planner) newContainerValuesNode(ordering []mapper.OrderCondition) *valu
}
}
-func (n *valuesNode) Init() error { return nil }
-func (n *valuesNode) Start() error { return nil }
-func (n *valuesNode) Spans(spans core.Spans) {}
+func (n *valuesNode) Init() error { return nil }
+func (n *valuesNode) Start() error { return nil }
+func (n *valuesNode) Spans(spans []core.Span) {}
func (n *valuesNode) Kind() string {
return "valuesNode"
diff --git a/internal/planner/view.go b/internal/planner/view.go
index e5beef128b..b834d74323 100644
--- a/internal/planner/view.go
+++ b/internal/planner/view.go
@@ -74,7 +74,7 @@ func (n *viewNode) Start() error {
return n.source.Start()
}
-func (n *viewNode) Spans(spans core.Spans) {
+func (n *viewNode) Spans(spans []core.Span) {
n.source.Spans(spans)
}
@@ -217,7 +217,7 @@ func (n *cachedViewFetcher) Start() error {
return nil
}
-func (n *cachedViewFetcher) Spans(spans core.Spans) {
+func (n *cachedViewFetcher) Spans(spans []core.Span) {
// no-op
}
diff --git a/tests/integration/explain/default/delete_test.go b/tests/integration/explain/default/delete_test.go
index e220ae4d86..39b3b732d5 100644
--- a/tests/integration/explain/default/delete_test.go
+++ b/tests/integration/explain/default/delete_test.go
@@ -292,7 +292,12 @@ func TestDefaultExplainMutationRequestWithDeleteUsingNoIds(t *testing.T) {
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{},
+ "spans": []dataMap{
+ {
+ "end": "/4",
+ "start": "/3",
+ },
+ },
},
},
},
From 2f5387825aa1fd47871bf5a9e10f72b4034c330b Mon Sep 17 00:00:00 2001
From: Islam Aliev
Date: Thu, 7 Nov 2024 21:23:17 +0100
Subject: [PATCH 17/47] feat: Add ACP to pubsub KMS (#3206)
## Relevant issue(s)
Resolves #2893
This change adds ACP to pubsub KMS.
A new event `MergeComplete` was introduced in order to make it
correspond to `MergeCompleteName`. This is necessary to notify listeners
that the merge was executed on decrypted blocks.
Upon granting access to a document via `AddDocActorRelationship` we now
publish an `Update` event so that the actors being granted access
can request the document anew.
In the testing framework, `WaitForSync` is extended to allow specifying
documents that should be received in a decrypted state.
---
errors/errors.go | 29 ++
event/event.go | 9 +
internal/db/collection_retriever.go | 80 +++
internal/db/db.go | 36 ++
internal/db/errors.go | 12 +
internal/db/iterator.go | 104 ++++
internal/db/merge.go | 11 +-
internal/db/p2p_replicator.go | 30 +-
internal/db/permission/check.go | 4 +-
internal/kms/pubsub.go | 69 ++-
node/node.go | 9 +
tests/integration/acp.go | 94 ++--
tests/integration/encryption/peer_acp_test.go | 468 ++++++++++++++++++
tests/integration/events.go | 57 ++-
tests/integration/p2p.go | 5 +-
tests/integration/state.go | 17 +-
tests/integration/test_case.go | 7 +
tests/integration/utils.go | 12 +-
18 files changed, 972 insertions(+), 81 deletions(-)
create mode 100644 internal/db/collection_retriever.go
create mode 100644 internal/db/iterator.go
create mode 100644 tests/integration/encryption/peer_acp_test.go
diff --git a/errors/errors.go b/errors/errors.go
index 45c1202e77..dd5bf706c4 100644
--- a/errors/errors.go
+++ b/errors/errors.go
@@ -63,10 +63,39 @@ func Wrap(message string, inner error, keyvals ...KV) error {
return err
}
+// Is reports whether any error in err's tree matches target.
+//
+// The tree consists of err itself, followed by the errors obtained by repeatedly
+// calling its Unwrap() error or Unwrap() []error method. When err wraps multiple
+// errors, Is examines err followed by a depth-first traversal of its children.
+//
+// An error is considered to match a target if it is equal to that target or if
+// it implements a method Is(error) bool such that Is(target) returns true.
+//
+// An error type might provide an Is method so it can be treated as equivalent
+// to an existing error. For example, if MyError defines
+//
+// func (m MyError) Is(target error) bool { return target == fs.ErrExist }
+//
+// then Is(MyError{}, fs.ErrExist) returns true. See [syscall.Errno.Is] for
+// an example in the standard library. An Is method should only shallowly
+// compare err and the target and not call [Unwrap] on either.
func Is(err, target error) bool {
return errors.Is(err, target)
}
+// Join returns an error that wraps the given errors.
+// Any nil error values are discarded.
+// Join returns nil if every value in errs is nil.
+// The error formats as the concatenation of the strings obtained
+// by calling the Error method of each element of errs, with a newline
+// between each string.
+//
+// A non-nil error returned by Join implements the Unwrap() []error method.
+func Join(errs ...error) error {
+ return errors.Join(errs...)
+}
+
// This function will not be inlined by the compiler as it will spoil any stacktrace
// generated.
//
diff --git a/event/event.go b/event/event.go
index 5ae882c6bb..53d5f0dbb4 100644
--- a/event/event.go
+++ b/event/event.go
@@ -96,6 +96,15 @@ type Merge struct {
SchemaRoot string
}
+// MergeComplete is a notification that a merge has been completed.
+type MergeComplete struct {
+ // Merge is the merge that was completed.
+ Merge Merge
+
+ // Decrypted specifies if the merge payload was decrypted.
+ Decrypted bool
+}
+
// Message contains event info.
type Message struct {
// Name is the name of the event this message was generated from.
diff --git a/internal/db/collection_retriever.go b/internal/db/collection_retriever.go
new file mode 100644
index 0000000000..6fc134c722
--- /dev/null
+++ b/internal/db/collection_retriever.go
@@ -0,0 +1,80 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package db
+
+import (
+ "context"
+
+ "github.com/sourcenetwork/immutable"
+
+ "github.com/sourcenetwork/defradb/client"
+ "github.com/sourcenetwork/defradb/internal/db/description"
+)
+
+// collectionRetriever is a helper struct that retrieves a collection from a document ID.
+type collectionRetriever struct {
+ db client.DB
+}
+
+// NewCollectionRetriever creates a new CollectionRetriever.
+func NewCollectionRetriever(db client.DB) collectionRetriever {
+ return collectionRetriever{
+ db: db,
+ }
+}
+
+// RetrieveCollectionFromDocID retrieves a collection from a document ID.
+func (r collectionRetriever) RetrieveCollectionFromDocID(
+ ctx context.Context,
+ docID string,
+) (client.Collection, error) {
+ ctx, txn, err := ensureContextTxn(ctx, r.db, false)
+ if err != nil {
+ return nil, err
+ }
+ defer txn.Discard(ctx)
+
+ headIterator, err := NewHeadBlocksIteratorFromTxn(ctx, txn, docID)
+ if err != nil {
+ return nil, err
+ }
+
+ hasValue, err := headIterator.Next()
+ if err != nil {
+ return nil, err
+ }
+
+ if !hasValue {
+ return nil, NewErrDocIDNotFound(docID)
+ }
+
+ schema, err := description.GetSchemaVersion(ctx, txn, headIterator.CurrentBlock().Delta.GetSchemaVersionID())
+ if err != nil {
+ return nil, err
+ }
+
+ cols, err := r.db.GetCollections(
+ ctx,
+ client.CollectionFetchOptions{
+ SchemaRoot: immutable.Some(schema.Root),
+ },
+ )
+
+ if err != nil {
+ return nil, err
+ }
+
+ if len(cols) == 0 {
+ return nil, NewErrCollectionWithSchemaRootNotFound(schema.Root)
+ }
+
+ return cols[0], nil
+}
diff --git a/internal/db/db.go b/internal/db/db.go
index f2782bbe3a..630bd0ae43 100644
--- a/internal/db/db.go
+++ b/internal/db/db.go
@@ -227,6 +227,35 @@ func (db *db) AddPolicy(
return client.AddPolicyResult{PolicyID: policyID}, nil
}
+// publishDocUpdateEvent publishes an update event for a document.
+// It uses heads iterator to read the document's head blocks directly from the storage, i.e. without
+// using a transaction.
+func (db *db) publishDocUpdateEvent(ctx context.Context, docID string, collection client.Collection) error {
+ headsIterator, err := NewHeadBlocksIterator(ctx, db.multistore.Headstore(), db.Blockstore(), docID)
+ if err != nil {
+ return err
+ }
+
+ for {
+ hasValue, err := headsIterator.Next()
+ if err != nil {
+ return err
+ }
+ if !hasValue {
+ break
+ }
+
+ updateEvent := event.Update{
+ DocID: docID,
+ Cid: headsIterator.CurrentCid(),
+ SchemaRoot: collection.Schema().Root,
+ Block: headsIterator.CurrentRawBlock(),
+ }
+ db.events.Publish(event.NewMessage(event.UpdateName, updateEvent))
+ }
+ return nil
+}
+
func (db *db) AddDocActorRelationship(
ctx context.Context,
collectionName string,
@@ -262,6 +291,13 @@ func (db *db) AddDocActorRelationship(
return client.AddDocActorRelationshipResult{}, err
}
+ if !exists {
+ err = db.publishDocUpdateEvent(ctx, docID, collection)
+ if err != nil {
+ return client.AddDocActorRelationshipResult{}, err
+ }
+ }
+
return client.AddDocActorRelationshipResult{ExistedAlready: exists}, nil
}
diff --git a/internal/db/errors.go b/internal/db/errors.go
index bd38cf052e..1bc200f2b4 100644
--- a/internal/db/errors.go
+++ b/internal/db/errors.go
@@ -106,6 +106,8 @@ const (
errColNotMaterialized string = "non-materialized collections are not supported"
errMaterializedViewAndACPNotSupported string = "materialized views do not support ACP"
errInvalidDefaultFieldValue string = "default field value is invalid"
+ errDocIDNotFound string = "docID not found"
+ errCollectionWithSchemaRootNotFound string = "collection with schema root not found"
)
var (
@@ -152,6 +154,8 @@ var (
ErrContextDone = errors.New("context done")
ErrFailedToRetryDoc = errors.New("failed to retry doc")
ErrTimeoutDocRetry = errors.New("timeout while retrying doc")
+ ErrDocIDNotFound = errors.New(errDocIDNotFound)
+ ErrorCollectionWithSchemaRootNotFound = errors.New(errCollectionWithSchemaRootNotFound)
)
// NewErrFailedToGetHeads returns a new error indicating that the heads of a document
@@ -690,3 +694,11 @@ func NewErrDefaultFieldValueInvalid(collection string, inner error) error {
errors.NewKV("Inner", inner),
)
}
+
+func NewErrDocIDNotFound(docID string) error {
+ return errors.New(errDocIDNotFound, errors.NewKV("DocID", docID))
+}
+
+func NewErrCollectionWithSchemaRootNotFound(schemaRoot string) error {
+ return errors.New(errCollectionWithSchemaRootNotFound, errors.NewKV("SchemaRoot", schemaRoot))
+}
diff --git a/internal/db/iterator.go b/internal/db/iterator.go
new file mode 100644
index 0000000000..00519d1915
--- /dev/null
+++ b/internal/db/iterator.go
@@ -0,0 +1,104 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package db
+
+import (
+ "context"
+
+ "github.com/ipfs/go-cid"
+
+ "github.com/sourcenetwork/defradb/datastore"
+ "github.com/sourcenetwork/defradb/internal/core"
+ coreblock "github.com/sourcenetwork/defradb/internal/core/block"
+ "github.com/sourcenetwork/defradb/internal/keys"
+ "github.com/sourcenetwork/defradb/internal/merkle/clock"
+)
+
+// DocHeadBlocksIterator is an iterator that iterates over the head blocks of a document.
+type DocHeadBlocksIterator struct {
+ ctx context.Context
+ blockstore datastore.Blockstore
+ cids []cid.Cid
+
+ currentCid cid.Cid
+ currentBlock *coreblock.Block
+ currentRawBlock []byte
+}
+
+// NewHeadBlocksIterator creates a new DocHeadBlocksIterator.
+func NewHeadBlocksIterator(
+ ctx context.Context,
+ headstore datastore.DSReaderWriter,
+ blockstore datastore.Blockstore,
+ docID string,
+) (*DocHeadBlocksIterator, error) {
+ headStoreKey := keys.HeadStoreKey{
+ DocID: docID,
+ FieldID: core.COMPOSITE_NAMESPACE,
+ }
+ headset := clock.NewHeadSet(headstore, headStoreKey)
+ cids, _, err := headset.List(ctx)
+ if err != nil {
+ return nil, err
+ }
+ return &DocHeadBlocksIterator{
+ ctx: ctx,
+ blockstore: blockstore,
+ cids: cids,
+ }, nil
+}
+
+// NewHeadBlocksIteratorFromTxn creates a new DocHeadBlocksIterator from a transaction.
+func NewHeadBlocksIteratorFromTxn(
+ ctx context.Context,
+ txn datastore.Txn,
+ docID string,
+) (*DocHeadBlocksIterator, error) {
+ return NewHeadBlocksIterator(ctx, txn.Headstore(), txn.Blockstore(), docID)
+}
+
+// Next advances the iterator to the next block.
+func (h *DocHeadBlocksIterator) Next() (bool, error) {
+ if len(h.cids) == 0 {
+ return false, nil
+ }
+ nextCid := h.cids[0]
+ h.cids = h.cids[1:]
+
+ rawBlock, err := h.blockstore.Get(h.ctx, nextCid)
+ if err != nil {
+ return false, err
+ }
+ blk, err := coreblock.GetFromBytes(rawBlock.RawData())
+ if err != nil {
+ return false, err
+ }
+
+ h.currentCid = nextCid
+ h.currentBlock = blk
+ h.currentRawBlock = rawBlock.RawData()
+ return true, nil
+}
+
+// CurrentCid returns the CID of the current block.
+func (h *DocHeadBlocksIterator) CurrentCid() cid.Cid {
+ return h.currentCid
+}
+
+// CurrentBlock returns the current block.
+func (h *DocHeadBlocksIterator) CurrentBlock() *coreblock.Block {
+ return h.currentBlock
+}
+
+// CurrentRawBlock returns the raw data of the current block.
+func (h *DocHeadBlocksIterator) CurrentRawBlock() []byte {
+ return h.currentRawBlock
+}
diff --git a/internal/db/merge.go b/internal/db/merge.go
index 74db1ad302..898700a9ed 100644
--- a/internal/db/merge.go
+++ b/internal/db/merge.go
@@ -84,7 +84,10 @@ func (db *db) executeMerge(ctx context.Context, dagMerge event.Merge) error {
}
// send a complete event so we can track merges in the integration tests
- db.events.Publish(event.NewMessage(event.MergeCompleteName, dagMerge))
+ db.events.Publish(event.NewMessage(event.MergeCompleteName, event.MergeComplete{
+ Merge: dagMerge,
+ Decrypted: len(mp.missingEncryptionBlocks) == 0,
+ }))
return nil
}
@@ -264,7 +267,9 @@ func (mp *mergeProcessor) tryFetchMissingBlocksAndMerge(ctx context.Context) err
return res.Error
}
- clear(mp.missingEncryptionBlocks)
+ if len(res.Items) == 0 {
+ return nil
+ }
for i := range res.Items {
_, link, err := cid.CidFromBytes(res.Items[i].Link)
@@ -280,6 +285,8 @@ func (mp *mergeProcessor) tryFetchMissingBlocksAndMerge(ctx context.Context) err
mp.availableEncryptionBlocks[cidlink.Link{Cid: link}] = &encBlock
}
+ clear(mp.missingEncryptionBlocks)
+
err := mp.mergeComposites(ctx)
if err != nil {
return err
diff --git a/internal/db/p2p_replicator.go b/internal/db/p2p_replicator.go
index 61c082d210..a6d28f261a 100644
--- a/internal/db/p2p_replicator.go
+++ b/internal/db/p2p_replicator.go
@@ -23,10 +23,9 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/datastore"
- "github.com/sourcenetwork/defradb/errors"
+ dbErrors "github.com/sourcenetwork/defradb/errors"
"github.com/sourcenetwork/defradb/event"
"github.com/sourcenetwork/defradb/internal/core"
- coreblock "github.com/sourcenetwork/defradb/internal/core/block"
"github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/merkle/clock"
)
@@ -163,7 +162,7 @@ func (db *db) getDocsHeads(
log.ErrorContextE(
ctx,
"Failed to get all docIDs",
- NewErrReplicatorDocID(err, errors.NewKV("Collection", col.Name().Value())),
+ NewErrReplicatorDocID(err, dbErrors.NewKV("Collection", col.Name().Value())),
)
continue
}
@@ -646,31 +645,28 @@ func (db *db) retryDoc(ctx context.Context, docID string) error {
return err
}
defer txn.Discard(ctx)
- headStoreKey := keys.HeadStoreKey{
- DocID: docID,
- FieldID: core.COMPOSITE_NAMESPACE,
- }
- headset := clock.NewHeadSet(txn.Headstore(), headStoreKey)
- cids, _, err := headset.List(ctx)
+
+ headsIterator, err := NewHeadBlocksIteratorFromTxn(ctx, txn, docID)
if err != nil {
return err
}
- for _, c := range cids {
+ for {
select {
case <-ctx.Done():
return ErrContextDone
default:
}
- rawblk, err := txn.Blockstore().Get(ctx, c)
+
+ hasValue, err := headsIterator.Next()
if err != nil {
return err
}
- blk, err := coreblock.GetFromBytes(rawblk.RawData())
- if err != nil {
- return err
+ if !hasValue {
+ break
}
- schema, err := db.getSchemaByVersionID(ctx, blk.Delta.GetSchemaVersionID())
+
+ schema, err := db.getSchemaByVersionID(ctx, headsIterator.CurrentBlock().Delta.GetSchemaVersionID())
if err != nil {
return err
}
@@ -678,9 +674,9 @@ func (db *db) retryDoc(ctx context.Context, docID string) error {
defer close(successChan)
updateEvent := event.Update{
DocID: docID,
- Cid: c,
+ Cid: headsIterator.CurrentCid(),
SchemaRoot: schema.Root,
- Block: rawblk.RawData(),
+ Block: headsIterator.CurrentRawBlock(),
IsRetry: true,
// Because the retry is done in a separate goroutine but the retry handling process should be synchronous,
// we use a channel to block while waiting for the success status of the retry.
diff --git a/internal/db/permission/check.go b/internal/db/permission/check.go
index b19500f41b..ce111bccaf 100644
--- a/internal/db/permission/check.go
+++ b/internal/db/permission/check.go
@@ -50,7 +50,7 @@ func CheckAccessOfDocOnCollectionWithACP(
// Now that we know acp is available and the collection is permissioned, before checking access with
// acp directly we need to make sure that the document is not public, as public documents will not
- // be regestered with acp. We give unrestricted access to public documents, so it does not matter
+ // be registered with acp. We give unrestricted access to public documents, so it does not matter
// whether the request has a signature identity or not at this stage of the check.
isRegistered, err := acpSystem.IsDocRegistered(
ctx,
@@ -69,7 +69,7 @@ func CheckAccessOfDocOnCollectionWithACP(
// At this point if the request is not signatured, then it has no access, because:
// the collection has a policy on it, and the acp is enabled/available,
- // and the document is not public (is regestered with acp).
+ // and the document is not public (is registered with acp).
if !identity.HasValue() {
return false, nil
}
diff --git a/internal/kms/pubsub.go b/internal/kms/pubsub.go
index cbcd6ee141..13cef16269 100644
--- a/internal/kms/pubsub.go
+++ b/internal/kms/pubsub.go
@@ -20,12 +20,18 @@ import (
cidlink "github.com/ipld/go-ipld-prime/linking/cid"
libpeer "github.com/libp2p/go-libp2p/core/peer"
rpc "github.com/sourcenetwork/go-libp2p-pubsub-rpc"
+ "github.com/sourcenetwork/immutable"
grpcpeer "google.golang.org/grpc/peer"
+ "github.com/sourcenetwork/defradb/acp"
+ "github.com/sourcenetwork/defradb/acp/identity"
+ "github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/crypto"
"github.com/sourcenetwork/defradb/datastore"
"github.com/sourcenetwork/defradb/errors"
"github.com/sourcenetwork/defradb/event"
+ coreblock "github.com/sourcenetwork/defradb/internal/core/block"
+ "github.com/sourcenetwork/defradb/internal/db/permission"
"github.com/sourcenetwork/defradb/internal/encryption"
)
@@ -36,6 +42,10 @@ type PubSubServer interface {
SendPubSubMessage(context.Context, string, []byte) (<-chan rpc.Response, error)
}
+type CollectionRetriever interface {
+ RetrieveCollectionFromDocID(context.Context, string) (client.Collection, error)
+}
+
type pubSubService struct {
ctx context.Context
peerID libpeer.ID
@@ -43,6 +53,9 @@ type pubSubService struct {
keyRequestedSub *event.Subscription
eventBus *event.Bus
encStore *ipldEncStorage
+ acp immutable.Option[acp.ACP]
+ colRetriever CollectionRetriever
+ nodeDID string
}
var _ Service = (*pubSubService)(nil)
@@ -69,13 +82,19 @@ func NewPubSubService(
pubsub PubSubServer,
eventBus *event.Bus,
encstore datastore.Blockstore,
+ acp immutable.Option[acp.ACP],
+ colRetriever CollectionRetriever,
+ nodeDID string,
) (*pubSubService, error) {
s := &pubSubService{
- ctx: ctx,
- peerID: peerID,
- pubsub: pubsub,
- eventBus: eventBus,
- encStore: newIPLDEncryptionStorage(encstore),
+ ctx: ctx,
+ peerID: peerID,
+ pubsub: pubsub,
+ eventBus: eventBus,
+ encStore: newIPLDEncryptionStorage(encstore),
+ acp: acp,
+ colRetriever: colRetriever,
+ nodeDID: nodeDID,
}
err := pubsub.AddPubSubTopic(pubsubTopic, s.handleRequestFromPeer)
if err != nil {
@@ -127,6 +146,7 @@ func (s *pubSubService) handleKeyRequestedEvent() {
}
type fetchEncryptionKeyRequest struct {
+ Identity []byte
Links [][]byte
EphemeralPublicKey []byte
}
@@ -153,6 +173,7 @@ func (s *pubSubService) prepareFetchEncryptionKeyRequest(
ephemeralPublicKey []byte,
) (*fetchEncryptionKeyRequest, error) {
req := &fetchEncryptionKeyRequest{
+ Identity: []byte(s.nodeDID),
EphemeralPublicKey: ephemeralPublicKey,
}
@@ -260,9 +281,12 @@ func (s *pubSubService) tryGenEncryptionKeyLocally(
req *fetchEncryptionKeyRequest,
) (*fetchEncryptionKeyReply, error) {
blocks, err := s.getEncryptionKeysLocally(ctx, req)
- if err != nil || len(blocks) == 0 {
+ if err != nil {
return nil, err
}
+ if len(blocks) == 0 {
+ return &fetchEncryptionKeyReply{}, nil
+ }
reqEphPubKey, err := crypto.X25519PublicKeyFromBytes(req.EphemeralPublicKey)
if err != nil {
@@ -317,6 +341,14 @@ func (s *pubSubService) getEncryptionKeysLocally(
continue
}
+ hasPerm, err := s.doesIdentityHaveDocPermission(ctx, string(req.Identity), encBlock)
+ if err != nil {
+ return nil, err
+ }
+ if !hasPerm {
+ continue
+ }
+
encBlockBytes, err := encBlock.Marshal()
if err != nil {
return nil, err
@@ -327,6 +359,31 @@ func (s *pubSubService) getEncryptionKeysLocally(
return blocks, nil
}
+func (s *pubSubService) doesIdentityHaveDocPermission(
+ ctx context.Context,
+ actorIdentity string,
+ entBlock *coreblock.Encryption,
+) (bool, error) {
+ if !s.acp.HasValue() {
+ return true, nil
+ }
+
+ docID := string(entBlock.DocID)
+ collection, err := s.colRetriever.RetrieveCollectionFromDocID(ctx, docID)
+ if err != nil {
+ return false, err
+ }
+
+ return permission.CheckAccessOfDocOnCollectionWithACP(
+ ctx,
+ immutable.Some(identity.Identity{DID: actorIdentity}),
+ s.acp.Value(),
+ collection,
+ acp.ReadPermission,
+ docID,
+ )
+}
+
func encodeToBase64(data []byte) []byte {
encoded := make([]byte, base64.StdEncoding.EncodedLen(len(data)))
base64.StdEncoding.Encode(encoded, data)
diff --git a/node/node.go b/node/node.go
index 0a1b813862..aa47bfbc5c 100644
--- a/node/node.go
+++ b/node/node.go
@@ -158,6 +158,12 @@ func (n *Node) Start(ctx context.Context) error {
if err != nil {
return err
}
+
+ ident, err := n.DB.GetNodeIdentity(ctx)
+ if err != nil {
+ return err
+ }
+
if n.options.kmsType.HasValue() {
switch n.options.kmsType.Value() {
case kms.PubSubServiceType:
@@ -167,6 +173,9 @@ func (n *Node) Start(ctx context.Context) error {
n.Peer.Server(),
n.DB.Events(),
n.DB.Encstore(),
+ acp,
+ db.NewCollectionRetriever(n.DB),
+ ident.Value().DID,
)
}
if err != nil {
diff --git a/tests/integration/acp.go b/tests/integration/acp.go
index 8269245757..b98be7a059 100644
--- a/tests/integration/acp.go
+++ b/tests/integration/acp.go
@@ -181,11 +181,14 @@ func addDocActorRelationshipACP(
s *state,
action AddDocActorRelationship,
) {
+ var docID string
+ actionNodeID := action.NodeID
nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
for index, node := range nodes {
nodeID := nodeIDs[index]
- collectionName, docID := getCollectionAndDocInfo(s, action.CollectionID, action.DocID, nodeID)
+ var collectionName string
+ collectionName, docID = getCollectionAndDocInfo(s, action.CollectionID, action.DocID, nodeID)
exists, err := node.AddDocActorRelationship(
getContextWithIdentity(s.ctx, s, action.RequestorIdentity, nodeID),
@@ -206,9 +209,14 @@ func addDocActorRelationshipACP(
// The relationship should only be added to a SourceHub chain once - there is no need to loop through
// the nodes.
if acpType == SourceHubACPType {
+ actionNodeID = immutable.Some(0)
break
}
}
+
+ if action.ExpectedError == "" && !action.ExpectedExistence {
+ waitForUpdateEvents(s, actionNodeID, map[string]struct{}{docID: {}})
+ }
}
// DeleteDocActorRelationship will attempt to delete a relationship between a document and an actor.
@@ -356,7 +364,9 @@ func setupSourceHub(s *state) ([]node.ACPOpt, error) {
return nil, err
}
- out, err := exec.Command("sourcehubd", "init", moniker, "--chain-id", chainID, "--home", directory).CombinedOutput()
+ args := []string{"init", moniker, "--chain-id", chainID, "--home", directory}
+ s.t.Log("$ sourcehubd " + strings.Join(args, " "))
+ out, err := exec.Command("sourcehubd", args...).CombinedOutput()
s.t.Log(string(out))
if err != nil {
return nil, err
@@ -389,22 +399,27 @@ func setupSourceHub(s *state) ([]node.ACPOpt, error) {
return nil, err
}
- out, err = exec.Command(
- "sourcehubd", "keys", "import-hex", validatorName, acpKeyHex,
+ args = []string{
+ "keys", "import-hex", validatorName, acpKeyHex,
"--keyring-backend", keyringBackend,
"--home", directory,
- ).CombinedOutput()
+ }
+
+ s.t.Log("$ sourcehubd " + strings.Join(args, " "))
+ out, err = exec.Command("sourcehubd", args...).CombinedOutput()
s.t.Log(string(out))
if err != nil {
return nil, err
}
- out, err = exec.Command(
- "sourcehubd", "keys", "show", validatorName,
+ args = []string{
+ "keys", "show", validatorName,
"--address",
"--keyring-backend", keyringBackend,
"--home", directory,
- ).CombinedOutput()
+ }
+ s.t.Log("$ sourcehubd " + strings.Join(args, " "))
+ out, err = exec.Command("sourcehubd", args...).CombinedOutput()
s.t.Log(string(out))
if err != nil {
return nil, err
@@ -414,28 +429,31 @@ func setupSourceHub(s *state) ([]node.ACPOpt, error) {
validatorAddress := strings.TrimSpace(string(out))
s.sourcehubAddress = validatorAddress
- out, err = exec.Command(
- "sourcehubd", "genesis", "add-genesis-account", validatorAddress, "900000000stake",
+ args = []string{"genesis", "add-genesis-account", validatorAddress, "900000000stake",
"--keyring-backend", keyringBackend,
"--home", directory,
- ).CombinedOutput()
+ }
+ s.t.Log("$ sourcehubd " + strings.Join(args, " "))
+ out, err = exec.Command("sourcehubd", args...).CombinedOutput()
s.t.Log(string(out))
if err != nil {
return nil, err
}
- out, err = exec.Command(
- "sourcehubd", "genesis", "gentx", validatorName, "10000000stake",
+ args = []string{"genesis", "gentx", validatorName, "10000000stake",
"--chain-id", chainID,
"--keyring-backend", keyringBackend,
- "--home", directory,
- ).CombinedOutput()
+ "--home", directory}
+ s.t.Log("$ sourcehubd " + strings.Join(args, " "))
+ out, err = exec.Command("sourcehubd", args...).CombinedOutput()
s.t.Log(string(out))
if err != nil {
return nil, err
}
- out, err = exec.Command("sourcehubd", "genesis", "collect-gentxs", "--home", directory).CombinedOutput()
+ args = []string{"genesis", "collect-gentxs", "--home", directory}
+ s.t.Log("$ sourcehubd " + strings.Join(args, " "))
+ out, err = exec.Command("sourcehubd", args...).CombinedOutput()
s.t.Log(string(out))
if err != nil {
return nil, err
@@ -485,8 +503,7 @@ func setupSourceHub(s *state) ([]node.ACPOpt, error) {
releaseP2pPort()
releasePprofPort()
- sourceHubCmd := exec.Command(
- "sourcehubd",
+ args = []string{
"start",
"--minimum-gas-prices", "0stake",
"--home", directory,
@@ -494,7 +511,9 @@ func setupSourceHub(s *state) ([]node.ACPOpt, error) {
"--rpc.laddr", rpcAddress,
"--p2p.laddr", p2pAddress,
"--rpc.pprof_laddr", pprofAddress,
- )
+ }
+ s.t.Log("$ sourcehubd " + strings.Join(args, " "))
+ sourceHubCmd := exec.Command("sourcehubd", args...)
var bf testBuffer
bf.Lines = make(chan string, 100)
sourceHubCmd.Stdout = &bf
@@ -566,23 +585,32 @@ func getFreePort() (int, func(), error) {
// crossLock forms a cross process lock by attempting to listen to the given port.
//
-// This function will only return once the port is free. A function to unbind from the
-// port is returned - this unlock function may be called multiple times without issue.
+// This function will only return once the port is free or the timeout is reached.
+// A function to unbind from the port is returned - this unlock function may be called
+// multiple times without issue.
func crossLock(port uint16) (func(), error) {
- l, err := net.Listen("tcp", fmt.Sprintf("127.0.0.1:%v", port))
- if err != nil {
- if strings.Contains(err.Error(), "address already in use") {
- time.Sleep(5 * time.Millisecond)
- return crossLock(port)
+ timeout := time.After(20 * time.Second)
+ for {
+ select {
+ case <-timeout:
+ return nil, fmt.Errorf("timeout reached while trying to acquire cross process lock on port %v", port)
+ default:
+ l, err := net.Listen("tcp", fmt.Sprintf("127.0.0.1:%v", port))
+ if err != nil {
+ if strings.Contains(err.Error(), "address already in use") {
+ time.Sleep(5 * time.Millisecond)
+ continue
+ }
+ return nil, err
+ }
+
+ return func() {
+ // there are no errors that this returns that we actually care about
+ _ = l.Close()
+ },
+ nil
}
- return nil, err
}
-
- return func() {
- // there are no errors that this returns that we actually care about
- _ = l.Close()
- },
- nil
}
func getNodeAudience(s *state, nodeIndex int) immutable.Option[string] {
diff --git a/tests/integration/encryption/peer_acp_test.go b/tests/integration/encryption/peer_acp_test.go
new file mode 100644
index 0000000000..bb6705c626
--- /dev/null
+++ b/tests/integration/encryption/peer_acp_test.go
@@ -0,0 +1,468 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package encryption
+
+import (
+ "fmt"
+ "testing"
+ "time"
+
+ "github.com/sourcenetwork/immutable"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+const policy = `
+name: Test Policy
+
+description: A Policy
+
+actor:
+ name: actor
+
+resources:
+ users:
+ permissions:
+ read:
+ expr: owner + reader + writer
+
+ write:
+ expr: owner + writer
+
+ nothing:
+ expr: dummy
+
+ relations:
+ owner:
+ types:
+ - actor
+
+ reader:
+ types:
+ - actor
+
+ writer:
+ types:
+ - actor
+
+ admin:
+ manages:
+ - reader
+ types:
+ - actor
+
+ dummy:
+ types:
+ - actor
+`
+
+func TestDocEncryptionACP_IfUserAndNodeHaveAccess_ShouldFetch(t *testing.T) {
+ expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4"
+
+ test := testUtils.TestCase{
+ KMS: testUtils.KMS{Activated: true},
+ SupportedACPTypes: immutable.Some(
+ []testUtils.ACPType{
+ testUtils.SourceHubACPType,
+ },
+ ),
+ Actions: []any{
+ testUtils.RandomNetworkingConfig(),
+ testUtils.RandomNetworkingConfig(),
+ testUtils.AddPolicy{
+ Identity: testUtils.ClientIdentity(0),
+ Policy: policy,
+ ExpectedPolicyID: expectedPolicyID,
+ },
+ testUtils.SchemaUpdate{
+ Schema: fmt.Sprintf(`
+ type Users @policy(
+ id: "%s",
+ resource: "users"
+ ) {
+ name: String
+ age: Int
+ }
+ `,
+ expectedPolicyID,
+ ),
+ },
+ testUtils.ConnectPeers{
+ SourceNodeID: 1,
+ TargetNodeID: 0,
+ },
+ testUtils.SubscribeToCollection{
+ NodeID: 1,
+ CollectionIDs: []int{0},
+ },
+ testUtils.CreateDoc{
+ NodeID: immutable.Some(0),
+ Identity: testUtils.ClientIdentity(0),
+ Doc: `
+ {
+ "name": "Fred",
+ "age": 33
+ }
+ `,
+ IsDocEncrypted: true,
+ },
+ testUtils.AddDocActorRelationship{
+ RequestorIdentity: testUtils.ClientIdentity(0),
+ TargetIdentity: testUtils.ClientIdentity(1),
+ DocID: 0,
+ Relation: "reader",
+ },
+ testUtils.AddDocActorRelationship{
+ RequestorIdentity: testUtils.ClientIdentity(0),
+ TargetIdentity: testUtils.NodeIdentity(1),
+ DocID: 0,
+ Relation: "reader",
+ },
+ testUtils.WaitForSync{
+ Decrypted: []int{0},
+ },
+ testUtils.Request{
+ NodeID: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
+ Request: `
+ query {
+ Users {
+ name
+ }
+ }
+ `,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {"name": "Fred"},
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestDocEncryptionACP_IfUserHasAccessButNotNode_ShouldNotFetch(t *testing.T) {
+ expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4"
+
+ test := testUtils.TestCase{
+ KMS: testUtils.KMS{Activated: true},
+ SupportedACPTypes: immutable.Some(
+ []testUtils.ACPType{
+ testUtils.SourceHubACPType,
+ },
+ ),
+ Actions: []any{
+ testUtils.RandomNetworkingConfig(),
+ testUtils.RandomNetworkingConfig(),
+ testUtils.AddPolicy{
+ Identity: testUtils.ClientIdentity(0),
+ Policy: policy,
+ ExpectedPolicyID: expectedPolicyID,
+ },
+ testUtils.SchemaUpdate{
+ Schema: fmt.Sprintf(`
+ type Users @policy(
+ id: "%s",
+ resource: "users"
+ ) {
+ name: String
+ age: Int
+ }
+ `,
+ expectedPolicyID,
+ ),
+ },
+ testUtils.ConnectPeers{
+ SourceNodeID: 1,
+ TargetNodeID: 0,
+ },
+ testUtils.SubscribeToCollection{
+ NodeID: 1,
+ CollectionIDs: []int{0},
+ },
+ testUtils.CreateDoc{
+ NodeID: immutable.Some(0),
+ Identity: testUtils.ClientIdentity(0),
+ Doc: `
+ {
+ "name": "Fred",
+ "age": 33
+ }
+ `,
+ IsDocEncrypted: true,
+ },
+ testUtils.AddDocActorRelationship{
+ RequestorIdentity: testUtils.ClientIdentity(0),
+ TargetIdentity: testUtils.ClientIdentity(1),
+ DocID: 0,
+ Relation: "reader",
+ },
+ testUtils.Wait{Duration: 100 * time.Millisecond},
+ testUtils.Request{
+ NodeID: immutable.Some(1),
+ Identity: testUtils.ClientIdentity(1),
+ Request: `
+ query {
+ Users {
+ name
+ }
+ }
+ `,
+ Results: map[string]any{
+ "Users": []map[string]any{},
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestDocEncryptionACP_IfNodeHasAccessToSomeDocs_ShouldFetchOnlyThem(t *testing.T) {
+ expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4"
+
+ test := testUtils.TestCase{
+ KMS: testUtils.KMS{Activated: true},
+ SupportedACPTypes: immutable.Some(
+ []testUtils.ACPType{
+ testUtils.SourceHubACPType,
+ },
+ ),
+ Actions: []any{
+ testUtils.RandomNetworkingConfig(),
+ testUtils.RandomNetworkingConfig(),
+ testUtils.AddPolicy{
+ Identity: testUtils.NodeIdentity(0),
+ Policy: policy,
+ ExpectedPolicyID: expectedPolicyID,
+ },
+ testUtils.SchemaUpdate{
+ Schema: fmt.Sprintf(`
+ type Users @policy(
+ id: "%s",
+ resource: "users"
+ ) {
+ name: String
+ age: Int
+ }
+ `,
+ expectedPolicyID,
+ ),
+ },
+ testUtils.ConnectPeers{
+ SourceNodeID: 1,
+ TargetNodeID: 0,
+ },
+ testUtils.SubscribeToCollection{
+ NodeID: 1,
+ CollectionIDs: []int{0},
+ },
+ // encrypted, private, shared
+ testUtils.CreateDoc{
+ NodeID: immutable.Some(0),
+ Identity: testUtils.NodeIdentity(0),
+ Doc: `
+ {
+ "name": "Fred",
+ "age": 33
+ }
+ `,
+ IsDocEncrypted: true,
+ },
+ testUtils.AddDocActorRelationship{
+ RequestorIdentity: testUtils.NodeIdentity(0),
+ TargetIdentity: testUtils.NodeIdentity(1),
+ DocID: 0,
+ Relation: "reader",
+ },
+ // encrypted, private, not shared
+ testUtils.CreateDoc{
+ NodeID: immutable.Some(0),
+ Identity: testUtils.NodeIdentity(0),
+ Doc: `
+ {
+ "name": "Andy",
+ "age": 33
+ }
+ `,
+ IsDocEncrypted: true,
+ },
+ // encrypted, public
+ testUtils.CreateDoc{
+ NodeID: immutable.Some(0),
+ Doc: `
+ {
+ "name": "Islam",
+ "age": 33
+ }
+ `,
+ IsDocEncrypted: true,
+ },
+ // not encrypted, private, shared
+ testUtils.CreateDoc{
+ NodeID: immutable.Some(0),
+ Identity: testUtils.NodeIdentity(0),
+ Doc: `
+ {
+ "name": "John",
+ "age": 33
+ }
+ `,
+ },
+ testUtils.AddDocActorRelationship{
+ RequestorIdentity: testUtils.NodeIdentity(0),
+ TargetIdentity: testUtils.NodeIdentity(1),
+ DocID: 3,
+ Relation: "reader",
+ },
+ // not encrypted, private, not shared
+ testUtils.CreateDoc{
+ NodeID: immutable.Some(0),
+ Identity: testUtils.NodeIdentity(0),
+ Doc: `
+ {
+ "name": "Keenan",
+ "age": 33
+ }
+ `,
+ },
+ // not encrypted, public
+ testUtils.CreateDoc{
+ NodeID: immutable.Some(0),
+ Doc: `
+ {
+ "name": "Shahzad",
+ "age": 33
+ }
+ `,
+ },
+ testUtils.WaitForSync{
+ Decrypted: []int{0, 2},
+ },
+ testUtils.Request{
+ NodeID: immutable.Some(1),
+ Identity: testUtils.NodeIdentity(1),
+ Request: `
+ query {
+ Users {
+ name
+ }
+ }
+ `,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {"name": "John"},
+ {"name": "Islam"},
+ {"name": "Shahzad"},
+ {"name": "Fred"},
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestDocEncryptionACP_IfClientNodeHasDocPermissionButServerNodeIsNotAvailable_ShouldNotFetch(t *testing.T) {
+ expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4"
+
+ test := testUtils.TestCase{
+ KMS: testUtils.KMS{Activated: true},
+ SupportedACPTypes: immutable.Some(
+ []testUtils.ACPType{
+ testUtils.SourceHubACPType,
+ },
+ ),
+ Actions: []any{
+ testUtils.RandomNetworkingConfig(),
+ testUtils.RandomNetworkingConfig(),
+ testUtils.RandomNetworkingConfig(),
+ testUtils.AddPolicy{
+ Identity: testUtils.NodeIdentity(0),
+ Policy: policy,
+ ExpectedPolicyID: expectedPolicyID,
+ },
+ testUtils.SchemaUpdate{
+ Schema: fmt.Sprintf(`
+ type Users @policy(
+ id: "%s",
+ resource: "users"
+ ) {
+ name: String
+ age: Int
+ }
+ `,
+ expectedPolicyID,
+ ),
+ },
+ testUtils.ConnectPeers{
+ SourceNodeID: 1,
+ TargetNodeID: 0,
+ },
+ testUtils.SubscribeToCollection{
+ NodeID: 1,
+ CollectionIDs: []int{0},
+ },
+ testUtils.ConnectPeers{
+ SourceNodeID: 2,
+ TargetNodeID: 0,
+ },
+ testUtils.SubscribeToCollection{
+ NodeID: 2,
+ CollectionIDs: []int{0},
+ },
+ testUtils.CreateDoc{
+ NodeID: immutable.Some(0),
+ Identity: testUtils.NodeIdentity(0),
+ Doc: `
+ {
+ "name": "Fred",
+ "age": 33
+ }
+ `,
+ IsDocEncrypted: true,
+ },
+ testUtils.WaitForSync{},
+ testUtils.Close{
+ NodeID: immutable.Some(0),
+ },
+ testUtils.AddDocActorRelationship{
+ NodeID: immutable.Some(1),
+ RequestorIdentity: testUtils.NodeIdentity(0),
+ TargetIdentity: testUtils.NodeIdentity(1),
+ DocID: 0,
+ Relation: "reader",
+ },
+ testUtils.Wait{
+ Duration: 100 * time.Millisecond,
+ },
+ testUtils.Request{
+ NodeID: immutable.Some(1),
+ Identity: testUtils.NodeIdentity(1),
+ Request: `
+ query {
+ Users {
+ name
+ }
+ }
+ `,
+ Results: map[string]any{
+ "Users": []map[string]any{},
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/events.go b/tests/integration/events.go
index 6129d600ee..1fbc64416e 100644
--- a/tests/integration/events.go
+++ b/tests/integration/events.go
@@ -74,7 +74,7 @@ func waitForReplicatorConfigureEvent(s *state, cfg ConfigureReplicator) {
// all previous documents should be merged on the subscriber node
for key, val := range s.nodeP2P[cfg.SourceNodeID].actualDocHeads {
- s.nodeP2P[cfg.TargetNodeID].expectedDocHeads[key] = val
+ s.nodeP2P[cfg.TargetNodeID].expectedDocHeads[key] = val.cid
}
// update node connections and replicators
@@ -160,6 +160,10 @@ func waitForUpdateEvents(
continue // node is not selected
}
+ if _, ok := s.closedNodes[i]; ok {
+ continue // node is closed
+ }
+
expect := make(map[string]struct{}, len(docIDs))
for k := range docIDs {
expect[k] = struct{}{}
@@ -170,17 +174,17 @@ func waitForUpdateEvents(
select {
case msg, ok := <-s.nodeEvents[i].update.Message():
if !ok {
- require.Fail(s.t, "subscription closed waiting for update event")
+ require.Fail(s.t, "subscription closed waiting for update event", "Node %d", i)
}
evt = msg.Data.(event.Update)
case <-time.After(eventTimeout):
- require.Fail(s.t, "timeout waiting for update event")
+ require.Fail(s.t, "timeout waiting for update event", "Node %d", i)
}
// make sure the event is expected
_, ok := expect[evt.DocID]
- require.True(s.t, ok, "unexpected document update")
+ require.True(s.t, ok, "unexpected document update", "Node %d", i)
delete(expect, evt.DocID)
// we only need to update the network state if the nodes
@@ -196,41 +200,63 @@ func waitForUpdateEvents(
//
// Will fail the test if an event is not received within the expected time interval to prevent tests
// from running forever.
-func waitForMergeEvents(s *state) {
+func waitForMergeEvents(s *state, action WaitForSync) {
for nodeID := 0; nodeID < len(s.nodes); nodeID++ {
+ if _, ok := s.closedNodes[nodeID]; ok {
+ continue // node is closed
+ }
+
expect := s.nodeP2P[nodeID].expectedDocHeads
// remove any docs that are already merged
// up to the expected document head
for key, val := range s.nodeP2P[nodeID].actualDocHeads {
- if head, ok := expect[key]; ok && head.String() == val.String() {
+ if head, ok := expect[key]; ok && head.String() == val.cid.String() {
delete(expect, key)
}
}
+ expectDecrypted := make(map[string]struct{}, len(action.Decrypted))
+ for _, docIndex := range action.Decrypted {
+ if len(s.docIDs[0]) <= docIndex {
+ require.Fail(s.t, "doc index %d out of range", docIndex)
+ }
+ docID := s.docIDs[0][docIndex].String()
+ actual, hasActual := s.nodeP2P[nodeID].actualDocHeads[docID]
+ if !hasActual || !actual.decrypted {
+ expectDecrypted[docID] = struct{}{}
+ }
+ }
+
// wait for all expected doc heads to be merged
//
// the order of merges does not matter as we only
// expect the latest head to eventually be merged
//
// unexpected merge events are ignored
- for len(expect) > 0 {
- var evt event.Merge
+ for len(expect) > 0 || len(expectDecrypted) > 0 {
+ var evt event.MergeComplete
select {
case msg, ok := <-s.nodeEvents[nodeID].merge.Message():
if !ok {
require.Fail(s.t, "subscription closed waiting for merge complete event")
}
- evt = msg.Data.(event.Merge)
+ evt = msg.Data.(event.MergeComplete)
case <-time.After(30 * eventTimeout):
require.Fail(s.t, "timeout waiting for merge complete event")
}
- head, ok := expect[evt.DocID]
- if ok && head.String() == evt.Cid.String() {
- delete(expect, evt.DocID)
+ _, ok := expectDecrypted[evt.Merge.DocID]
+ if ok && evt.Decrypted {
+ delete(expectDecrypted, evt.Merge.DocID)
+ }
+
+ head, ok := expect[evt.Merge.DocID]
+ if ok && head.String() == evt.Merge.Cid.String() {
+ delete(expect, evt.Merge.DocID)
}
+ s.nodeP2P[nodeID].actualDocHeads[evt.Merge.DocID] = docHeadState{cid: evt.Merge.Cid, decrypted: evt.Decrypted}
}
}
}
@@ -247,7 +273,8 @@ func updateNetworkState(s *state, nodeID int, evt event.Update) {
}
// update the actual document head on the node that updated it
- s.nodeP2P[nodeID].actualDocHeads[evt.DocID] = evt.Cid
+ // as the node created the document, it is already decrypted
+ s.nodeP2P[nodeID].actualDocHeads[evt.DocID] = docHeadState{cid: evt.Cid, decrypted: true}
// update the expected document heads of replicator targets
for id := range s.nodeP2P[nodeID].replicators {
@@ -309,8 +336,8 @@ func getEventsForCreateDoc(s *state, action CreateDoc) map[string]struct{} {
return expect
}
-func waitForSync(s *state) {
- waitForMergeEvents(s)
+func waitForSync(s *state, action WaitForSync) {
+ waitForMergeEvents(s, action)
}
// getEventsForUpdateWithFilter returns a map of docIDs that should be
diff --git a/tests/integration/p2p.go b/tests/integration/p2p.go
index 7c5b20e69a..87e224dce4 100644
--- a/tests/integration/p2p.go
+++ b/tests/integration/p2p.go
@@ -133,7 +133,10 @@ type GetAllP2PCollections struct {
//
// For example you will likely wish to `WaitForSync` after creating a document in node 0 before querying
// node 1 to see if it has been replicated.
-type WaitForSync struct{}
+type WaitForSync struct {
+ // Decrypted is a list of document indexes that are expected to be merged and synced decrypted.
+ Decrypted []int
+}
// connectPeers connects two existing, started, nodes as peers. It returns a channel
// that will receive an empty struct upon sync completion of all expected peer-sync events.
diff --git a/tests/integration/state.go b/tests/integration/state.go
index b4a3777d03..e7130f2ebd 100644
--- a/tests/integration/state.go
+++ b/tests/integration/state.go
@@ -45,7 +45,7 @@ type p2pState struct {
// actualDocHeads contains all document heads that exist on a node.
//
// The map key is the doc id. The map value is the doc head.
- actualDocHeads map[string]cid.Cid
+ actualDocHeads map[string]docHeadState
// expectedDocHeads contains all document heads that are expected to exist on a node.
//
@@ -53,13 +53,22 @@ type p2pState struct {
expectedDocHeads map[string]cid.Cid
}
+// docHeadState contains the state of a document head.
+// It is used to track if a document at a certain head has been decrypted.
+type docHeadState struct {
+ // The actual document head.
+ cid cid.Cid
+ // Indicates if the document at the given head has been decrypted.
+ decrypted bool
+}
+
// newP2PState returns a new empty p2p state.
func newP2PState() *p2pState {
return &p2pState{
connections: make(map[int]struct{}),
replicators: make(map[int]struct{}),
peerCollections: make(map[int]struct{}),
- actualDocHeads: make(map[string]cid.Cid),
+ actualDocHeads: make(map[string]docHeadState),
expectedDocHeads: make(map[string]cid.Cid),
}
}
@@ -156,6 +165,9 @@ type state struct {
// The nodes active in this test.
nodes []clients.Client
+ // closedNodes contains the indexes of nodes that have been closed.
+ closedNodes map[int]struct{}
+
// nodeP2P contains p2p states for all nodes
nodeP2P []*p2pState
@@ -223,6 +235,7 @@ func newState(
nodeConfigs: [][]net.NodeOpt{},
nodeP2P: []*p2pState{},
nodes: []clients.Client{},
+ closedNodes: map[int]struct{}{},
dbPaths: []string{},
collections: [][]client.Collection{},
collectionNames: collectionNames,
diff --git a/tests/integration/test_case.go b/tests/integration/test_case.go
index e1c9b0b6f1..a1ab291257 100644
--- a/tests/integration/test_case.go
+++ b/tests/integration/test_case.go
@@ -12,6 +12,7 @@ package tests
import (
"testing"
+ "time"
"github.com/lens-vm/lens/host-go/config/model"
"github.com/sourcenetwork/immutable"
@@ -806,3 +807,9 @@ type GetNodeIdentity struct {
// Default value is `NoIdentity()`.
ExpectedIdentity immutable.Option[identityRef]
}
+
+// Wait is an action that will wait for the given duration.
+type Wait struct {
+ // Duration is the duration to wait.
+ Duration time.Duration
+}
diff --git a/tests/integration/utils.go b/tests/integration/utils.go
index aff1ebecb7..f827ac0130 100644
--- a/tests/integration/utils.go
+++ b/tests/integration/utils.go
@@ -400,7 +400,10 @@ func performAction(
assertClientIntrospectionResults(s, action)
case WaitForSync:
- waitForSync(s)
+ waitForSync(s, action)
+
+ case Wait:
+ <-time.After(action.Duration)
case Benchmark:
benchmarkAction(s, actionIndex, action)
@@ -574,9 +577,10 @@ func closeNodes(
s *state,
action Close,
) {
- _, nodes := getNodesWithIDs(action.NodeID, s.nodes)
- for _, node := range nodes {
+ nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for i, node := range nodes {
node.Close()
+ s.closedNodes[nodeIDs[i]] = struct{}{}
}
}
@@ -781,6 +785,8 @@ func startNodes(s *state, action Start) {
require.NoError(s.t, err)
s.nodeEvents[nodeIndex] = eventState
+ delete(s.closedNodes, nodeIndex)
+
waitForNetworkSetupEvents(s, i)
}
From 85bbdc082566c1fe9ceb0a29a2fe9950ff7e48a4 Mon Sep 17 00:00:00 2001
From: AndrewSisley
Date: Fri, 8 Nov 2024 15:53:32 -0500
Subject: [PATCH 18/47] feat: Add support for branchable collections (#3216)
## Relevant issue(s)
Resolves #3038
## Description
Adds support for branchable collections.
Does not add support for syncing the collection level commits via the
P2P system (broken out, lots of people working in the space, significant
changes required, I'm not so familiar with that part of the code so will
take longer). This does mean that the somewhat surprising (to me at
least) implementation of `Collection.Merge` is currently untested.
Commits are queriable via the `commits` GQL queries.
Time travel queries do not work due to an existing bug:
https://github.com/sourcenetwork/defradb/issues/3214 - once that bug is
fixed I expect there to be some more work (definitely testing) in order
to get it to work with branchable collections.
This is a breaking change due to the moving/namespacing of the existing
document headstore keys.
---
client/collection_description.go | 17 ++
.../i3038-branchable-collections.md | 3 +
docs/website/references/http/openapi.json | 6 +
internal/core/block/block.go | 1 +
internal/core/crdt/collection.go | 75 ++++++++
internal/core/crdt/composite.go | 2 +-
internal/core/crdt/ipld_union.go | 22 +++
internal/db/collection.go | 23 +++
internal/db/collection_delete.go | 24 +++
internal/db/definition_validation.go | 21 +++
internal/db/errors.go | 9 +
internal/db/fetcher/dag.go | 40 ++++-
internal/db/fetcher/versioned.go | 2 +-
internal/db/iterator.go | 2 +-
internal/db/merge.go | 2 +-
internal/keys/datastore_doc.go | 4 +-
internal/keys/headstore.go | 47 +++++
internal/keys/headstore_collection.go | 109 ++++++++++++
internal/keys/headstore_doc.go | 44 ++---
internal/merkle/clock/clock.go | 4 +-
internal/merkle/clock/clock_test.go | 4 +-
internal/merkle/clock/heads.go | 10 +-
internal/merkle/clock/heads_test.go | 2 +-
internal/merkle/crdt/collection.go | 53 ++++++
internal/merkle/crdt/composite.go | 4 +-
internal/planner/commit.go | 46 +++--
internal/planner/select.go | 4 +-
internal/request/graphql/parser/commit.go | 4 +-
internal/request/graphql/schema/collection.go | 18 ++
internal/request/graphql/schema/schema.go | 1 +
.../graphql/schema/types/descriptions.go | 13 +-
.../request/graphql/schema/types/types.go | 30 ++++
net/server_test.go | 2 +-
.../collection_description/branchable_test.go | 92 ++++++++++
.../updates/replace/branchable_test.go | 65 +++++++
tests/integration/events.go | 6 +
.../explain/default/dagscan_test.go | 16 +-
.../commits/branchables/cid_doc_id_test.go | 56 ++++++
.../query/commits/branchables/cid_test.go | 63 +++++++
.../query/commits/branchables/create_test.go | 117 +++++++++++++
.../query/commits/branchables/delete_test.go | 103 +++++++++++
.../commits/branchables/field_id_test.go | 63 +++++++
.../query/commits/branchables/if_test.go | 108 ++++++++++++
.../query/commits/branchables/peer_test.go | 133 +++++++++++++++
.../query/commits/branchables/simple_test.go | 161 ++++++++++++++++++
.../query/commits/branchables/update_test.go | 116 +++++++++++++
.../query/commits/with_null_input_test.go | 12 +-
.../integration/query/simple/with_cid_test.go | 50 ++++++
tests/integration/results.go | 1 +
49 files changed, 1718 insertions(+), 92 deletions(-)
create mode 100644 docs/data_format_changes/i3038-branchable-collections.md
create mode 100644 internal/core/crdt/collection.go
create mode 100644 internal/keys/headstore.go
create mode 100644 internal/keys/headstore_collection.go
create mode 100644 internal/merkle/crdt/collection.go
create mode 100644 tests/integration/collection_description/branchable_test.go
create mode 100644 tests/integration/collection_description/updates/replace/branchable_test.go
create mode 100644 tests/integration/query/commits/branchables/cid_doc_id_test.go
create mode 100644 tests/integration/query/commits/branchables/cid_test.go
create mode 100644 tests/integration/query/commits/branchables/create_test.go
create mode 100644 tests/integration/query/commits/branchables/delete_test.go
create mode 100644 tests/integration/query/commits/branchables/field_id_test.go
create mode 100644 tests/integration/query/commits/branchables/if_test.go
create mode 100644 tests/integration/query/commits/branchables/peer_test.go
create mode 100644 tests/integration/query/commits/branchables/simple_test.go
create mode 100644 tests/integration/query/commits/branchables/update_test.go
diff --git a/client/collection_description.go b/client/collection_description.go
index d86a252644..236cf1de6e 100644
--- a/client/collection_description.go
+++ b/client/collection_description.go
@@ -88,6 +88,21 @@ type CollectionDescription struct {
// At the moment this can only be set to `false` if this collection sources its data from
// another collection/query (is a View).
IsMaterialized bool
+
+ // IsBranchable defines whether the history of this collection is tracked as a single,
+ // verifiable entity.
+ //
+ // If set to `true` any change to the contents of this set will be linked to a collection
+ // level commit via the document(s) composite commit.
+ //
+ // This enables multiple nodes to verify that they have the same state/history.
+ //
+ // The history may be queried like a document history can be queried, for example via 'commits'
+ // GQL queries.
+ //
+ // Currently this property is immutable and can only be set on collection creation, however
+ // that will change in the future.
+ IsBranchable bool
}
// QuerySource represents a collection data source from a query.
@@ -189,6 +204,7 @@ type collectionDescription struct {
RootID uint32
SchemaVersionID string
IsMaterialized bool
+ IsBranchable bool
Policy immutable.Option[PolicyDescription]
Indexes []IndexDescription
Fields []CollectionFieldDescription
@@ -209,6 +225,7 @@ func (c *CollectionDescription) UnmarshalJSON(bytes []byte) error {
c.RootID = descMap.RootID
c.SchemaVersionID = descMap.SchemaVersionID
c.IsMaterialized = descMap.IsMaterialized
+ c.IsBranchable = descMap.IsBranchable
c.Indexes = descMap.Indexes
c.Fields = descMap.Fields
c.Sources = make([]any, len(descMap.Sources))
diff --git a/docs/data_format_changes/i3038-branchable-collections.md b/docs/data_format_changes/i3038-branchable-collections.md
new file mode 100644
index 0000000000..0dff26c3d3
--- /dev/null
+++ b/docs/data_format_changes/i3038-branchable-collections.md
@@ -0,0 +1,3 @@
+# Add support for branchable collections
+
+The existing keys in the headstore gained a '/d' prefix in order to accommodate new types of keys within the headstore.
diff --git a/docs/website/references/http/openapi.json b/docs/website/references/http/openapi.json
index a6795d6959..03c6a6513d 100644
--- a/docs/website/references/http/openapi.json
+++ b/docs/website/references/http/openapi.json
@@ -195,6 +195,9 @@
},
"type": "array"
},
+ "IsBranchable": {
+ "type": "boolean"
+ },
"IsMaterialized": {
"type": "boolean"
},
@@ -276,6 +279,9 @@
},
"type": "array"
},
+ "IsBranchable": {
+ "type": "boolean"
+ },
"IsMaterialized": {
"type": "boolean"
},
diff --git a/internal/core/block/block.go b/internal/core/block/block.go
index e930816030..3b9b4815be 100644
--- a/internal/core/block/block.go
+++ b/internal/core/block/block.go
@@ -44,6 +44,7 @@ func init() {
&crdt.LWWRegDelta{},
&crdt.CompositeDAGDelta{},
&crdt.CounterDelta{},
+ &crdt.CollectionDelta{},
)
EncryptionSchema, EncryptionSchemaPrototype = mustSetSchema(
diff --git a/internal/core/crdt/collection.go b/internal/core/crdt/collection.go
new file mode 100644
index 0000000000..4cf7ebf62d
--- /dev/null
+++ b/internal/core/crdt/collection.go
@@ -0,0 +1,75 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package crdt
+
+import (
+ "context"
+
+ "github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
+)
+
+// Collection is a simple CRDT type that tracks changes to the contents of a
+// collection in a similar way to a document composite commit, only simpler:
+// it does not need to track status and has a simpler [Merge] function.
+type Collection struct {
+ // schemaVersionKey is the schema version datastore key at the time of commit.
+ //
+ // It can be used to identify the collection datastructure state at the time of commit.
+ schemaVersionKey keys.CollectionSchemaVersionKey
+}
+
+var _ core.ReplicatedData = (*Collection)(nil)
+
+func NewCollection(schemaVersionKey keys.CollectionSchemaVersionKey) *Collection {
+ return &Collection{
+ schemaVersionKey: schemaVersionKey,
+ }
+}
+
+func (c *Collection) Merge(ctx context.Context, other core.Delta) error {
+ // Collection merges don't actually need to do anything, as the delta is empty,
+ // and doc-level merges are handled by the document commits.
+ return nil
+}
+
+func (c *Collection) NewDelta() *CollectionDelta {
+ return &CollectionDelta{
+ SchemaVersionID: c.schemaVersionKey.SchemaVersionID,
+ }
+}
+
+type CollectionDelta struct {
+ Priority uint64
+
+ // As we do not yet have a global collection id we temporarily rely on the schema
+ // version id for tracking which collection this belongs to. See:
+ // https://github.com/sourcenetwork/defradb/issues/3215
+ SchemaVersionID string
+}
+
+var _ core.Delta = (*CollectionDelta)(nil)
+
+func (delta *CollectionDelta) IPLDSchemaBytes() []byte {
+ return []byte(`
+ type CollectionDelta struct {
+ priority Int
+ schemaVersionID String
+ }`)
+}
+
+func (d *CollectionDelta) GetPriority() uint64 {
+ return d.Priority
+}
+
+func (d *CollectionDelta) SetPriority(priority uint64) {
+ d.Priority = priority
+}
diff --git a/internal/core/crdt/composite.go b/internal/core/crdt/composite.go
index 510d47d7e4..7fed96873c 100644
--- a/internal/core/crdt/composite.go
+++ b/internal/core/crdt/composite.go
@@ -101,7 +101,7 @@ func NewCompositeDAG(
}
// Set returns a new composite DAG delta CRDT with the given status.
-func (c CompositeDAG) Set(status client.DocumentStatus) *CompositeDAGDelta {
+func (c CompositeDAG) NewDelta(status client.DocumentStatus) *CompositeDAGDelta {
return &CompositeDAGDelta{
DocID: []byte(c.key.DocID),
SchemaVersionID: c.schemaVersionKey.SchemaVersionID,
diff --git a/internal/core/crdt/ipld_union.go b/internal/core/crdt/ipld_union.go
index 28c9ccf420..8187b626a9 100644
--- a/internal/core/crdt/ipld_union.go
+++ b/internal/core/crdt/ipld_union.go
@@ -17,6 +17,7 @@ type CRDT struct {
LWWRegDelta *LWWRegDelta
CompositeDAGDelta *CompositeDAGDelta
CounterDelta *CounterDelta
+ CollectionDelta *CollectionDelta
}
// NewCRDT returns a new CRDT.
@@ -28,6 +29,8 @@ func NewCRDT(delta core.Delta) CRDT {
return CRDT{CompositeDAGDelta: d}
case *CounterDelta:
return CRDT{CounterDelta: d}
+ case *CollectionDelta:
+ return CRDT{CollectionDelta: d}
}
return CRDT{}
}
@@ -41,6 +44,7 @@ func (c CRDT) IPLDSchemaBytes() []byte {
| LWWRegDelta "lww"
| CompositeDAGDelta "composite"
| CounterDelta "counter"
+ | CollectionDelta "collection"
} representation keyed`)
}
@@ -53,6 +57,8 @@ func (c CRDT) GetDelta() core.Delta {
return c.CompositeDAGDelta
case c.CounterDelta != nil:
return c.CounterDelta
+ case c.CollectionDelta != nil:
+ return c.CollectionDelta
}
return nil
}
@@ -66,6 +72,8 @@ func (c CRDT) GetPriority() uint64 {
return c.CompositeDAGDelta.GetPriority()
case c.CounterDelta != nil:
return c.CounterDelta.GetPriority()
+ case c.CollectionDelta != nil:
+ return c.CollectionDelta.GetPriority()
}
return 0
}
@@ -90,6 +98,8 @@ func (c CRDT) GetDocID() []byte {
return c.CompositeDAGDelta.DocID
case c.CounterDelta != nil:
return c.CounterDelta.DocID
+ case c.CollectionDelta != nil:
+ return nil
}
return nil
}
@@ -103,6 +113,8 @@ func (c CRDT) GetSchemaVersionID() string {
return c.CompositeDAGDelta.SchemaVersionID
case c.CounterDelta != nil:
return c.CounterDelta.SchemaVersionID
+ case c.CollectionDelta != nil:
+ return c.CollectionDelta.SchemaVersionID
}
return ""
}
@@ -135,6 +147,11 @@ func (c CRDT) Clone() CRDT {
Nonce: c.CounterDelta.Nonce,
Data: c.CounterDelta.Data,
}
+ case c.CollectionDelta != nil:
+ cloned.CollectionDelta = &CollectionDelta{
+ Priority: c.CollectionDelta.Priority,
+ SchemaVersionID: c.CollectionDelta.SchemaVersionID,
+ }
}
return cloned
}
@@ -172,3 +189,8 @@ func (c CRDT) SetData(data []byte) {
func (c CRDT) IsComposite() bool {
return c.CompositeDAGDelta != nil
}
+
+// IsCollection returns true if the CRDT is a collection CRDT.
+func (c CRDT) IsCollection() bool {
+ return c.CollectionDelta != nil
+}
diff --git a/internal/db/collection.go b/internal/db/collection.go
index 8d71c7aff6..1e143e56cd 100644
--- a/internal/db/collection.go
+++ b/internal/db/collection.go
@@ -711,6 +711,29 @@ func (c *collection) save(
doc.SetHead(link.Cid)
})
+ if c.def.Description.IsBranchable {
+ collectionCRDT := merklecrdt.NewMerkleCollection(
+ txn,
+ keys.NewCollectionSchemaVersionKey(c.Schema().VersionID, c.ID()),
+ keys.NewHeadstoreColKey(c.def.Description.RootID),
+ )
+
+ link, headNode, err := collectionCRDT.Save(ctx, []coreblock.DAGLink{{Link: link}})
+ if err != nil {
+ return err
+ }
+
+ updateEvent := event.Update{
+ Cid: link.Cid,
+ SchemaRoot: c.Schema().Root,
+ Block: headNode,
+ }
+
+ txn.OnSuccess(func() {
+ c.db.events.Publish(event.NewMessage(event.UpdateName, updateEvent))
+ })
+ }
+
return nil
}
diff --git a/internal/db/collection_delete.go b/internal/db/collection_delete.go
index b0bf933dda..a3963be2f4 100644
--- a/internal/db/collection_delete.go
+++ b/internal/db/collection_delete.go
@@ -17,6 +17,7 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/event"
"github.com/sourcenetwork/defradb/internal/core"
+ coreblock "github.com/sourcenetwork/defradb/internal/core/block"
"github.com/sourcenetwork/defradb/internal/keys"
merklecrdt "github.com/sourcenetwork/defradb/internal/merkle/crdt"
)
@@ -162,5 +163,28 @@ func (c *collection) applyDelete(
c.db.events.Publish(event.NewMessage(event.UpdateName, updateEvent))
})
+ if c.def.Description.IsBranchable {
+ collectionCRDT := merklecrdt.NewMerkleCollection(
+ txn,
+ keys.NewCollectionSchemaVersionKey(c.Schema().VersionID, c.ID()),
+ keys.NewHeadstoreColKey(c.def.Description.RootID),
+ )
+
+ link, headNode, err := collectionCRDT.Save(ctx, []coreblock.DAGLink{{Link: link}})
+ if err != nil {
+ return err
+ }
+
+ updateEvent := event.Update{
+ Cid: link.Cid,
+ SchemaRoot: c.Schema().Root,
+ Block: headNode,
+ }
+
+ txn.OnSuccess(func() {
+ c.db.events.Publish(event.NewMessage(event.UpdateName, updateEvent))
+ })
+ }
+
return nil
}
diff --git a/internal/db/definition_validation.go b/internal/db/definition_validation.go
index 7afef5dd13..1613340e00 100644
--- a/internal/db/definition_validation.go
+++ b/internal/db/definition_validation.go
@@ -146,6 +146,7 @@ var collectionUpdateValidators = append(
validateIDExists,
validateSchemaVersionIDNotMutated,
validateCollectionNotRemoved,
+ validateCollectionIsBranchableNotMutated,
),
globalValidators...,
)
@@ -1036,3 +1037,23 @@ func validateCollectionFieldDefaultValue(
return nil
}
+
+// validateCollectionIsBranchableNotMutated enforces a temporary restriction that
+// prevents users from toggling whether or not a collection is branchable.
+// https://github.com/sourcenetwork/defradb/issues/3219
+func validateCollectionIsBranchableNotMutated(
+ ctx context.Context,
+ db *db,
+ newState *definitionState,
+ oldState *definitionState,
+) error {
+ for _, newCol := range newState.collections {
+ oldCol := oldState.collectionsByID[newCol.ID]
+
+ if newCol.IsBranchable != oldCol.IsBranchable {
+ return NewErrColMutatingIsBranchable(newCol.Name.Value())
+ }
+ }
+
+ return nil
+}
diff --git a/internal/db/errors.go b/internal/db/errors.go
index 1bc200f2b4..6eb1d3a2d5 100644
--- a/internal/db/errors.go
+++ b/internal/db/errors.go
@@ -104,6 +104,7 @@ const (
errFailedToHandleEncKeysReceivedEvent string = "failed to handle encryption-keys-received event"
errSelfReferenceWithoutSelf string = "must specify 'Self' kind for self referencing relations"
errColNotMaterialized string = "non-materialized collections are not supported"
+ errColMutatingIsBranchable string = "mutating IsBranchable is not supported"
errMaterializedViewAndACPNotSupported string = "materialized views do not support ACP"
errInvalidDefaultFieldValue string = "default field value is invalid"
errDocIDNotFound string = "docID not found"
@@ -156,6 +157,7 @@ var (
ErrTimeoutDocRetry = errors.New("timeout while retrying doc")
ErrDocIDNotFound = errors.New(errDocIDNotFound)
ErrorCollectionWithSchemaRootNotFound = errors.New(errCollectionWithSchemaRootNotFound)
+ ErrColMutatingIsBranchable = errors.New(errColMutatingIsBranchable)
)
// NewErrFailedToGetHeads returns a new error indicating that the heads of a document
@@ -680,6 +682,13 @@ func NewErrColNotMaterialized(collection string) error {
)
}
+func NewErrColMutatingIsBranchable(collection string) error {
+ return errors.New(
+ errColMutatingIsBranchable,
+ errors.NewKV("Collection", collection),
+ )
+}
+
func NewErrMaterializedViewAndACPNotSupported(collection string) error {
return errors.New(
errMaterializedViewAndACPNotSupported,
diff --git a/internal/db/fetcher/dag.go b/internal/db/fetcher/dag.go
index 723b821a97..2724425ff6 100644
--- a/internal/db/fetcher/dag.go
+++ b/internal/db/fetcher/dag.go
@@ -28,16 +28,27 @@ type HeadFetcher struct {
kvIter dsq.Results
}
+// Start starts/initializes the fetcher, performing all the work it can do outside
+// of the main iteration loop/funcs.
+//
+// prefix - Optional. The headstore prefix to scan across. If None, the entire
+// headstore will be scanned - for example, in order to fetch document and collection
+// heads.
func (hf *HeadFetcher) Start(
ctx context.Context,
txn datastore.Txn,
- prefix keys.HeadStoreKey,
+ prefix immutable.Option[keys.HeadstoreKey],
fieldId immutable.Option[string],
) error {
hf.fieldId = fieldId
+ var prefixString string
+ if prefix.HasValue() {
+ prefixString = prefix.Value().ToString()
+ }
+
q := dsq.Query{
- Prefix: prefix.ToString(),
+ Prefix: prefixString,
Orders: []dsq.Order{dsq.OrderByKey{}},
}
@@ -64,17 +75,32 @@ func (hf *HeadFetcher) FetchNext() (*cid.Cid, error) {
return nil, nil
}
- headStoreKey, err := keys.NewHeadStoreKey(res.Key)
+ headStoreKey, err := keys.NewHeadstoreKey(res.Key)
if err != nil {
return nil, err
}
- if hf.fieldId.HasValue() && hf.fieldId.Value() != headStoreKey.FieldID {
- // FieldIds do not match, continue to next row
- return hf.FetchNext()
+ if hf.fieldId.HasValue() {
+ switch typedHeadStoreKey := headStoreKey.(type) {
+ case keys.HeadstoreDocKey:
+ if hf.fieldId.Value() != typedHeadStoreKey.FieldID {
+ // FieldIds do not match, continue to next row
+ return hf.FetchNext()
+ }
+
+ return &typedHeadStoreKey.Cid, nil
+
+ case keys.HeadstoreColKey:
+ if hf.fieldId.Value() == "" {
+ return &typedHeadStoreKey.Cid, nil
+ } else {
+ return nil, nil
+ }
+ }
}
- return &headStoreKey.Cid, nil
+ cid := headStoreKey.GetCid()
+ return &cid, nil
}
func (hf *HeadFetcher) Close() error {
diff --git a/internal/db/fetcher/versioned.go b/internal/db/fetcher/versioned.go
index baa3acfcfb..24f3ab8467 100644
--- a/internal/db/fetcher/versioned.go
+++ b/internal/db/fetcher/versioned.go
@@ -163,7 +163,7 @@ func (vf *VersionedFetcher) Start(ctx context.Context, spans ...core.Span) error
// VersionedFetcher only ever recieves a headstore key
//nolint:forcetypeassert
- prefix := spans[0].Start.(keys.HeadStoreKey)
+ prefix := spans[0].Start.(keys.HeadstoreDocKey)
dk := prefix.DocID
cid := prefix.Cid
if dk == "" {
diff --git a/internal/db/iterator.go b/internal/db/iterator.go
index 00519d1915..171e4f80bb 100644
--- a/internal/db/iterator.go
+++ b/internal/db/iterator.go
@@ -40,7 +40,7 @@ func NewHeadBlocksIterator(
blockstore datastore.Blockstore,
docID string,
) (*DocHeadBlocksIterator, error) {
- headStoreKey := keys.HeadStoreKey{
+ headStoreKey := keys.HeadstoreDocKey{
DocID: docID,
FieldID: core.COMPOSITE_NAMESPACE,
}
diff --git a/internal/db/merge.go b/internal/db/merge.go
index 898700a9ed..47db8740b1 100644
--- a/internal/db/merge.go
+++ b/internal/db/merge.go
@@ -418,7 +418,7 @@ func decryptBlock(
) (*coreblock.Block, error) {
_, encryptor := encryption.EnsureContextWithEncryptor(ctx)
- if block.Delta.IsComposite() {
+ if block.Delta.IsComposite() || block.Delta.IsCollection() {
// for composite blocks there is nothing to decrypt
return block, nil
}
diff --git a/internal/keys/datastore_doc.go b/internal/keys/datastore_doc.go
index cffa99f6fc..cf3cb3ea59 100644
--- a/internal/keys/datastore_doc.go
+++ b/internal/keys/datastore_doc.go
@@ -112,8 +112,8 @@ func (k DataStoreKey) WithFieldID(fieldID string) DataStoreKey {
return newKey
}
-func (k DataStoreKey) ToHeadStoreKey() HeadStoreKey {
- return HeadStoreKey{
+func (k DataStoreKey) ToHeadStoreKey() HeadstoreDocKey {
+ return HeadstoreDocKey{
DocID: k.DocID,
FieldID: k.FieldID,
}
diff --git a/internal/keys/headstore.go b/internal/keys/headstore.go
new file mode 100644
index 0000000000..ca0b0c3621
--- /dev/null
+++ b/internal/keys/headstore.go
@@ -0,0 +1,47 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import (
+ "strings"
+
+ "github.com/ipfs/go-cid"
+)
+
+const (
+ HEADSTORE_DOC = "/d"
+ HEADSTORE_COL = "/c"
+)
+
+// HeadstoreKey represents any key that may be stored in the headstore.
+type HeadstoreKey interface {
+ Walkable
+
+ // GetCid returns the cid that forms part of this key.
+ GetCid() cid.Cid
+
+ // WithCid returns a new HeadstoreKey with the same values as the original,
+ // apart from the cid which will have been replaced by the given value.
+ WithCid(c cid.Cid) HeadstoreKey
+}
+
+// NewHeadstoreKey returns the typed representation of the given key string, or
+// an [ErrInvalidKey] error if its type could not be determined.
+func NewHeadstoreKey(key string) (HeadstoreKey, error) {
+ switch {
+ case strings.HasPrefix(key, HEADSTORE_DOC):
+ return NewHeadstoreDocKey(key)
+ case strings.HasPrefix(key, HEADSTORE_COL):
+ return NewHeadstoreColKeyFromString(key)
+ default:
+ return nil, ErrInvalidKey
+ }
+}
diff --git a/internal/keys/headstore_collection.go b/internal/keys/headstore_collection.go
new file mode 100644
index 0000000000..582badfad3
--- /dev/null
+++ b/internal/keys/headstore_collection.go
@@ -0,0 +1,109 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package keys
+
+import (
+ "strconv"
+ "strings"
+
+ "github.com/ipfs/go-cid"
+ ds "github.com/ipfs/go-datastore"
+)
+
+// HeadstoreColKey is used to store the current collection head in the headstore.
+type HeadstoreColKey struct {
+ // CollectionRoot is the root of the collection that this head refers to.
+ //
+ // Including it in the key allows easier identification of a given collection's
+ // head.
+ CollectionRoot uint32
+
+ // Cid is the cid of this head block.
+ Cid cid.Cid
+}
+
+var _ HeadstoreKey = (*HeadstoreColKey)(nil)
+
+func NewHeadstoreColKey(colRoot uint32) HeadstoreColKey {
+ return HeadstoreColKey{
+ CollectionRoot: colRoot,
+ }
+}
+
+func NewHeadstoreColKeyFromString(key string) (HeadstoreColKey, error) {
+ elements := strings.Split(key, "/")
+ if len(elements) != 4 {
+ return HeadstoreColKey{}, ErrInvalidKey
+ }
+
+ root, err := strconv.Atoi(elements[2])
+ if err != nil {
+ return HeadstoreColKey{}, err
+ }
+
+ cid, err := cid.Decode(elements[3])
+ if err != nil {
+ return HeadstoreColKey{}, err
+ }
+
+ return HeadstoreColKey{
+ // elements[0] is empty (key has leading '/')
+ CollectionRoot: uint32(root),
+ Cid: cid,
+ }, nil
+}
+
+func (k HeadstoreColKey) WithCid(c cid.Cid) HeadstoreKey {
+ newKey := k
+ newKey.Cid = c
+ return newKey
+}
+
+func (k HeadstoreColKey) GetCid() cid.Cid {
+ return k.Cid
+}
+
+func (k HeadstoreColKey) ToString() string {
+ result := HEADSTORE_COL
+
+ if k.CollectionRoot != 0 {
+ result = result + "/" + strconv.Itoa(int(k.CollectionRoot))
+ }
+ if k.Cid.Defined() {
+ result = result + "/" + k.Cid.String()
+ }
+
+ return result
+}
+
+func (k HeadstoreColKey) Bytes() []byte {
+ return []byte(k.ToString())
+}
+
+func (k HeadstoreColKey) ToDS() ds.Key {
+ return ds.NewKey(k.ToString())
+}
+
+func (k HeadstoreColKey) PrefixEnd() Walkable {
+ newKey := k
+
+ if k.Cid.Defined() {
+ newKey.Cid = cid.MustParse(bytesPrefixEnd(k.Cid.Bytes()))
+ return newKey
+ }
+
+ if k.CollectionRoot != 0 {
+ newKey.CollectionRoot = k.CollectionRoot + 1
+ return newKey
+ }
+
+ return newKey
+}
diff --git a/internal/keys/headstore_doc.go b/internal/keys/headstore_doc.go
index 55809ab236..612381fb6f 100644
--- a/internal/keys/headstore_doc.go
+++ b/internal/keys/headstore_doc.go
@@ -17,60 +17,64 @@ import (
ds "github.com/ipfs/go-datastore"
)
-type HeadStoreKey struct {
+type HeadstoreDocKey struct {
DocID string
FieldID string //can be 'C'
Cid cid.Cid
}
-var _ Walkable = (*HeadStoreKey)(nil)
+var _ HeadstoreKey = (*HeadstoreDocKey)(nil)
-// Creates a new HeadStoreKey from a string as best as it can,
+// Creates a new HeadstoreDocKey from a string as best as it can,
// splitting the input using '/' as a field deliminator. It assumes
// that the input string is in the following format:
//
-// /[DocID]/[FieldId]/[Cid]
+// /d/[DocID]/[FieldId]/[Cid]
//
// Any properties before the above are ignored
-func NewHeadStoreKey(key string) (HeadStoreKey, error) {
+func NewHeadstoreDocKey(key string) (HeadstoreDocKey, error) {
elements := strings.Split(key, "/")
- if len(elements) != 4 {
- return HeadStoreKey{}, ErrInvalidKey
+ if len(elements) != 5 {
+ return HeadstoreDocKey{}, ErrInvalidKey
}
- cid, err := cid.Decode(elements[3])
+ cid, err := cid.Decode(elements[4])
if err != nil {
- return HeadStoreKey{}, err
+ return HeadstoreDocKey{}, err
}
- return HeadStoreKey{
+ return HeadstoreDocKey{
// elements[0] is empty (key has leading '/')
- DocID: elements[1],
- FieldID: elements[2],
+ DocID: elements[2],
+ FieldID: elements[3],
Cid: cid,
}, nil
}
-func (k HeadStoreKey) WithDocID(docID string) HeadStoreKey {
+func (k HeadstoreDocKey) WithDocID(docID string) HeadstoreDocKey {
newKey := k
newKey.DocID = docID
return newKey
}
-func (k HeadStoreKey) WithCid(c cid.Cid) HeadStoreKey {
+func (k HeadstoreDocKey) WithCid(c cid.Cid) HeadstoreKey {
newKey := k
newKey.Cid = c
return newKey
}
-func (k HeadStoreKey) WithFieldID(fieldID string) HeadStoreKey {
+func (k HeadstoreDocKey) GetCid() cid.Cid {
+ return k.Cid
+}
+
+func (k HeadstoreDocKey) WithFieldID(fieldID string) HeadstoreDocKey {
newKey := k
newKey.FieldID = fieldID
return newKey
}
-func (k HeadStoreKey) ToString() string {
- var result string
+func (k HeadstoreDocKey) ToString() string {
+ result := HEADSTORE_DOC
if k.DocID != "" {
result = result + "/" + k.DocID
@@ -85,15 +89,15 @@ func (k HeadStoreKey) ToString() string {
return result
}
-func (k HeadStoreKey) Bytes() []byte {
+func (k HeadstoreDocKey) Bytes() []byte {
return []byte(k.ToString())
}
-func (k HeadStoreKey) ToDS() ds.Key {
+func (k HeadstoreDocKey) ToDS() ds.Key {
return ds.NewKey(k.ToString())
}
-func (k HeadStoreKey) PrefixEnd() Walkable {
+func (k HeadstoreDocKey) PrefixEnd() Walkable {
newKey := k
if k.FieldID != "" {
diff --git a/internal/merkle/clock/clock.go b/internal/merkle/clock/clock.go
index 94180f2144..15a07acca9 100644
--- a/internal/merkle/clock/clock.go
+++ b/internal/merkle/clock/clock.go
@@ -48,7 +48,7 @@ func NewMerkleClock(
headstore datastore.DSReaderWriter,
blockstore datastore.Blockstore,
encstore datastore.Blockstore,
- namespace keys.HeadStoreKey,
+ namespace keys.HeadstoreKey,
crdt core.ReplicatedData,
) *MerkleClock {
return &MerkleClock{
@@ -207,7 +207,7 @@ func encryptBlock(
block *coreblock.Block,
encBlock *coreblock.Encryption,
) (*coreblock.Block, error) {
- if block.Delta.IsComposite() {
+ if block.Delta.IsComposite() || block.Delta.IsCollection() {
return block, nil
}
diff --git a/internal/merkle/clock/clock_test.go b/internal/merkle/clock/clock_test.go
index c0f169c0a5..0a5aa34454 100644
--- a/internal/merkle/clock/clock_test.go
+++ b/internal/merkle/clock/clock_test.go
@@ -38,7 +38,7 @@ func newTestMerkleClock() *MerkleClock {
multistore.Headstore(),
multistore.Blockstore(),
multistore.Encstore(),
- keys.HeadStoreKey{DocID: request.DocIDArgName, FieldID: "1"},
+ keys.HeadstoreDocKey{DocID: request.DocIDArgName, FieldID: "1"},
reg,
)
}
@@ -47,7 +47,7 @@ func TestNewMerkleClock(t *testing.T) {
s := newDS()
multistore := datastore.MultiStoreFrom(s)
reg := crdt.NewLWWRegister(multistore.Rootstore(), keys.CollectionSchemaVersionKey{}, keys.DataStoreKey{}, "")
- clk := NewMerkleClock(multistore.Headstore(), multistore.Blockstore(), multistore.Encstore(), keys.HeadStoreKey{}, reg)
+ clk := NewMerkleClock(multistore.Headstore(), multistore.Blockstore(), multistore.Encstore(), keys.HeadstoreDocKey{}, reg)
if clk.headstore != multistore.Headstore() {
t.Error("MerkleClock store not correctly set")
diff --git a/internal/merkle/clock/heads.go b/internal/merkle/clock/heads.go
index 0dcf2a8f99..873ba64503 100644
--- a/internal/merkle/clock/heads.go
+++ b/internal/merkle/clock/heads.go
@@ -27,17 +27,17 @@ import (
// heads manages the current Merkle-CRDT heads.
type heads struct {
store datastore.DSReaderWriter
- namespace keys.HeadStoreKey
+ namespace keys.HeadstoreKey
}
-func NewHeadSet(store datastore.DSReaderWriter, namespace keys.HeadStoreKey) *heads {
+func NewHeadSet(store datastore.DSReaderWriter, namespace keys.HeadstoreKey) *heads {
return &heads{
store: store,
namespace: namespace,
}
}
-func (hh *heads) key(c cid.Cid) keys.HeadStoreKey {
+func (hh *heads) key(c cid.Cid) keys.HeadstoreKey {
return hh.namespace.WithCid(c)
}
@@ -102,7 +102,7 @@ func (hh *heads) List(ctx context.Context) ([]cid.Cid, uint64, error) {
return nil, 0, NewErrFailedToGetNextQResult(r.Error)
}
- headKey, err := keys.NewHeadStoreKey(r.Key)
+ headKey, err := keys.NewHeadstoreKey(r.Key)
if err != nil {
return nil, 0, err
}
@@ -111,7 +111,7 @@ func (hh *heads) List(ctx context.Context) ([]cid.Cid, uint64, error) {
if n <= 0 {
return nil, 0, ErrDecodingHeight
}
- heads = append(heads, headKey.Cid)
+ heads = append(heads, headKey.GetCid())
if height > maxHeight {
maxHeight = height
}
diff --git a/internal/merkle/clock/heads_test.go b/internal/merkle/clock/heads_test.go
index cb8e1d1014..cbf153874c 100644
--- a/internal/merkle/clock/heads_test.go
+++ b/internal/merkle/clock/heads_test.go
@@ -45,7 +45,7 @@ func newHeadSet() *heads {
return NewHeadSet(
datastore.AsDSReaderWriter(s),
- keys.HeadStoreKey{}.WithDocID("myDocID").WithFieldID("1"),
+ keys.HeadstoreDocKey{}.WithDocID("myDocID").WithFieldID("1"),
)
}
diff --git a/internal/merkle/crdt/collection.go b/internal/merkle/crdt/collection.go
new file mode 100644
index 0000000000..976135ca5c
--- /dev/null
+++ b/internal/merkle/crdt/collection.go
@@ -0,0 +1,53 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package merklecrdt
+
+import (
+ "context"
+
+ cidlink "github.com/ipld/go-ipld-prime/linking/cid"
+
+ coreblock "github.com/sourcenetwork/defradb/internal/core/block"
+ "github.com/sourcenetwork/defradb/internal/core/crdt"
+ "github.com/sourcenetwork/defradb/internal/keys"
+ "github.com/sourcenetwork/defradb/internal/merkle/clock"
+)
+
+type MerkleCollection struct {
+ clock *clock.MerkleClock
+ reg *crdt.Collection
+}
+
+var _ MerkleCRDT = (*MerkleCollection)(nil)
+
+func NewMerkleCollection(
+ store Stores,
+ schemaVersionKey keys.CollectionSchemaVersionKey,
+ key keys.HeadstoreColKey,
+) *MerkleCollection {
+ register := crdt.NewCollection(schemaVersionKey)
+
+ clk := clock.NewMerkleClock(store.Headstore(), store.Blockstore(), store.Encstore(), key, register)
+
+ return &MerkleCollection{
+ clock: clk,
+ reg: register,
+ }
+}
+
+func (m *MerkleCollection) Clock() *clock.MerkleClock {
+ return m.clock
+}
+
+func (m *MerkleCollection) Save(ctx context.Context, links []coreblock.DAGLink) (cidlink.Link, []byte, error) {
+ delta := m.reg.NewDelta()
+ return m.clock.AddDelta(ctx, delta, links...)
+}
diff --git a/internal/merkle/crdt/composite.go b/internal/merkle/crdt/composite.go
index 862541bf8f..c6224c3862 100644
--- a/internal/merkle/crdt/composite.go
+++ b/internal/merkle/crdt/composite.go
@@ -61,12 +61,12 @@ func (m *MerkleCompositeDAG) Clock() *clock.MerkleClock {
func (m *MerkleCompositeDAG) Delete(
ctx context.Context,
) (cidlink.Link, []byte, error) {
- delta := m.reg.Set(client.Deleted)
+ delta := m.reg.NewDelta(client.Deleted)
return m.clock.AddDelta(ctx, delta)
}
// Save the value of the composite CRDT to DAG.
func (m *MerkleCompositeDAG) Save(ctx context.Context, links []coreblock.DAGLink) (cidlink.Link, []byte, error) {
- delta := m.reg.Set(client.Active)
+ delta := m.reg.NewDelta(client.Active)
return m.clock.AddDelta(ctx, delta, links...)
}
diff --git a/internal/planner/commit.go b/internal/planner/commit.go
index ceecfc46cd..c73944b250 100644
--- a/internal/planner/commit.go
+++ b/internal/planner/commit.go
@@ -36,7 +36,7 @@ type dagScanNode struct {
queuedCids []*cid.Cid
fetcher fetcher.HeadFetcher
- prefix keys.HeadStoreKey
+ prefix immutable.Option[keys.HeadstoreKey]
commitSelect *mapper.CommitSelect
execInfo dagScanExecInfo
@@ -67,17 +67,20 @@ func (n *dagScanNode) Kind() string {
}
func (n *dagScanNode) Init() error {
- undefined := keys.HeadStoreKey{}
- if n.prefix == undefined {
+ if !n.prefix.HasValue() {
if n.commitSelect.DocID.HasValue() {
- key := keys.HeadStoreKey{}.WithDocID(n.commitSelect.DocID.Value())
+ key := keys.HeadstoreDocKey{}.WithDocID(n.commitSelect.DocID.Value())
if n.commitSelect.FieldID.HasValue() {
field := n.commitSelect.FieldID.Value()
key = key.WithFieldID(field)
}
- n.prefix = key
+ n.prefix = immutable.Some[keys.HeadstoreKey](key)
+ } else if n.commitSelect.FieldID.HasValue() && n.commitSelect.FieldID.Value() == "" {
+ // If the user has provided an explicit nil value as `FieldID`, then we are only
+ // returning collection commits.
+ n.prefix = immutable.Some[keys.HeadstoreKey](keys.HeadstoreColKey{})
}
}
@@ -106,15 +109,15 @@ func (n *dagScanNode) Spans(spans []core.Span) {
}
for _, span := range spans {
- var start keys.HeadStoreKey
+ var start keys.HeadstoreDocKey
switch s := span.Start.(type) {
case keys.DataStoreKey:
start = s.ToHeadStoreKey()
- case keys.HeadStoreKey:
+ case keys.HeadstoreDocKey:
start = s
}
- n.prefix = start.WithFieldID(fieldID)
+ n.prefix = immutable.Some[keys.HeadstoreKey](start.WithFieldID(fieldID))
return
}
}
@@ -144,14 +147,13 @@ func (n *dagScanNode) simpleExplain() (map[string]any, error) {
// Build the explanation of the spans attribute.
spansExplainer := []map[string]any{}
- undefinedHsKey := keys.HeadStoreKey{}
// Note: n.headset is `nil` for single commit selection query, so must check for it.
- if n.prefix != undefinedHsKey {
+ if n.prefix.HasValue() {
spansExplainer = append(
spansExplainer,
map[string]any{
- "start": n.prefix.ToString(),
- "end": n.prefix.PrefixEnd().ToString(),
+ "start": n.prefix.Value().ToString(),
+ "end": n.prefix.Value().PrefixEnd().ToString(),
},
)
}
@@ -305,11 +307,13 @@ func (n *dagScanNode) dagBlockToNodeDoc(block *coreblock.Block) (core.Doc, error
n.commitSelect.DocumentMapping.SetFirstOfName(&commit, request.SchemaVersionIDFieldName, schemaVersionId)
var fieldName any
-
- var fieldID string
+ var fieldID any
if block.Delta.CompositeDAGDelta != nil {
fieldID = core.COMPOSITE_NAMESPACE
fieldName = nil
+ } else if block.Delta.CollectionDelta != nil {
+ fieldID = nil
+ fieldName = nil
} else {
fName := block.Delta.GetFieldName()
fieldName = fName
@@ -348,9 +352,13 @@ func (n *dagScanNode) dagBlockToNodeDoc(block *coreblock.Block) (core.Doc, error
n.commitSelect.DocumentMapping.SetFirstOfName(&commit, request.FieldIDFieldName, fieldID)
docID := block.Delta.GetDocID()
-
- n.commitSelect.DocumentMapping.SetFirstOfName(&commit,
- request.DocIDArgName, string(docID))
+ if docID != nil {
+ n.commitSelect.DocumentMapping.SetFirstOfName(
+ &commit,
+ request.DocIDArgName,
+ string(docID),
+ )
+ }
cols, err := n.planner.db.GetCollections(
n.planner.ctx,
@@ -391,7 +399,9 @@ func (n *dagScanNode) dagBlockToNodeDoc(block *coreblock.Block) (core.Doc, error
for _, l := range block.Links {
link := linksMapping.NewDoc()
- linksMapping.SetFirstOfName(&link, request.LinksNameFieldName, l.Name)
+ if l.Name != "" {
+ linksMapping.SetFirstOfName(&link, request.LinksNameFieldName, l.Name)
+ }
linksMapping.SetFirstOfName(&link, request.LinksCidFieldName, l.Link.Cid.String())
links[i] = link
diff --git a/internal/planner/select.go b/internal/planner/select.go
index e5b53cd997..f1b3d05867 100644
--- a/internal/planner/select.go
+++ b/internal/planner/select.go
@@ -266,11 +266,11 @@ func (n *selectNode) initSource() ([]aggregateNode, error) {
origScan.Spans(
[]core.Span{
core.NewSpan(
- keys.HeadStoreKey{
+ keys.HeadstoreDocKey{
DocID: n.selectReq.DocIDs.Value()[0],
Cid: c,
},
- keys.HeadStoreKey{},
+ keys.HeadstoreDocKey{},
),
},
)
diff --git a/internal/request/graphql/parser/commit.go b/internal/request/graphql/parser/commit.go
index 22e5afe0b0..d571384a34 100644
--- a/internal/request/graphql/parser/commit.go
+++ b/internal/request/graphql/parser/commit.go
@@ -50,7 +50,9 @@ func parseCommitSelect(
}
case request.FieldIDName:
- if v, ok := value.(string); ok {
+ if value == nil {
+ commit.FieldID = immutable.Some("")
+ } else if v, ok := value.(string); ok {
commit.FieldID = immutable.Some(v)
}
diff --git a/internal/request/graphql/schema/collection.go b/internal/request/graphql/schema/collection.go
index 7085ba5c97..bfe4e9954a 100644
--- a/internal/request/graphql/schema/collection.go
+++ b/internal/request/graphql/schema/collection.go
@@ -163,6 +163,7 @@ func collectionFromAstDefinition(
})
isMaterialized := immutable.None[bool]()
+ var isBranchable bool
for _, directive := range def.Directives {
switch directive.Name.Value {
case types.IndexDirectiveLabel:
@@ -197,6 +198,22 @@ func collectionFromAstDefinition(
} else {
isMaterialized = immutable.Some(true)
}
+
+ case types.BranchableDirectiveLabel:
+ if isBranchable {
+ continue
+ }
+
+ explicitIsBranchable := immutable.None[bool]()
+
+ for _, arg := range directive.Arguments {
+ if arg.Name.Value == types.BranchableDirectivePropIf {
+ explicitIsBranchable = immutable.Some(arg.Value.GetValue().(bool))
+ break
+ }
+ }
+
+ isBranchable = !explicitIsBranchable.HasValue() || explicitIsBranchable.Value()
}
}
@@ -207,6 +224,7 @@ func collectionFromAstDefinition(
Policy: policyDescription,
Fields: collectionFieldDescriptions,
IsMaterialized: !isMaterialized.HasValue() || isMaterialized.Value(),
+ IsBranchable: isBranchable,
},
Schema: client.SchemaDescription{
Name: def.Name.Value,
diff --git a/internal/request/graphql/schema/schema.go b/internal/request/graphql/schema/schema.go
index d18911c929..1168687441 100644
--- a/internal/request/graphql/schema/schema.go
+++ b/internal/request/graphql/schema/schema.go
@@ -105,6 +105,7 @@ func defaultDirectivesType(
types.PrimaryDirective(),
types.RelationDirective(),
types.MaterializedDirective(),
+ types.BranchableDirective(),
}
}
diff --git a/internal/request/graphql/schema/types/descriptions.go b/internal/request/graphql/schema/types/descriptions.go
index e442545995..a9932faeaf 100644
--- a/internal/request/graphql/schema/types/descriptions.go
+++ b/internal/request/graphql/schema/types/descriptions.go
@@ -36,7 +36,8 @@ An optional value that skips the given number of results that would have
Commit represents an individual commit to a MerkleCRDT, every mutation to a
document will result in a new commit per modified field, and one composite
commit composed of the field level commits and, in the case of an update,
- the prior composite commit.
+ the prior composite commit. If the collection is branchable, there will
+ also be a collection-level commit for each mutation.
`
commitDocIDArgDescription string = `
An optional docID parameter for this commit query. Only commits for a document
@@ -60,10 +61,10 @@ An optional value that specifies the maximum depth to which the commit DAG graph
commitLinksDescription string = `
Child commits in the DAG that contribute to the composition of this commit.
Composite commits will link to the field commits for the fields modified during
- the single mutation.
+ the single mutation. Collection commits will link to composites.
`
commitHeightFieldDescription string = `
-Height represents the location of the commit in the DAG. All commits (composite,
+Height represents the location of the commit in the DAG. All commits (collection, composite,
and field level) on create will have a height of '1', each subsequent local update
will increment this by one for the new commits.
`
@@ -82,12 +83,12 @@ The ID of the schema version that this commit was committed against. This ID all
to determine the state of the data model at the time of commit.
`
commitFieldNameFieldDescription string = `
-The name of the field that this commit was committed against. If this is a composite field
- the value will be null.
+The name of the field that this commit was committed against. If this is a composite
+ or a collection the value will be null.
`
commitFieldIDFieldDescription string = `
The id of the field that this commit was committed against. If this is a composite field
- the value will be "C".
+ the value will be "C". If it is a collection level commit it will be null.
`
commitDeltaFieldDescription string = `
The CBOR encoded representation of the value that is saved as part of this commit.
diff --git a/internal/request/graphql/schema/types/types.go b/internal/request/graphql/schema/types/types.go
index 5770b4b579..6bcaf894fe 100644
--- a/internal/request/graphql/schema/types/types.go
+++ b/internal/request/graphql/schema/types/types.go
@@ -54,6 +54,9 @@ const (
MaterializedDirectiveLabel = "materialized"
MaterializedDirectivePropIf = "if"
+ BranchableDirectiveLabel = "branchable"
+ BranchableDirectivePropIf = "if"
+
FieldOrderASC = "ASC"
FieldOrderDESC = "DESC"
)
@@ -237,6 +240,33 @@ func MaterializedDirective() *gql.Directive {
})
}
+func BranchableDirective() *gql.Directive {
+ return gql.NewDirective(gql.DirectiveConfig{
+ Name: BranchableDirectiveLabel,
+ // Todo: This description will need to be changed with:
+ // https://github.com/sourcenetwork/defradb/issues/3219
+ Description: `@branchable is a directive that defines whether the history of this collection is tracked
+ as a single, verifiable entity or not. It will default to false if omitted.
+
+ If multiple @branchable directives are provided, they will be aggregated with OR logic (if any are true, the
+ collection history will be tracked).
+
+ The history may be queried like a document history can be queried, for example via 'commits'
+ GQL queries.
+
+ Currently this property is immutable and can only be set on collection creation, however
+ that will change in the future.`,
+ Args: gql.FieldConfigArgument{
+ BranchableDirectivePropIf: &gql.ArgumentConfig{
+ Type: gql.Boolean,
+ },
+ },
+ Locations: []string{
+ gql.DirectiveLocationObject,
+ },
+ })
+}
+
func CRDTEnum() *gql.Enum {
return gql.NewEnum(gql.EnumConfig{
Name: "CRDTType",
diff --git a/net/server_test.go b/net/server_test.go
index a2cda4c76b..a29952d2b8 100644
--- a/net/server_test.go
+++ b/net/server_test.go
@@ -86,7 +86,7 @@ func getHead(ctx context.Context, db client.DB, docID client.DocID) (cid.Cid, er
}
if len(entries) > 0 {
- hsKey, err := keys.NewHeadStoreKey(entries[0].Key)
+ hsKey, err := keys.NewHeadstoreDocKey(entries[0].Key)
if err != nil {
return cid.Undef, err
}
diff --git a/tests/integration/collection_description/branchable_test.go b/tests/integration/collection_description/branchable_test.go
new file mode 100644
index 0000000000..58baaacee1
--- /dev/null
+++ b/tests/integration/collection_description/branchable_test.go
@@ -0,0 +1,92 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package collection_description
+
+import (
+ "testing"
+
+ "github.com/sourcenetwork/immutable"
+
+ "github.com/sourcenetwork/defradb/client"
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestColDescr_Branchable(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable {}
+ `,
+ },
+ testUtils.GetCollections{
+ ExpectedResults: []client.CollectionDescription{
+ {
+ ID: 1,
+ Name: immutable.Some("Users"),
+ IsMaterialized: true,
+ IsBranchable: true,
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestColDescr_BranchableIfTrue(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable(if: true) {}
+ `,
+ },
+ testUtils.GetCollections{
+ ExpectedResults: []client.CollectionDescription{
+ {
+ ID: 1,
+ Name: immutable.Some("Users"),
+ IsMaterialized: true,
+ IsBranchable: true,
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestColDescr_BranchableIfFalse(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable(if: false) {}
+ `,
+ },
+ testUtils.GetCollections{
+ ExpectedResults: []client.CollectionDescription{
+ {
+ ID: 1,
+ Name: immutable.Some("Users"),
+ IsMaterialized: true,
+ IsBranchable: false,
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/collection_description/updates/replace/branchable_test.go b/tests/integration/collection_description/updates/replace/branchable_test.go
new file mode 100644
index 0000000000..ad0233d51c
--- /dev/null
+++ b/tests/integration/collection_description/updates/replace/branchable_test.go
@@ -0,0 +1,65 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package replace
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestColDescrUpdateReplaceIsBranchable_UpdatingFromTrueToFalse_Errors(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @branchable {
+ name: String
+ }
+ `,
+ },
+ testUtils.PatchCollection{
+ Patch: `
+ [
+ { "op": "replace", "path": "/1/IsBranchable", "value": false }
+ ]
+ `,
+ ExpectedError: "mutating IsBranchable is not supported. Collection: User",
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestColDescrUpdateReplaceIsBranchable_UpdatingFromFalseToTrue_Errors(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @branchable(if: false) {
+ name: String
+ }
+ `,
+ },
+ testUtils.PatchCollection{
+ Patch: `
+ [
+ { "op": "replace", "path": "/1/IsBranchable", "value": true }
+ ]
+ `,
+ ExpectedError: "mutating IsBranchable is not supported. Collection: User",
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/events.go b/tests/integration/events.go
index 1fbc64416e..bbe19ce391 100644
--- a/tests/integration/events.go
+++ b/tests/integration/events.go
@@ -182,6 +182,12 @@ func waitForUpdateEvents(
require.Fail(s.t, "timeout waiting for update event", "Node %d", i)
}
+ if evt.DocID == "" {
+ // Todo: This will almost certainly need to change once P2P for collection-level commits
+ // is enabled. See: https://github.com/sourcenetwork/defradb/issues/3212
+ continue
+ }
+
// make sure the event is expected
_, ok := expect[evt.DocID]
require.True(s.t, ok, "unexpected document update", "Node %d", i)
diff --git a/tests/integration/explain/default/dagscan_test.go b/tests/integration/explain/default/dagscan_test.go
index 6b216ab061..c19058c258 100644
--- a/tests/integration/explain/default/dagscan_test.go
+++ b/tests/integration/explain/default/dagscan_test.go
@@ -60,8 +60,8 @@ func TestDefaultExplainCommitsDagScanQueryOp(t *testing.T) {
"fieldId": "1",
"spans": []dataMap{
{
- "start": "/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/1",
- "end": "/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/2",
+ "start": "/d/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/1",
+ "end": "/d/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/2",
},
},
},
@@ -103,8 +103,8 @@ func TestDefaultExplainCommitsDagScanQueryOpWithoutField(t *testing.T) {
"fieldId": nil,
"spans": []dataMap{
{
- "start": "/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84",
- "end": "/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e85",
+ "start": "/d/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84",
+ "end": "/d/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e85",
},
},
},
@@ -147,8 +147,8 @@ func TestDefaultExplainLatestCommitsDagScanQueryOp(t *testing.T) {
"fieldId": "1",
"spans": []dataMap{
{
- "start": "/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/1",
- "end": "/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/2",
+ "start": "/d/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/1",
+ "end": "/d/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/2",
},
},
},
@@ -191,8 +191,8 @@ func TestDefaultExplainLatestCommitsDagScanQueryOpWithoutField(t *testing.T) {
"fieldId": "C",
"spans": []dataMap{
{
- "start": "/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/C",
- "end": "/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/D",
+ "start": "/d/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/C",
+ "end": "/d/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/D",
},
},
},
diff --git a/tests/integration/query/commits/branchables/cid_doc_id_test.go b/tests/integration/query/commits/branchables/cid_doc_id_test.go
new file mode 100644
index 0000000000..e0f8722753
--- /dev/null
+++ b/tests/integration/query/commits/branchables/cid_doc_id_test.go
@@ -0,0 +1,56 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package branchables
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQueryCommitsBranchables_WithCidAndDocIDParam(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable {
+ name: String
+ age: Int
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John",
+ "age": 21
+ }`,
+ },
+ testUtils.Request{
+ // This request uses the document's docID, and the collection's cid.
+ // It would be very nice if this worked:
+ // https://github.com/sourcenetwork/defradb/issues/3213
+ Request: `query {
+ commits(
+ docID: "bae-0b2f15e5-bfe7-5cb7-8045-471318d7dbc3",
+ cid: "bafyreifi7borlnkazxrcohgl7r36cm5ga7moyiiajov3om7urexbx7cyl4"
+ ) {
+ cid
+ }
+ }`,
+ Results: map[string]any{
+ "commits": []map[string]any{},
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/query/commits/branchables/cid_test.go b/tests/integration/query/commits/branchables/cid_test.go
new file mode 100644
index 0000000000..8f60b403b8
--- /dev/null
+++ b/tests/integration/query/commits/branchables/cid_test.go
@@ -0,0 +1,63 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package branchables
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQueryCommitsBranchables_WithCidParam(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable {
+ name: String
+ age: Int
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John",
+ "age": 21
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ commits(
+ cid: "bafyreifi7borlnkazxrcohgl7r36cm5ga7moyiiajov3om7urexbx7cyl4"
+ ) {
+ cid
+ collectionID
+ docID
+ fieldName
+ }
+ }`,
+ Results: map[string]any{
+ "commits": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("collection"),
+ // Extra params are used to verify this is a collection level cid
+ "collectionID": int64(1),
+ "docID": nil,
+ "fieldName": nil,
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/query/commits/branchables/create_test.go b/tests/integration/query/commits/branchables/create_test.go
new file mode 100644
index 0000000000..5eb9d01392
--- /dev/null
+++ b/tests/integration/query/commits/branchables/create_test.go
@@ -0,0 +1,117 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package branchables
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQueryCommitsBranchables_WithMultipleCreate(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable {
+ name: String
+ age: Int
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John",
+ "age": 21
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "Fred",
+ "age": 25
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ commits {
+ cid
+ links {
+ cid
+ }
+ }
+ }`,
+ Results: map[string]any{
+ "commits": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("collection, doc2 create"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("collection, doc1 create"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc2 create"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("collection, doc1 create"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("doc1 create"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc1 name"),
+ "links": []map[string]any{},
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc1 age"),
+ "links": []map[string]any{},
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc1 create"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("doc1 name"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc1 age"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc2 name"),
+ "links": []map[string]any{},
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc2 age"),
+ "links": []map[string]any{},
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc2 create"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("doc2 name"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc2 age"),
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/query/commits/branchables/delete_test.go b/tests/integration/query/commits/branchables/delete_test.go
new file mode 100644
index 0000000000..f908739631
--- /dev/null
+++ b/tests/integration/query/commits/branchables/delete_test.go
@@ -0,0 +1,103 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package branchables
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQueryCommitsBranchables_WithDelete(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable {
+ name: String
+ age: Int
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John",
+ "age": 21
+ }`,
+ },
+ testUtils.DeleteDoc{
+ DocID: 0,
+ },
+ testUtils.Request{
+ Request: `query {
+ commits {
+ cid
+ links {
+ cid
+ }
+ }
+ }`,
+ Results: map[string]any{
+ "commits": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("collection, delete"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("collection, create"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("delete"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("collection, create"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("create"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name"),
+ "links": []map[string]any{},
+ },
+ {
+ "cid": testUtils.NewUniqueCid("age"),
+ "links": []map[string]any{},
+ },
+ {
+ "cid": testUtils.NewUniqueCid("delete"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("create"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("create"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("name"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("age"),
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/query/commits/branchables/field_id_test.go b/tests/integration/query/commits/branchables/field_id_test.go
new file mode 100644
index 0000000000..443796e07c
--- /dev/null
+++ b/tests/integration/query/commits/branchables/field_id_test.go
@@ -0,0 +1,63 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package branchables
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQueryCommitsBranchables_WithFieldID(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable {
+ name: String
+ age: Int
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John",
+ "age": 21
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ commits(
+ fieldId: null
+ ) {
+ cid
+ collectionID
+ docID
+ fieldId
+ }
+ }`,
+ Results: map[string]any{
+ "commits": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("collection"),
+ // Extra params are used to verify this is a collection level cid
+ "collectionID": int64(1),
+ "docID": nil,
+ "fieldId": nil,
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/query/commits/branchables/if_test.go b/tests/integration/query/commits/branchables/if_test.go
new file mode 100644
index 0000000000..b050429c0e
--- /dev/null
+++ b/tests/integration/query/commits/branchables/if_test.go
@@ -0,0 +1,108 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package branchables
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQueryCommitsBranchables_WithIfDirectiveTrue(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable(if: true) {
+ name: String
+ age: Int
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John",
+ "age": 21
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ commits {
+ cid
+ }
+ }`,
+ Results: map[string]any{
+ "commits": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("collection"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("age"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("head"),
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryCommitsBranchables_WithIfDirectiveFalse(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable(if: false) {
+ name: String
+ age: Int
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John",
+ "age": 21
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ commits {
+ cid
+ }
+ }`,
+ Results: map[string]any{
+ "commits": []map[string]any{
+ // Note: This collection is not branchable, there is no collection
+ // level commit
+ {
+ "cid": testUtils.NewUniqueCid("name"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("age"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("head"),
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/query/commits/branchables/peer_test.go b/tests/integration/query/commits/branchables/peer_test.go
new file mode 100644
index 0000000000..81ff77a240
--- /dev/null
+++ b/tests/integration/query/commits/branchables/peer_test.go
@@ -0,0 +1,133 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package branchables
+
+import (
+ "testing"
+
+ "github.com/sourcenetwork/immutable"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+// TODO: This test documents an unimplemented feature. Tracked by:
+// https://github.com/sourcenetwork/defradb/issues/3212
+func TestQueryCommitsBranchables_SyncsAcrossPeerConnection(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.RandomNetworkingConfig(),
+ testUtils.RandomNetworkingConfig(),
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable {
+ name: String
+ age: Int
+ }
+ `,
+ },
+ testUtils.ConnectPeers{
+ SourceNodeID: 1,
+ TargetNodeID: 0,
+ },
+ testUtils.SubscribeToCollection{
+ NodeID: 1,
+ CollectionIDs: []int{0},
+ },
+ testUtils.CreateDoc{
+ NodeID: immutable.Some(0),
+ Doc: `{
+ "name": "John",
+ "age": 21
+ }`,
+ },
+ testUtils.WaitForSync{},
+ testUtils.Request{
+ NodeID: immutable.Some(0),
+ Request: `query {
+ commits {
+ cid
+ links {
+ cid
+ }
+ }
+ }`,
+ Results: map[string]any{
+ "commits": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("collection"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("composite"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("age"),
+ "links": []map[string]any{},
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name"),
+ "links": []map[string]any{},
+ },
+ {
+ "cid": testUtils.NewUniqueCid("composite"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("age"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name"),
+ },
+ },
+ },
+ },
+ },
+ },
+ testUtils.Request{
+ NodeID: immutable.Some(1),
+ Request: `query {
+ commits {
+ cid
+ links {
+ cid
+ }
+ }
+ }`,
+ Results: map[string]any{
+ "commits": []map[string]any{
+ // Note: The collection commit has not synced.
+ {
+ "cid": testUtils.NewUniqueCid("age"),
+ "links": []map[string]any{},
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name"),
+ "links": []map[string]any{},
+ },
+ {
+ "cid": testUtils.NewUniqueCid("composite"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("age"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name"),
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/query/commits/branchables/simple_test.go b/tests/integration/query/commits/branchables/simple_test.go
new file mode 100644
index 0000000000..44f0021ca4
--- /dev/null
+++ b/tests/integration/query/commits/branchables/simple_test.go
@@ -0,0 +1,161 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package branchables
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQueryCommitsBranchables(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable {
+ name: String
+ age: Int
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John",
+ "age": 21
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ commits {
+ cid
+ }
+ }`,
+ Results: map[string]any{
+ "commits": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("collection"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("age"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("head"),
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryCommitsBranchables_WithAllFields(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable {
+ name: String
+ age: Int
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John",
+ "age": 21
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ commits {
+ cid
+ collectionID
+ delta
+ docID
+ fieldId
+ fieldName
+ height
+ links {
+ cid
+ name
+ }
+ }
+ }`,
+ Results: map[string]any{
+ "commits": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("collection"),
+ "collectionID": int64(1),
+ "delta": nil,
+ "docID": nil,
+ "fieldId": nil,
+ "fieldName": nil,
+ "height": int64(1),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("composite"),
+ "name": nil,
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("age"),
+ "collectionID": int64(1),
+ "delta": testUtils.CBORValue(21),
+ "docID": "bae-0b2f15e5-bfe7-5cb7-8045-471318d7dbc3",
+ "fieldId": "1",
+ "fieldName": "age",
+ "height": int64(1),
+ "links": []map[string]any{},
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name"),
+ "collectionID": int64(1),
+ "delta": testUtils.CBORValue("John"),
+ "docID": "bae-0b2f15e5-bfe7-5cb7-8045-471318d7dbc3",
+ "fieldId": "2",
+ "fieldName": "name",
+ "height": int64(1),
+ "links": []map[string]any{},
+ },
+ {
+ "cid": testUtils.NewUniqueCid("composite"),
+ "collectionID": int64(1),
+ "delta": nil,
+ "docID": "bae-0b2f15e5-bfe7-5cb7-8045-471318d7dbc3",
+ "fieldId": "C",
+ "fieldName": nil,
+ "height": int64(1),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("age"),
+ "name": "age",
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name"),
+ "name": "name",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/query/commits/branchables/update_test.go b/tests/integration/query/commits/branchables/update_test.go
new file mode 100644
index 0000000000..fcb7a88106
--- /dev/null
+++ b/tests/integration/query/commits/branchables/update_test.go
@@ -0,0 +1,116 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package branchables
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQueryCommitsBranchables_WithDocUpdate(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable {
+ name: String
+ age: Int
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John",
+ "age": 21
+ }`,
+ },
+ testUtils.UpdateDoc{
+ Doc: `{
+ "name": "Fred"
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ commits {
+ cid
+ links {
+ cid
+ }
+ }
+ }`,
+ Results: map[string]any{
+ "commits": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("collection, update"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("collection, create"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("update"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("collection, create"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("create"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("age, create"),
+ "links": []map[string]any{},
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name, update"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("name, create"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name, create"),
+ "links": []map[string]any{},
+ },
+ {
+ "cid": testUtils.NewUniqueCid("update"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("create"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name, update"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("create"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("age, create"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name, create"),
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/query/commits/with_null_input_test.go b/tests/integration/query/commits/with_null_input_test.go
index 912bfc36ca..178ccde6d7 100644
--- a/tests/integration/query/commits/with_null_input_test.go
+++ b/tests/integration/query/commits/with_null_input_test.go
@@ -111,17 +111,7 @@ func TestQueryCommitsWithNullFieldID(t *testing.T) {
}
}`,
Results: map[string]any{
- "commits": []map[string]any{
- {
- "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e",
- },
- {
- "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy",
- },
- {
- "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy",
- },
- },
+ "commits": []map[string]any{},
},
},
},
diff --git a/tests/integration/query/simple/with_cid_test.go b/tests/integration/query/simple/with_cid_test.go
index baa8b09402..d692d7b114 100644
--- a/tests/integration/query/simple/with_cid_test.go
+++ b/tests/integration/query/simple/with_cid_test.go
@@ -13,6 +13,9 @@ package simple
import (
"testing"
+ "github.com/sourcenetwork/immutable"
+ "github.com/stretchr/testify/require"
+
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -39,3 +42,50 @@ func TestQuerySimpleWithInvalidCid(t *testing.T) {
executeTestCase(t, test)
}
+
+// This test documents a bug:
+// https://github.com/sourcenetwork/defradb/issues/3214
+func TestQuerySimpleWithCid(t *testing.T) {
+ test := testUtils.TestCase{
+ SupportedClientTypes: immutable.Some(
+ []testUtils.ClientType{
+ // The CLI/Http clients don't panic in this context
+ testUtils.GoClientType,
+ },
+ ),
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users {
+ name: String
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John"
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users (
+ cid: "bafyreib7afkd5hepl45wdtwwpai433bhnbd3ps5m2rv3masctda7b6mmxe"
+ ) {
+ name
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "name": "John",
+ },
+ },
+ },
+ },
+ },
+ }
+
+ require.Panics(t, func() {
+ testUtils.ExecuteTestCase(t, test)
+ })
+}
diff --git a/tests/integration/results.go b/tests/integration/results.go
index 23435a3807..1567467001 100644
--- a/tests/integration/results.go
+++ b/tests/integration/results.go
@@ -274,6 +274,7 @@ func assertCollectionDescriptions(
require.Equal(s.t, expected.Name, actual.Name)
require.Equal(s.t, expected.IsMaterialized, actual.IsMaterialized)
+ require.Equal(s.t, expected.IsBranchable, actual.IsBranchable)
if expected.Indexes != nil || len(actual.Indexes) != 0 {
// Dont bother asserting this if the expected is nil and the actual is nil/empty.
From a89524a3ed29316ca2c3803729ffb435d47062d5 Mon Sep 17 00:00:00 2001
From: AndrewSisley
Date: Mon, 11 Nov 2024 13:23:14 -0500
Subject: [PATCH 19/47] test(i): Skip test when detecting changes (#3234)
## Relevant issue(s)
Resolves #3233
## Description
Skip test `TestQuerySimpleWithCid` when detecting changes as the change
detector does not support asserting panics.
Note: The CI wont pass until *after* this is merged to develop, so
someone will need to change the requiredness of the job so that this can
merge.
---
tests/integration/query/simple/with_cid_test.go | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/tests/integration/query/simple/with_cid_test.go b/tests/integration/query/simple/with_cid_test.go
index d692d7b114..e4c07987e0 100644
--- a/tests/integration/query/simple/with_cid_test.go
+++ b/tests/integration/query/simple/with_cid_test.go
@@ -16,6 +16,7 @@ import (
"github.com/sourcenetwork/immutable"
"github.com/stretchr/testify/require"
+ "github.com/sourcenetwork/defradb/tests/change_detector"
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -46,6 +47,10 @@ func TestQuerySimpleWithInvalidCid(t *testing.T) {
// This test documents a bug:
// https://github.com/sourcenetwork/defradb/issues/3214
func TestQuerySimpleWithCid(t *testing.T) {
+ if change_detector.Enabled {
+ t.Skipf("Change detector does not support requiring panics")
+ }
+
test := testUtils.TestCase{
SupportedClientTypes: immutable.Some(
[]testUtils.ClientType{
From dae28201eef212daf06717546614de07d9b35bc1 Mon Sep 17 00:00:00 2001
From: Keenan Nemetz
Date: Mon, 11 Nov 2024 11:55:29 -0800
Subject: [PATCH 20/47] feat: Order alias target (#3217)
## Relevant issue(s)
Resolves #3196
## Description
This PR adds alias targeting to query orderings.
**Aggregate targets are not included in this PR as they require more
changes.**
## Tasks
- [x] I made sure the code is well commented, particularly
hard-to-understand areas.
- [x] I made sure the repository-held documentation is changed
accordingly.
- [x] I made sure the pull request title adheres to the conventional
commit style (the subset used in the project can be found in
[tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)).
- [x] I made sure to discuss its limitations such as threats to
validity, vulnerability to mistake and misuse, robustness to
invalidation of assumptions, resource requirements, ...
## How has this been tested?
Added integration tests.
Specify the platform(s) on which this was tested:
- MacOS
---
client/request/consts.go | 1 +
client/request/filter.go | 7 +-
internal/planner/mapper/mapper.go | 76 ++-
internal/planner/mapper/targetable.go | 2 +-
internal/request/graphql/parser/errors.go | 3 +-
internal/request/graphql/parser/filter.go | 2 +-
internal/request/graphql/parser/order.go | 37 +-
internal/request/graphql/schema/generate.go | 6 +-
.../query/one_to_one/with_order_test.go | 142 ++++++
.../query/simple/with_order_test.go | 448 ++++++++++++++++++
10 files changed, 692 insertions(+), 32 deletions(-)
diff --git a/client/request/consts.go b/client/request/consts.go
index 18fed52946..ce31c95133 100644
--- a/client/request/consts.go
+++ b/client/request/consts.go
@@ -48,6 +48,7 @@ const (
VersionFieldName = "_version"
MaxFieldName = "_max"
MinFieldName = "_min"
+ AliasFieldName = "_alias"
// New generated document id from a backed up document,
// which might have a different _docID originally.
diff --git a/client/request/filter.go b/client/request/filter.go
index feacb02f2b..aabfafb9b9 100644
--- a/client/request/filter.go
+++ b/client/request/filter.go
@@ -13,10 +13,9 @@ package request
import "github.com/sourcenetwork/immutable"
const (
- FilterOpOr = "_or"
- FilterOpAnd = "_and"
- FilterOpNot = "_not"
- FilterOpAlias = "_alias"
+ FilterOpOr = "_or"
+ FilterOpAnd = "_and"
+ FilterOpNot = "_not"
)
// Filter contains the parsed condition map to be
diff --git a/internal/planner/mapper/mapper.go b/internal/planner/mapper/mapper.go
index 15014fb9f4..826c29ffbc 100644
--- a/internal/planner/mapper/mapper.go
+++ b/internal/planner/mapper/mapper.go
@@ -208,8 +208,12 @@ func toSelect(
}
}
+ targetable, err := toTargetable(thisIndex, selectRequest, mapping)
+ if err != nil {
+ return nil, err
+ }
return &Select{
- Targetable: toTargetable(thisIndex, selectRequest, mapping),
+ Targetable: targetable,
DocumentMapping: mapping,
Cid: selectRequest.CID,
CollectionName: collectionName,
@@ -239,6 +243,11 @@ outer:
for _, condition := range source.Value().Conditions {
fields := condition.Fields[:] // copy slice
for {
+ // alias fields are guaranteed to be resolved
+ // because they refer to existing fields
+ if fields[0] == request.AliasFieldName {
+ continue outer
+ }
numFields := len(fields)
// <2 fields: Direct field on the root type: {age: DESC}
// 2 fields: Single depth related type: {author: {age: DESC}}
@@ -405,11 +414,15 @@ func resolveAggregates(
childObjectIndex := mapping.FirstIndexOfName(target.hostExternalName)
childMapping := mapping.ChildMappings[childObjectIndex]
convertedFilter = ToFilter(target.filter.Value(), childMapping)
+ orderBy, err := toOrderBy(target.order, childMapping)
+ if err != nil {
+ return nil, err
+ }
host, hasHost = tryGetTarget(
target.hostExternalName,
convertedFilter,
target.limit,
- toOrderBy(target.order, childMapping),
+ orderBy,
fields,
)
}
@@ -479,7 +492,10 @@ func resolveAggregates(
// If the child was not mapped, the filter will not have been converted yet
// so we must do that now.
convertedFilter = ToFilter(target.filter.Value(), mapping.ChildMappings[index])
-
+ orderBy, err := toOrderBy(target.order, childMapping)
+ if err != nil {
+ return nil, err
+ }
dummyJoin := &Select{
Targetable: Targetable{
Field: Field{
@@ -488,7 +504,7 @@ func resolveAggregates(
},
Filter: convertedFilter,
Limit: target.limit,
- OrderBy: toOrderBy(target.order, childMapping),
+ OrderBy: orderBy,
},
CollectionName: childCollectionName,
DocumentMapping: childMapping,
@@ -992,9 +1008,9 @@ func resolveInnerFilterDependencies(
newFields := []Requestable{}
for key, value := range source {
- // alias fields are guarenteed to be resolved
+ // alias fields are guaranteed to be resolved
// because they refer to existing fields
- if key == request.FilterOpAlias {
+ if key == request.AliasFieldName {
continue
}
@@ -1290,16 +1306,20 @@ func toMutation(
}, nil
}
-func toTargetable(index int, selectRequest *request.Select, docMap *core.DocumentMapping) Targetable {
+func toTargetable(index int, selectRequest *request.Select, docMap *core.DocumentMapping) (Targetable, error) {
+ orderBy, err := toOrderBy(selectRequest.OrderBy, docMap)
+ if err != nil {
+ return Targetable{}, err
+ }
return Targetable{
Field: toField(index, selectRequest),
DocIDs: selectRequest.DocIDs,
Filter: ToFilter(selectRequest.Filter.Value(), docMap),
Limit: toLimit(selectRequest.Limit, selectRequest.Offset),
GroupBy: toGroupBy(selectRequest.GroupBy, docMap),
- OrderBy: toOrderBy(selectRequest.OrderBy, docMap),
+ OrderBy: orderBy,
ShowDeleted: selectRequest.ShowDeleted,
- }
+ }, nil
}
func toField(index int, selectRequest *request.Select) Field {
@@ -1483,23 +1503,37 @@ func toGroupBy(source immutable.Option[request.GroupBy], mapping *core.DocumentM
}
}
-func toOrderBy(source immutable.Option[request.OrderBy], mapping *core.DocumentMapping) *OrderBy {
+func toOrderBy(source immutable.Option[request.OrderBy], mapping *core.DocumentMapping) (*OrderBy, error) {
if !source.HasValue() {
- return nil
+ return nil, nil
}
conditions := make([]OrderCondition, len(source.Value().Conditions))
for conditionIndex, condition := range source.Value().Conditions {
- fieldIndexes := make([]int, len(condition.Fields))
+ fieldIndexes := make([]int, 0)
currentMapping := mapping
- for fieldIndex, field := range condition.Fields {
- // If there are multiple properties of the same name we can just take the first as
- // we have no other reasonable way of identifying which property they mean if multiple
- // consumer specified requestables are available. Aggregate dependencies should not
- // impact this as they are added after selects.
- firstFieldIndex := currentMapping.FirstIndexOfName(field)
- fieldIndexes[fieldIndex] = firstFieldIndex
- if fieldIndex != len(condition.Fields)-1 {
+ for _, field := range condition.Fields {
+ // flatten alias fields
+ if field == request.AliasFieldName {
+ continue
+ }
+ var firstFieldIndex int
+ // if we have a mapping available check if the
+ // source key is a field or alias (render key)
+ if indexes, ok := currentMapping.IndexesByName[field]; ok {
+ // If there are multiple properties of the same name we can just take the first as
+ // we have no other reasonable way of identifying which property they mean if multiple
+ // consumer specified requestables are available. Aggregate dependencies should not
+ // impact this as they are added after selects.
+ firstFieldIndex = indexes[0]
+ } else if index, ok := currentMapping.TryToFindIndexFromRenderKey(field); ok {
+ firstFieldIndex = index
+ } else {
+ return nil, NewErrFieldOrAliasNotFound(field)
+ }
+
+ fieldIndexes = append(fieldIndexes, firstFieldIndex)
+ if firstFieldIndex < len(currentMapping.ChildMappings) {
// no need to do this for the last (and will panic)
currentMapping = currentMapping.ChildMappings[firstFieldIndex]
}
@@ -1513,7 +1547,7 @@ func toOrderBy(source immutable.Option[request.OrderBy], mapping *core.DocumentM
return &OrderBy{
Conditions: conditions,
- }
+ }, nil
}
// RunFilter runs the given filter expression
diff --git a/internal/planner/mapper/targetable.go b/internal/planner/mapper/targetable.go
index 55bc256327..e25c8b03f5 100644
--- a/internal/planner/mapper/targetable.go
+++ b/internal/planner/mapper/targetable.go
@@ -155,7 +155,7 @@ func filterObjectToMap(mapping *core.DocumentMapping, obj map[connor.FilterKey]a
logicMapEntries[i] = filterObjectToMap(mapping, itemMap)
}
outmap[keyType.Operation] = logicMapEntries
- case request.FilterOpNot, request.FilterOpAlias:
+ case request.FilterOpNot, request.AliasFieldName:
itemMap, ok := v.(map[connor.FilterKey]any)
if ok {
outmap[keyType.Operation] = filterObjectToMap(mapping, itemMap)
diff --git a/internal/request/graphql/parser/errors.go b/internal/request/graphql/parser/errors.go
index 658f50219c..b4692d10e8 100644
--- a/internal/request/graphql/parser/errors.go
+++ b/internal/request/graphql/parser/errors.go
@@ -14,7 +14,8 @@ import "github.com/sourcenetwork/defradb/errors"
var (
ErrFilterMissingArgumentType = errors.New("couldn't find filter argument type")
- ErrInvalidOrderDirection = errors.New("invalid order direction string")
+ ErrInvalidOrderDirection = errors.New("invalid order direction")
+ ErrInvalidOrderInput = errors.New("invalid order input")
ErrFailedToParseConditionsFromAST = errors.New("couldn't parse conditions value from AST")
ErrFailedToParseConditionValue = errors.New("failed to parse condition value from query filter statement")
ErrEmptyDataPayload = errors.New("given data payload is empty")
diff --git a/internal/request/graphql/parser/filter.go b/internal/request/graphql/parser/filter.go
index 1995eeb58b..3fa376a0b1 100644
--- a/internal/request/graphql/parser/filter.go
+++ b/internal/request/graphql/parser/filter.go
@@ -100,7 +100,7 @@ func parseFilterFieldsForDescriptionMap(
return nil, err
}
fields = append(fields, parsedFields...)
- case request.FilterOpNot, request.FilterOpAlias:
+ case request.FilterOpNot, request.AliasFieldName:
conds := v.(map[string]any)
parsedFields, err := parseFilterFieldsForDescriptionMap(conds, col)
if err != nil {
diff --git a/internal/request/graphql/parser/order.go b/internal/request/graphql/parser/order.go
index 983988f5f9..ea6da4a08d 100644
--- a/internal/request/graphql/parser/order.go
+++ b/internal/request/graphql/parser/order.go
@@ -44,7 +44,23 @@ func parseOrderCondition(arg map[string]any) (*request.OrderCondition, error) {
fieldName = name
}
switch t := arg[fieldName].(type) {
+ case string:
+ if fieldName == request.AliasFieldName {
+ return nil, ErrInvalidOrderInput
+ }
+ dir, err := parseOrderDirectionString(t)
+ if err != nil {
+ return nil, err
+ }
+ return &request.OrderCondition{
+ Fields: []string{fieldName},
+ Direction: dir,
+ }, nil
+
case int:
+ if fieldName == request.AliasFieldName {
+ return nil, ErrInvalidOrderInput
+ }
dir, err := parseOrderDirection(t)
if err != nil {
return nil, err
@@ -70,13 +86,28 @@ func parseOrderCondition(arg map[string]any) (*request.OrderCondition, error) {
cond.Fields = append([]string{fieldName}, cond.Fields...)
return cond, nil
- default:
- // field value is null so don't include the condition
+ case nil:
return nil, nil
+
+ default:
+ return nil, ErrInvalidOrderInput
+ }
+}
+
+func parseOrderDirectionString(v string) (request.OrderDirection, error) {
+ switch v {
+ case string(request.ASC):
+ return request.ASC, nil
+
+ case string(request.DESC):
+ return request.DESC, nil
+
+ default:
+ return request.ASC, ErrInvalidOrderDirection
}
}
-func parseOrderDirection(v int) (request.OrderDirection, error) {
+func parseOrderDirection(v any) (request.OrderDirection, error) {
switch v {
case 0:
return request.ASC, nil
diff --git a/internal/request/graphql/schema/generate.go b/internal/request/graphql/schema/generate.go
index 608c83e381..1c1bb70e2f 100644
--- a/internal/request/graphql/schema/generate.go
+++ b/internal/request/graphql/schema/generate.go
@@ -1200,7 +1200,7 @@ func (g *Generator) genTypeFilterArgInput(obj *gql.Object) *gql.InputObject {
Description: schemaTypes.NotOperatorDescription,
Type: selfRefType,
}
- fields[request.FilterOpAlias] = &gql.InputObjectFieldConfig{
+ fields[request.AliasFieldName] = &gql.InputObjectFieldConfig{
Description: "The alias operator allows filters to target aliased fields.",
Type: schemaTypes.JSONScalarType(),
}
@@ -1291,6 +1291,10 @@ func (g *Generator) genTypeOrderArgInput(obj *gql.Object) *gql.InputObject {
fieldThunk := (gql.InputObjectConfigFieldMapThunk)(
func() (gql.InputObjectConfigFieldMap, error) {
fields := gql.InputObjectConfigFieldMap{}
+ fields[request.AliasFieldName] = &gql.InputObjectFieldConfig{
+ Description: "The alias field allows ordering by aliased fields.",
+ Type: schemaTypes.JSONScalarType(),
+ }
for f, field := range obj.Fields() {
if _, ok := request.ReservedFields[f]; ok && f != request.DocIDFieldName {
diff --git a/tests/integration/query/one_to_one/with_order_test.go b/tests/integration/query/one_to_one/with_order_test.go
index 3475ebfc63..e91e803473 100644
--- a/tests/integration/query/one_to_one/with_order_test.go
+++ b/tests/integration/query/one_to_one/with_order_test.go
@@ -287,3 +287,145 @@ func TestQueryOneToOneWithChildIntOrderAscendingWithNoSubTypeFieldsSelected(t *t
executeTestCase(t, test)
}
+
+func TestQueryOneToOne_WithAliasedChildIntOrderAscending_ShouldOrder(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Relation query with ascending order by aliased child's int field.",
+ Actions: []any{
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `{
+ "name": "Painted House",
+ "rating": 4.9
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `{
+ "name": "Theif Lord",
+ "rating": 4.8
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ DocMap: map[string]any{
+ "name": "John Grisham",
+ "age": 65,
+ "verified": true,
+ "published_id": testUtils.NewDocIndex(0, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ DocMap: map[string]any{
+ "name": "Cornelia Funke",
+ "age": 62,
+ "verified": false,
+ "published_id": testUtils.NewDocIndex(0, 1),
+ },
+ },
+ testUtils.Request{
+ Request: `query {
+ Book(order: {_alias: {writer: {age: ASC}}}) {
+ name
+ rating
+ writer: author {
+ age
+ }
+ }
+ }`,
+ Results: map[string]any{
+ "Book": []map[string]any{
+ {
+ "name": "Theif Lord",
+ "rating": 4.8,
+ "writer": map[string]any{
+ "age": int64(62),
+ },
+ },
+ {
+ "name": "Painted House",
+ "rating": 4.9,
+ "writer": map[string]any{
+ "age": int64(65),
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQueryOneToOne_WithChildAliasedIntOrderAscending_ShouldOrder(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Relation query with ascending order by child's aliased int field.",
+ Actions: []any{
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `{
+ "name": "Painted House",
+ "rating": 4.9
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `{
+ "name": "Theif Lord",
+ "rating": 4.8
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ DocMap: map[string]any{
+ "name": "John Grisham",
+ "age": 65,
+ "verified": true,
+ "published_id": testUtils.NewDocIndex(0, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ DocMap: map[string]any{
+ "name": "Cornelia Funke",
+ "age": 62,
+ "verified": false,
+ "published_id": testUtils.NewDocIndex(0, 1),
+ },
+ },
+ testUtils.Request{
+ Request: `query {
+ Book(order: {author: {_alias: {authorAge: ASC}}}) {
+ name
+ rating
+ author {
+ authorAge: age
+ }
+ }
+ }`,
+ Results: map[string]any{
+ "Book": []map[string]any{
+ {
+ "name": "Theif Lord",
+ "rating": 4.8,
+ "author": map[string]any{
+ "authorAge": int64(62),
+ },
+ },
+ {
+ "name": "Painted House",
+ "rating": 4.9,
+ "author": map[string]any{
+ "authorAge": int64(65),
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
diff --git a/tests/integration/query/simple/with_order_test.go b/tests/integration/query/simple/with_order_test.go
index 82245de369..57f08e4b92 100644
--- a/tests/integration/query/simple/with_order_test.go
+++ b/tests/integration/query/simple/with_order_test.go
@@ -451,3 +451,451 @@ func TestQuerySimple_WithMultipleOrderFields_ReturnsError(t *testing.T) {
executeTestCase(t, test)
}
}
+
+func TestQuerySimple_WithAliasOrder_ShouldOrderResults(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with basic alias order ASC",
+ Actions: []any{
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "John",
+ "Age": 21
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Age": 32
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Carlo",
+ "Age": 55
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Alice",
+ "Age": 19
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(order: {_alias: {UserAge: ASC}}) {
+ Name
+ UserAge: Age
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "Name": "Alice",
+ "UserAge": int64(19),
+ },
+ {
+ "Name": "John",
+ "UserAge": int64(21),
+ },
+ {
+ "Name": "Bob",
+ "UserAge": int64(32),
+ },
+ {
+ "Name": "Carlo",
+ "UserAge": int64(55),
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQuerySimple_WithAliasOrderOnNonAliasedField_ShouldOrderResults(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with basic alias order on non aliased field ASC",
+ Actions: []any{
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "John",
+ "Age": 21
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Age": 32
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Carlo",
+ "Age": 55
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Alice",
+ "Age": 19
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(order: {_alias: {Age: ASC}}) {
+ Name
+ Age
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "Name": "Alice",
+ "Age": int64(19),
+ },
+ {
+ "Name": "John",
+ "Age": int64(21),
+ },
+ {
+ "Name": "Bob",
+ "Age": int64(32),
+ },
+ {
+ "Name": "Carlo",
+ "Age": int64(55),
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQuerySimple_WithAliasOrderOnNonExistantField_ShouldError(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with basic alias order on non existant field ASC",
+ Actions: []any{
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "John",
+ "Age": 21
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Age": 32
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Carlo",
+ "Age": 55
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Alice",
+ "Age": 19
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(order: {_alias: {UserAge: ASC}}) {
+ Name
+ Age
+ }
+ }`,
+ ExpectedError: `field or alias not found. Name: UserAge`,
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQuerySimple_WithInvalidAliasOrder_ShouldError(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with basic alias order invalid",
+ Actions: []any{
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "John",
+ "Age": 21
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Age": 32
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Carlo",
+ "Age": 55
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Alice",
+ "Age": 19
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(order: {_alias: {UserAge: invalid}}) {
+ Name
+ UserAge: Age
+ }
+ }`,
+ ExpectedError: `invalid order direction`,
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQuerySimple_WithEmptyAliasOrder_ShouldDoNothing(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with basic alias order empty",
+ Actions: []any{
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "John",
+ "Age": 21
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Age": 32
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Carlo",
+ "Age": 55
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Alice",
+ "Age": 19
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(order: {_alias: {}}) {
+ Name
+ Age
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "Name": "Carlo",
+ "Age": int64(55),
+ },
+ {
+ "Name": "Bob",
+ "Age": int64(32),
+ },
+ {
+ "Name": "John",
+ "Age": int64(21),
+ },
+ {
+ "Name": "Alice",
+ "Age": int64(19),
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQuerySimple_WithNullAliasOrder_ShouldDoNothing(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with basic alias order null",
+ Actions: []any{
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "John",
+ "Age": 21
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Age": 32
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Carlo",
+ "Age": 55
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Alice",
+ "Age": 19
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(order: {_alias: null}) {
+ Name
+ Age
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "Name": "Carlo",
+ "Age": int64(55),
+ },
+ {
+ "Name": "Bob",
+ "Age": int64(32),
+ },
+ {
+ "Name": "John",
+ "Age": int64(21),
+ },
+ {
+ "Name": "Alice",
+ "Age": int64(19),
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQuerySimple_WithIntAliasOrder_ShouldError(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with basic alias order empty",
+ Actions: []any{
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "John",
+ "Age": 21
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Age": 32
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Carlo",
+ "Age": 55
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Alice",
+ "Age": 19
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(order: {_alias: 1}) {
+ Name
+ Age
+ }
+ }`,
+ ExpectedError: `invalid order input`,
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQuerySimple_WithCompoundAliasOrder_ShouldOrderResults(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with compound alias order",
+ Actions: []any{
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "John",
+ "Age": 21,
+ "Verified": true
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Age": 21,
+ "Verified": false
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Carlo",
+ "Age": 55,
+ "Verified": true
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Alice",
+ "Age": 19,
+ "Verified": false
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(order: [{_alias: {userAge: DESC}}, {_alias: {isVerified: ASC}}]) {
+ Name
+ userAge: Age
+ isVerified: Verified
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "Name": "Carlo",
+ "userAge": int64(55),
+ "isVerified": true,
+ },
+ {
+ "Name": "Bob",
+ "userAge": int64(21),
+ "isVerified": false,
+ },
+ {
+ "Name": "John",
+ "userAge": int64(21),
+ "isVerified": true,
+ },
+ {
+ "Name": "Alice",
+ "userAge": int64(19),
+ "isVerified": false,
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
From 332f9d659e7cc7c7cdedd4d954ea1e3a22c4f19e Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
<41898282+github-actions[bot]@users.noreply.github.com>
Date: Mon, 11 Nov 2024 17:17:53 -0500
Subject: [PATCH 21/47] bot: Update dependencies (bulk dependabot PRs)
11-11-2024 (#3235)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
✅ This PR was created by combining the following PRs:
#3229 bot: Bump github.com/bits-and-blooms/bitset from 1.14.3 to 1.15.0
#3228 bot: Bump go.opentelemetry.io/otel/metric from 1.31.0 to 1.32.0
#3225 bot: Bump @typescript-eslint/parser from 8.12.2 to 8.13.0 in
/playground
⚠️ The following PRs were resolved manually due to merge conflicts:
#3227 bot: Bump go.opentelemetry.io/otel/sdk/metric from 1.31.0 to
1.32.0
#3224 bot: Bump graphiql from 3.7.1 to 3.7.2 in /playground
#3223 bot: Bump swagger-ui-react from 5.17.14 to 5.18.2 in /playground
#3222 bot: Bump @typescript-eslint/eslint-plugin from 8.12.2 to 8.13.0
in /playground
#3221 bot: Bump vite from 5.4.10 to 5.4.11 in /playground
---------
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: Shahzad Lone
---
go.mod | 14 +-
go.sum | 28 +-
playground/package-lock.json | 1605 ++++++++++++++++------------------
playground/package.json | 10 +-
4 files changed, 786 insertions(+), 871 deletions(-)
diff --git a/go.mod b/go.mod
index bd49118689..8d2c2715f2 100644
--- a/go.mod
+++ b/go.mod
@@ -5,7 +5,7 @@ go 1.22.0
toolchain go1.22.7
require (
- github.com/bits-and-blooms/bitset v1.14.3
+ github.com/bits-and-blooms/bitset v1.15.0
github.com/bxcodec/faker v2.0.1+incompatible
github.com/cosmos/cosmos-sdk v0.50.10
github.com/cosmos/gogoproto v1.7.0
@@ -59,8 +59,8 @@ require (
github.com/valyala/fastjson v1.6.4
github.com/vito/go-sse v1.1.2
github.com/zalando/go-keyring v0.2.6
- go.opentelemetry.io/otel/metric v1.31.0
- go.opentelemetry.io/otel/sdk/metric v1.31.0
+ go.opentelemetry.io/otel/metric v1.32.0
+ go.opentelemetry.io/otel/sdk/metric v1.32.0
go.uber.org/zap v1.27.0
golang.org/x/crypto v0.28.0
golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c
@@ -352,9 +352,9 @@ require (
go.opencensus.io v0.24.0 // indirect
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.52.0 // indirect
- go.opentelemetry.io/otel v1.31.0 // indirect
- go.opentelemetry.io/otel/sdk v1.31.0 // indirect
- go.opentelemetry.io/otel/trace v1.31.0 // indirect
+ go.opentelemetry.io/otel v1.32.0 // indirect
+ go.opentelemetry.io/otel/sdk v1.32.0 // indirect
+ go.opentelemetry.io/otel/trace v1.32.0 // indirect
go.uber.org/dig v1.18.0 // indirect
go.uber.org/fx v1.23.0 // indirect
go.uber.org/mock v0.5.0 // indirect
@@ -363,7 +363,7 @@ require (
golang.org/x/net v0.30.0 // indirect
golang.org/x/oauth2 v0.23.0 // indirect
golang.org/x/sync v0.8.0 // indirect
- golang.org/x/sys v0.26.0 // indirect
+ golang.org/x/sys v0.27.0 // indirect
golang.org/x/term v0.25.0 // indirect
golang.org/x/text v0.19.0 // indirect
golang.org/x/time v0.5.0 // indirect
diff --git a/go.sum b/go.sum
index 76ed0cb1ce..4b820aa582 100644
--- a/go.sum
+++ b/go.sum
@@ -294,8 +294,8 @@ github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d/go.mod h1:6QX/PXZ
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816 h1:41iFGWnSlI2gVpmOtVTJZNodLdLQLn/KsJqFvXwnd/s=
github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
-github.com/bits-and-blooms/bitset v1.14.3 h1:Gd2c8lSNf9pKXom5JtD7AaKO8o7fGQ2LtFj1436qilA=
-github.com/bits-and-blooms/bitset v1.14.3/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
+github.com/bits-and-blooms/bitset v1.15.0 h1:DiCRMscZsGyYePE9AR3sVhKqUXCt5IZvkX5AfAc5xLQ=
+github.com/bits-and-blooms/bitset v1.15.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM=
github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ=
github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g=
@@ -1538,16 +1538,16 @@ go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.4
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.52.0 h1:9l89oX4ba9kHbBol3Xin3leYJ+252h0zszDtBwyKe2A=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.52.0/go.mod h1:XLZfZboOJWHNKUv7eH0inh0E9VV6eWDFB/9yJyTLPp0=
-go.opentelemetry.io/otel v1.31.0 h1:NsJcKPIW0D0H3NgzPDHmo0WW6SptzPdqg/L1zsIm2hY=
-go.opentelemetry.io/otel v1.31.0/go.mod h1:O0C14Yl9FgkjqcCZAsE053C13OaddMYr/hz6clDkEJE=
-go.opentelemetry.io/otel/metric v1.31.0 h1:FSErL0ATQAmYHUIzSezZibnyVlft1ybhy4ozRPcF2fE=
-go.opentelemetry.io/otel/metric v1.31.0/go.mod h1:C3dEloVbLuYoX41KpmAhOqNriGbA+qqH6PQ5E5mUfnY=
-go.opentelemetry.io/otel/sdk v1.31.0 h1:xLY3abVHYZ5HSfOg3l2E5LUj2Cwva5Y7yGxnSW9H5Gk=
-go.opentelemetry.io/otel/sdk v1.31.0/go.mod h1:TfRbMdhvxIIr/B2N2LQW2S5v9m3gOQ/08KsbbO5BPT0=
-go.opentelemetry.io/otel/sdk/metric v1.31.0 h1:i9hxxLJF/9kkvfHppyLL55aW7iIJz4JjxTeYusH7zMc=
-go.opentelemetry.io/otel/sdk/metric v1.31.0/go.mod h1:CRInTMVvNhUKgSAMbKyTMxqOBC0zgyxzW55lZzX43Y8=
-go.opentelemetry.io/otel/trace v1.31.0 h1:ffjsj1aRouKewfr85U2aGagJ46+MvodynlQ1HYdmJys=
-go.opentelemetry.io/otel/trace v1.31.0/go.mod h1:TXZkRk7SM2ZQLtR6eoAWQFIHPvzQ06FJAsO1tJg480A=
+go.opentelemetry.io/otel v1.32.0 h1:WnBN+Xjcteh0zdk01SVqV55d/m62NJLJdIyb4y/WO5U=
+go.opentelemetry.io/otel v1.32.0/go.mod h1:00DCVSB0RQcnzlwyTfqtxSm+DRr9hpYrHjNGiBHVQIg=
+go.opentelemetry.io/otel/metric v1.32.0 h1:xV2umtmNcThh2/a/aCP+h64Xx5wsj8qqnkYZktzNa0M=
+go.opentelemetry.io/otel/metric v1.32.0/go.mod h1:jH7CIbbK6SH2V2wE16W05BHCtIDzauciCRLoc/SyMv8=
+go.opentelemetry.io/otel/sdk v1.32.0 h1:RNxepc9vK59A8XsgZQouW8ue8Gkb4jpWtJm9ge5lEG4=
+go.opentelemetry.io/otel/sdk v1.32.0/go.mod h1:LqgegDBjKMmb2GC6/PrTnteJG39I8/vJCAP9LlJXEjU=
+go.opentelemetry.io/otel/sdk/metric v1.32.0 h1:rZvFnvmvawYb0alrYkjraqJq0Z4ZUJAiyYCU9snn1CU=
+go.opentelemetry.io/otel/sdk/metric v1.32.0/go.mod h1:PWeZlq0zt9YkYAp3gjKZ0eicRYvOh1Gd+X99x6GHpCQ=
+go.opentelemetry.io/otel/trace v1.32.0 h1:WIC9mYrXf8TmY/EXuULKc8hR17vE+Hjv2cssQDe03fM=
+go.opentelemetry.io/otel/trace v1.32.0/go.mod h1:+i4rkvCraA+tG6AzwloGaCtkx53Fa+L+V8e9a7YvhT8=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
@@ -1870,8 +1870,8 @@ golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo=
-golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s=
+golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
diff --git a/playground/package-lock.json b/playground/package-lock.json
index 0c226c3c46..f14d580e2f 100644
--- a/playground/package-lock.json
+++ b/playground/package-lock.json
@@ -8,30 +8,30 @@
"name": "playground",
"version": "0.0.0",
"dependencies": {
- "graphiql": "^3.7.1",
+ "graphiql": "^3.7.2",
"graphql": "^16.9.0",
"react": "^18.3.1",
"react-dom": "^18.3.1",
- "swagger-ui-react": "^5.17.14"
+ "swagger-ui-react": "^5.18.2"
},
"devDependencies": {
"@types/react": "^18.3.12",
"@types/react-dom": "^18.3.1",
"@types/swagger-ui-react": "^4.18.3",
- "@typescript-eslint/eslint-plugin": "^8.12.2",
- "@typescript-eslint/parser": "^8.12.2",
+ "@typescript-eslint/eslint-plugin": "^8.13.0",
+ "@typescript-eslint/parser": "^8.13.0",
"@vitejs/plugin-react-swc": "^3.7.1",
"eslint": "^9.14.0",
"eslint-plugin-react-hooks": "^5.0.0",
"eslint-plugin-react-refresh": "^0.4.14",
"typescript": "^5.6.3",
- "vite": "^5.4.10"
+ "vite": "^5.4.11"
}
},
"node_modules/@babel/runtime": {
- "version": "7.25.4",
- "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.4.tgz",
- "integrity": "sha512-DSgLeL/FNcpXuzav5wfYvHCGvynXkJbn3Zvc3823AEe9nPwW9IK4UoCSS5yGymmQzN0pCPvivtgS6/8U2kkm1w==",
+ "version": "7.26.0",
+ "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.26.0.tgz",
+ "integrity": "sha512-FDSOghenHTiToteC/QRlv2q3DhPZ/oOXTBoirfWNx1Cx3TMVcGWQtMMmQcSvb/JjpNeGzx8Pq/b4fKEJuWm1sw==",
"license": "MIT",
"dependencies": {
"regenerator-runtime": "^0.14.0"
@@ -41,9 +41,9 @@
}
},
"node_modules/@babel/runtime-corejs3": {
- "version": "7.25.0",
- "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.25.0.tgz",
- "integrity": "sha512-BOehWE7MgQ8W8Qn0CQnMtg2tHPHPulcS/5AVpFvs2KCK1ET+0WqZqPvnpRpFN81gYoFopdIEJX9Sgjw3ZBccPg==",
+ "version": "7.26.0",
+ "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.26.0.tgz",
+ "integrity": "sha512-YXHu5lN8kJCb1LOb9PgV6pvak43X2h4HvRApcN5SdWeaItQOzfn1hgP6jasD6KWQyJDBxrVmA9o9OivlnNJK/w==",
"license": "MIT",
"dependencies": {
"core-js-pure": "^3.30.2",
@@ -54,46 +54,16 @@
}
},
"node_modules/@braintree/sanitize-url": {
- "version": "7.0.2",
- "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-7.0.2.tgz",
- "integrity": "sha512-NVf/1YycDMs6+FxS0Tb/W8MjJRDQdXF+tBfDtZ5UZeiRUkTmwKc4vmYCKZTyymfJk1gnMsauvZSX/HiV9jOABw==",
+ "version": "7.0.4",
+ "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-7.0.4.tgz",
+ "integrity": "sha512-hPYRrKFoI+nuckPgDJfyYAkybFvheo4usS0Vw0HNAe+fmGBQA5Az37b/yStO284atBoqqdOUhKJ3d9Zw3PQkcQ==",
"license": "MIT"
},
- "node_modules/@codemirror/language": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.0.0.tgz",
- "integrity": "sha512-rtjk5ifyMzOna1c7PBu7J1VCt0PvA5wy3o8eMVnxMKb7z8KA7JFecvD04dSn14vj/bBaAbqRsGed5OjtofEnLA==",
- "peer": true,
- "dependencies": {
- "@codemirror/state": "^6.0.0",
- "@codemirror/view": "^6.0.0",
- "@lezer/common": "^1.0.0",
- "@lezer/highlight": "^1.0.0",
- "@lezer/lr": "^1.0.0",
- "style-mod": "^4.0.0"
- }
- },
- "node_modules/@codemirror/state": {
- "version": "6.4.1",
- "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.4.1.tgz",
- "integrity": "sha512-QkEyUiLhsJoZkbumGZlswmAhA7CBU02Wrz7zvH4SrcifbsqwlXShVXg65f3v/ts57W3dqyamEriMhij1Z3Zz4A==",
- "peer": true
- },
- "node_modules/@codemirror/view": {
- "version": "6.33.0",
- "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.33.0.tgz",
- "integrity": "sha512-AroaR3BvnjRW8fiZBalAaK+ZzB5usGgI014YKElYZvQdNH5ZIidHlO+cyf/2rWzyBFRkvG6VhiXeAEbC53P2YQ==",
- "peer": true,
- "dependencies": {
- "@codemirror/state": "^6.4.0",
- "style-mod": "^4.1.0",
- "w3c-keyname": "^2.2.4"
- }
- },
"node_modules/@emotion/is-prop-valid": {
"version": "0.8.8",
"resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz",
"integrity": "sha512-u5WtneEAr5IDG2Wv65yhunPSMLIpuKsbuOktRojfrEiEvRyC85LgPMZI63cr7NUqT8ZIGdSVg8ZKGxIug4lXcA==",
+ "license": "MIT",
"optional": true,
"dependencies": {
"@emotion/memoize": "0.7.4"
@@ -103,6 +73,7 @@
"version": "0.7.4",
"resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.7.4.tgz",
"integrity": "sha512-Ja/Vfqe3HpuzRsG1oBtWTHk2PGZ7GR+2Vz5iYGelAw8dx32K0y7PjVuxK6z1nMpZOqAFsRUPCkK1YjJ56qJlgw==",
+ "license": "MIT",
"optional": true
},
"node_modules/@esbuild/aix-ppc64": {
@@ -497,17 +468,20 @@
}
},
"node_modules/@eslint-community/eslint-utils": {
- "version": "4.4.0",
- "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz",
- "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==",
+ "version": "4.4.1",
+ "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.1.tgz",
+ "integrity": "sha512-s3O3waFUrMV8P/XaF/+ZTp1X9XBZW1a4B97ZnjQF2KYWaFD2A8KyFBsrsfSjEmjn3RGWAIuvlneuZm3CUK3jbA==",
"dev": true,
"license": "MIT",
"dependencies": {
- "eslint-visitor-keys": "^3.3.0"
+ "eslint-visitor-keys": "^3.4.3"
},
"engines": {
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
},
+ "funding": {
+ "url": "https://opencollective.com/eslint"
+ },
"peerDependencies": {
"eslint": "^6.0.0 || ^7.0.0 || >=8.0.0"
}
@@ -517,6 +491,7 @@
"resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz",
"integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==",
"dev": true,
+ "license": "MIT",
"engines": {
"node": "^12.0.0 || ^14.0.0 || >=16.0.0"
}
@@ -526,6 +501,7 @@
"resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.18.0.tgz",
"integrity": "sha512-fTxvnS1sRMu3+JjXwJG0j/i4RT9u4qJ+lqS/yCGap4lH4zZGzQ7tu+xZqQmcMZq5OBZDL4QRxQzRjkWcGt8IVw==",
"dev": true,
+ "license": "Apache-2.0",
"dependencies": {
"@eslint/object-schema": "^2.1.4",
"debug": "^4.3.1",
@@ -540,6 +516,7 @@
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"dev": true,
+ "license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
@@ -550,6 +527,7 @@
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dev": true,
+ "license": "ISC",
"dependencies": {
"brace-expansion": "^1.1.7"
},
@@ -562,6 +540,7 @@
"resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.7.0.tgz",
"integrity": "sha512-xp5Jirz5DyPYlPiKat8jaq0EmYvDXKKpzTbxXMpT9eqlRJkRKIz9AGMdlvYjih+im+QlhWrpvVjl8IPC/lHlUw==",
"dev": true,
+ "license": "Apache-2.0",
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
}
@@ -619,6 +598,7 @@
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.14.0.tgz",
"integrity": "sha512-pFoEtFWCPyDOl+C6Ift+wC7Ro89otjigCf5vcuWqWgqNSQbRrpjSvdeE6ofLz4dHmyxD5f7gIdGT4+p36L6Twg==",
"dev": true,
+ "license": "MIT",
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
}
@@ -628,15 +608,17 @@
"resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.4.tgz",
"integrity": "sha512-BsWiH1yFGjXXS2yvrf5LyuoSIIbPrGUWob917o+BTKuZ7qJdxX8aJLRxs1fS9n6r7vESrq1OUqb68dANcFXuQQ==",
"dev": true,
+ "license": "Apache-2.0",
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
}
},
"node_modules/@eslint/plugin-kit": {
- "version": "0.2.0",
- "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.0.tgz",
- "integrity": "sha512-vH9PiIMMwvhCx31Af3HiGzsVNULDbyVkHXwlemn/B0TFj/00ho3y55efXrUZTfQipxoHC5u4xq6zblww1zm1Ig==",
+ "version": "0.2.2",
+ "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.2.tgz",
+ "integrity": "sha512-CXtq5nR4Su+2I47WPOlWud98Y5Lv8Kyxp2ukhgFx/eW6Blm18VXJO5WuQylPugRo8nbluoi6GvvxBLqHcvqUUw==",
"dev": true,
+ "license": "Apache-2.0",
"dependencies": {
"levn": "^0.4.1"
},
@@ -645,26 +627,29 @@
}
},
"node_modules/@floating-ui/core": {
- "version": "1.6.7",
- "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.6.7.tgz",
- "integrity": "sha512-yDzVT/Lm101nQ5TCVeK65LtdN7Tj4Qpr9RTXJ2vPFLqtLxwOrpoxAHAJI8J3yYWUc40J0BDBheaitK5SJmno2g==",
+ "version": "1.6.8",
+ "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.6.8.tgz",
+ "integrity": "sha512-7XJ9cPU+yI2QeLS+FCSlqNFZJq8arvswefkZrYI1yQBbftw6FyrZOxYSh+9S7z7TpeWlRt9zJ5IhM1WIL334jA==",
+ "license": "MIT",
"dependencies": {
- "@floating-ui/utils": "^0.2.7"
+ "@floating-ui/utils": "^0.2.8"
}
},
"node_modules/@floating-ui/dom": {
- "version": "1.6.10",
- "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.10.tgz",
- "integrity": "sha512-fskgCFv8J8OamCmyun8MfjB1Olfn+uZKjOKZ0vhYF3gRmEUXcGOjxWL8bBr7i4kIuPZ2KD2S3EUIOxnjC8kl2A==",
+ "version": "1.6.12",
+ "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.12.tgz",
+ "integrity": "sha512-NP83c0HjokcGVEMeoStg317VD9W7eDlGK7457dMBANbKA6GJZdc7rjujdgqzTaz93jkGgc5P/jeWbaCHnMNc+w==",
+ "license": "MIT",
"dependencies": {
"@floating-ui/core": "^1.6.0",
- "@floating-ui/utils": "^0.2.7"
+ "@floating-ui/utils": "^0.2.8"
}
},
"node_modules/@floating-ui/react-dom": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.1.1.tgz",
- "integrity": "sha512-4h84MJt3CHrtG18mGsXuLCHMrug49d7DFkU0RMIyshRveBeyV2hmV/pDaF2Uxtu8kgq5r46llp5E5FQiR0K2Yg==",
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.1.2.tgz",
+ "integrity": "sha512-06okr5cgPzMNBy+Ycse2A6udMi4bqwW/zgBF/rwjcNqWkyr82Mcg8b0vjX8OJpZFy/FKjJmw6wV7t44kK6kW7A==",
+ "license": "MIT",
"dependencies": {
"@floating-ui/dom": "^1.0.0"
},
@@ -674,14 +659,16 @@
}
},
"node_modules/@floating-ui/utils": {
- "version": "0.2.7",
- "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.7.tgz",
- "integrity": "sha512-X8R8Oj771YRl/w+c1HqAC1szL8zWQRwFvgDwT129k9ACdBoud/+/rX9V0qiMl6LWUdP9voC2nDVZYPMQQsb6eA=="
+ "version": "0.2.8",
+ "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.8.tgz",
+ "integrity": "sha512-kym7SodPp8/wloecOpcmSnWJsK7M0E5Wg8UcFA+uO4B9s5d0ywXOEro/8HM9x0rW+TljRzul/14UYz3TleT3ig==",
+ "license": "MIT"
},
"node_modules/@graphiql/react": {
- "version": "0.26.2",
- "resolved": "https://registry.npmjs.org/@graphiql/react/-/react-0.26.2.tgz",
- "integrity": "sha512-aO4GWf/kJmqrjO+PORT/NPxwGvPGlg+mwye1v8xAlf8Q9j7P0hVtVBawYaSLUCCfJ/QnH7JAP+0VRamyooZZCw==",
+ "version": "0.27.0",
+ "resolved": "https://registry.npmjs.org/@graphiql/react/-/react-0.27.0.tgz",
+ "integrity": "sha512-K9ZKWd+ewodbS/1kewedmITeeKLUQswMOXwIv8XFLPt3Ondodji0vr1XXXsttlyl+V2QG/9tYVV2RJ9Ch5LdrA==",
+ "license": "MIT",
"dependencies": {
"@graphiql/toolkit": "^0.11.0",
"@headlessui/react": "^1.7.15",
@@ -701,7 +688,7 @@
"set-value": "^4.1.0"
},
"peerDependencies": {
- "graphql": "^15.5.0 || ^16.0.0 || ^17.0.0-alpha.2",
+ "graphql": "^15.5.0 || ^16.0.0 || ^17.0.0",
"react": "^16.8.0 || ^17 || ^18",
"react-dom": "^16.8.0 || ^17 || ^18"
}
@@ -710,6 +697,7 @@
"version": "0.11.0",
"resolved": "https://registry.npmjs.org/@graphiql/toolkit/-/toolkit-0.11.0.tgz",
"integrity": "sha512-VqqQrvkMwgbGhj7J5907yfuAy5B1OCgOTIPi7gtRneG1jYmnqvSxi8Yrmu0B8G8fZxkxKVsYi8dE8EtsOBrTGQ==",
+ "license": "MIT",
"dependencies": {
"@n1ru4l/push-pull-async-iterable-iterator": "^3.1.0",
"meros": "^1.1.4"
@@ -728,6 +716,7 @@
"version": "1.7.19",
"resolved": "https://registry.npmjs.org/@headlessui/react/-/react-1.7.19.tgz",
"integrity": "sha512-Ll+8q3OlMJfJbAKM/+/Y2q6PPYbryqNTXDbryx7SXLIDamkF6iQFbriYHga0dY44PvDhvvBWCx1Xj4U5+G4hOw==",
+ "license": "MIT",
"dependencies": {
"@tanstack/react-virtual": "^3.0.0-beta.60",
"client-only": "^0.0.1"
@@ -745,6 +734,7 @@
"resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz",
"integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==",
"dev": true,
+ "license": "Apache-2.0",
"engines": {
"node": ">=18.18.0"
}
@@ -754,6 +744,7 @@
"resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.6.tgz",
"integrity": "sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==",
"dev": true,
+ "license": "Apache-2.0",
"dependencies": {
"@humanfs/core": "^0.19.1",
"@humanwhocodes/retry": "^0.3.0"
@@ -767,6 +758,7 @@
"resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz",
"integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==",
"dev": true,
+ "license": "Apache-2.0",
"engines": {
"node": ">=18.18"
},
@@ -790,10 +782,11 @@
}
},
"node_modules/@humanwhocodes/retry": {
- "version": "0.4.0",
- "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.0.tgz",
- "integrity": "sha512-xnRgu9DxZbkWak/te3fcytNyp8MTbuiZIaueg2rgEvBuN55n04nwLYLU9TX/VVlusc9L2ZNXi99nUFNkHXtr5g==",
+ "version": "0.4.1",
+ "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.1.tgz",
+ "integrity": "sha512-c7hNEllBlenFTHBky65mhq8WD2kbN9Q6gk0bTk8lSBvc554jpXSkST1iePudpt7+A/AQvuHs9EMqjHDXMY1lrA==",
"dev": true,
+ "license": "Apache-2.0",
"engines": {
"node": ">=18.18"
},
@@ -802,34 +795,11 @@
"url": "https://github.com/sponsors/nzakas"
}
},
- "node_modules/@lezer/common": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.1.tgz",
- "integrity": "sha512-yemX0ZD2xS/73llMZIK6KplkjIjf2EvAHcinDi/TfJ9hS25G0388+ClHt6/3but0oOxinTcQHJLDXh6w1crzFQ==",
- "peer": true
- },
- "node_modules/@lezer/highlight": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.2.1.tgz",
- "integrity": "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA==",
- "peer": true,
- "dependencies": {
- "@lezer/common": "^1.0.0"
- }
- },
- "node_modules/@lezer/lr": {
- "version": "1.4.2",
- "resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.2.tgz",
- "integrity": "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA==",
- "peer": true,
- "dependencies": {
- "@lezer/common": "^1.0.0"
- }
- },
"node_modules/@motionone/animation": {
"version": "10.18.0",
"resolved": "https://registry.npmjs.org/@motionone/animation/-/animation-10.18.0.tgz",
"integrity": "sha512-9z2p5GFGCm0gBsZbi8rVMOAJCtw1WqBTIPw3ozk06gDvZInBPIsQcHgYogEJ4yuHJ+akuW8g1SEIOpTOvYs8hw==",
+ "license": "MIT",
"dependencies": {
"@motionone/easing": "^10.18.0",
"@motionone/types": "^10.17.1",
@@ -841,6 +811,7 @@
"version": "10.12.0",
"resolved": "https://registry.npmjs.org/@motionone/dom/-/dom-10.12.0.tgz",
"integrity": "sha512-UdPTtLMAktHiqV0atOczNYyDd/d8Cf5fFsd1tua03PqTwwCe/6lwhLSQ8a7TbnQ5SN0gm44N1slBfj+ORIhrqw==",
+ "license": "MIT",
"dependencies": {
"@motionone/animation": "^10.12.0",
"@motionone/generators": "^10.12.0",
@@ -854,6 +825,7 @@
"version": "10.18.0",
"resolved": "https://registry.npmjs.org/@motionone/easing/-/easing-10.18.0.tgz",
"integrity": "sha512-VcjByo7XpdLS4o9T8t99JtgxkdMcNWD3yHU/n6CLEz3bkmKDRZyYQ/wmSf6daum8ZXqfUAgFeCZSpJZIMxaCzg==",
+ "license": "MIT",
"dependencies": {
"@motionone/utils": "^10.18.0",
"tslib": "^2.3.1"
@@ -863,6 +835,7 @@
"version": "10.18.0",
"resolved": "https://registry.npmjs.org/@motionone/generators/-/generators-10.18.0.tgz",
"integrity": "sha512-+qfkC2DtkDj4tHPu+AFKVfR/C30O1vYdvsGYaR13W/1cczPrrcjdvYCj0VLFuRMN+lP1xvpNZHCRNM4fBzn1jg==",
+ "license": "MIT",
"dependencies": {
"@motionone/types": "^10.17.1",
"@motionone/utils": "^10.18.0",
@@ -872,12 +845,14 @@
"node_modules/@motionone/types": {
"version": "10.17.1",
"resolved": "https://registry.npmjs.org/@motionone/types/-/types-10.17.1.tgz",
- "integrity": "sha512-KaC4kgiODDz8hswCrS0btrVrzyU2CSQKO7Ps90ibBVSQmjkrt2teqta6/sOG59v7+dPnKMAg13jyqtMKV2yJ7A=="
+ "integrity": "sha512-KaC4kgiODDz8hswCrS0btrVrzyU2CSQKO7Ps90ibBVSQmjkrt2teqta6/sOG59v7+dPnKMAg13jyqtMKV2yJ7A==",
+ "license": "MIT"
},
"node_modules/@motionone/utils": {
"version": "10.18.0",
"resolved": "https://registry.npmjs.org/@motionone/utils/-/utils-10.18.0.tgz",
"integrity": "sha512-3XVF7sgyTSI2KWvTf6uLlBJ5iAgRgmvp3bpuOiQJvInd4nZ19ET8lX5unn30SlmRH7hXbBbH+Gxd0m0klJ3Xtw==",
+ "license": "MIT",
"dependencies": {
"@motionone/types": "^10.17.1",
"hey-listen": "^1.0.8",
@@ -888,6 +863,7 @@
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/@n1ru4l/push-pull-async-iterable-iterator/-/push-pull-async-iterable-iterator-3.2.0.tgz",
"integrity": "sha512-3fkKj25kEjsfObL6IlKPAlHYPq/oYwUkkQ03zsTTiDjD7vg/RxjdiLeCydqtxHZP0JgsXL3D/X5oAkMGzuUp/Q==",
+ "license": "MIT",
"engines": {
"node": ">=12"
}
@@ -933,12 +909,14 @@
"node_modules/@radix-ui/primitive": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.0.tgz",
- "integrity": "sha512-4Z8dn6Upk0qk4P74xBhZ6Hd/w0mPEzOOLxy4xiPXOXqjF7jZS0VAKk7/x/H6FyY2zCkYJqePf1G5KmkmNJ4RBA=="
+ "integrity": "sha512-4Z8dn6Upk0qk4P74xBhZ6Hd/w0mPEzOOLxy4xiPXOXqjF7jZS0VAKk7/x/H6FyY2zCkYJqePf1G5KmkmNJ4RBA==",
+ "license": "MIT"
},
"node_modules/@radix-ui/react-arrow": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.1.0.tgz",
"integrity": "sha512-FmlW1rCg7hBpEBwFbjHwCW6AmWLQM6g/v0Sn8XbP9NvmSZ2San1FpQeyPtufzOMSIx7Y4dzjlHoifhp+7NkZhw==",
+ "license": "MIT",
"dependencies": {
"@radix-ui/react-primitive": "2.0.0"
},
@@ -961,6 +939,7 @@
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-1.1.0.tgz",
"integrity": "sha512-GZsZslMJEyo1VKm5L1ZJY8tGDxZNPAoUeQUIbKeJfoi7Q4kmig5AsgLMYYuyYbfjd8fBmFORAIwYAkXMnXZgZw==",
+ "license": "MIT",
"dependencies": {
"@radix-ui/react-compose-refs": "1.1.0",
"@radix-ui/react-context": "1.1.0",
@@ -982,10 +961,26 @@
}
}
},
+ "node_modules/@radix-ui/react-collection/node_modules/@radix-ui/react-context": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.0.tgz",
+ "integrity": "sha512-OKrckBy+sMEgYM/sMmqmErVn0kZqrHPJze+Ql3DzYsDDp0hl0L62nx/2122/Bvps1qz645jlcu2tD9lrRSdf8A==",
+ "license": "MIT",
+ "peerDependencies": {
+ "@types/react": "*",
+ "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
+ },
+ "peerDependenciesMeta": {
+ "@types/react": {
+ "optional": true
+ }
+ }
+ },
"node_modules/@radix-ui/react-compose-refs": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.1.0.tgz",
"integrity": "sha512-b4inOtiaOnYf9KWyO3jAeeCG6FeyfY6ldiEPanbUjWd+xIk5wZeHa8yVwmrJ2vderhu/BQvzCrJI0lHd+wIiqw==",
+ "license": "MIT",
"peerDependencies": {
"@types/react": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
@@ -997,9 +992,10 @@
}
},
"node_modules/@radix-ui/react-context": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.0.tgz",
- "integrity": "sha512-OKrckBy+sMEgYM/sMmqmErVn0kZqrHPJze+Ql3DzYsDDp0hl0L62nx/2122/Bvps1qz645jlcu2tD9lrRSdf8A==",
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.1.tgz",
+ "integrity": "sha512-UASk9zi+crv9WteK/NU4PLvOoL3OuE6BWVKNF6hPRBtYBDXQ2u5iu3O59zUlJiTVvkyuycnqrztsHVJwcK9K+Q==",
+ "license": "MIT",
"peerDependencies": {
"@types/react": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
@@ -1011,24 +1007,25 @@
}
},
"node_modules/@radix-ui/react-dialog": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.1.1.tgz",
- "integrity": "sha512-zysS+iU4YP3STKNS6USvFVqI4qqx8EpiwmT5TuCApVEBca+eRCbONi4EgzfNSuVnOXvC5UPHHMjs8RXO6DH9Bg==",
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.1.2.tgz",
+ "integrity": "sha512-Yj4dZtqa2o+kG61fzB0H2qUvmwBA2oyQroGLyNtBj1beo1khoQ3q1a2AO8rrQYjd8256CO9+N8L9tvsS+bnIyA==",
+ "license": "MIT",
"dependencies": {
"@radix-ui/primitive": "1.1.0",
"@radix-ui/react-compose-refs": "1.1.0",
- "@radix-ui/react-context": "1.1.0",
- "@radix-ui/react-dismissable-layer": "1.1.0",
- "@radix-ui/react-focus-guards": "1.1.0",
+ "@radix-ui/react-context": "1.1.1",
+ "@radix-ui/react-dismissable-layer": "1.1.1",
+ "@radix-ui/react-focus-guards": "1.1.1",
"@radix-ui/react-focus-scope": "1.1.0",
"@radix-ui/react-id": "1.1.0",
- "@radix-ui/react-portal": "1.1.1",
- "@radix-ui/react-presence": "1.1.0",
+ "@radix-ui/react-portal": "1.1.2",
+ "@radix-ui/react-presence": "1.1.1",
"@radix-ui/react-primitive": "2.0.0",
"@radix-ui/react-slot": "1.1.0",
"@radix-ui/react-use-controllable-state": "1.1.0",
"aria-hidden": "^1.1.1",
- "react-remove-scroll": "2.5.7"
+ "react-remove-scroll": "2.6.0"
},
"peerDependencies": {
"@types/react": "*",
@@ -1049,6 +1046,7 @@
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@radix-ui/react-direction/-/react-direction-1.1.0.tgz",
"integrity": "sha512-BUuBvgThEiAXh2DWu93XsT+a3aWrGqolGlqqw5VU1kG7p/ZH2cuDlM1sRLNnY3QcBS69UIz2mcKhMxDsdewhjg==",
+ "license": "MIT",
"peerDependencies": {
"@types/react": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
@@ -1060,9 +1058,10 @@
}
},
"node_modules/@radix-ui/react-dismissable-layer": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.1.0.tgz",
- "integrity": "sha512-/UovfmmXGptwGcBQawLzvn2jOfM0t4z3/uKffoBlj724+n3FvBbZ7M0aaBOmkp6pqFYpO4yx8tSVJjx3Fl2jig==",
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.1.1.tgz",
+ "integrity": "sha512-QSxg29lfr/xcev6kSz7MAlmDnzbP1eI/Dwn3Tp1ip0KT5CUELsxkekFEMVBEoykI3oV39hKT4TKZzBNMbcTZYQ==",
+ "license": "MIT",
"dependencies": {
"@radix-ui/primitive": "1.1.0",
"@radix-ui/react-compose-refs": "1.1.0",
@@ -1086,15 +1085,16 @@
}
},
"node_modules/@radix-ui/react-dropdown-menu": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-dropdown-menu/-/react-dropdown-menu-2.1.1.tgz",
- "integrity": "sha512-y8E+x9fBq9qvteD2Zwa4397pUVhYsh9iq44b5RD5qu1GMJWBCBuVg1hMyItbc6+zH00TxGRqd9Iot4wzf3OoBQ==",
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-dropdown-menu/-/react-dropdown-menu-2.1.2.tgz",
+ "integrity": "sha512-GVZMR+eqK8/Kes0a36Qrv+i20bAPXSn8rCBTHx30w+3ECnR5o3xixAlqcVaYvLeyKUsm0aqyhWfmUcqufM8nYA==",
+ "license": "MIT",
"dependencies": {
"@radix-ui/primitive": "1.1.0",
"@radix-ui/react-compose-refs": "1.1.0",
- "@radix-ui/react-context": "1.1.0",
+ "@radix-ui/react-context": "1.1.1",
"@radix-ui/react-id": "1.1.0",
- "@radix-ui/react-menu": "2.1.1",
+ "@radix-ui/react-menu": "2.1.2",
"@radix-ui/react-primitive": "2.0.0",
"@radix-ui/react-use-controllable-state": "1.1.0"
},
@@ -1114,9 +1114,10 @@
}
},
"node_modules/@radix-ui/react-focus-guards": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.1.0.tgz",
- "integrity": "sha512-w6XZNUPVv6xCpZUqb/yN9DL6auvpGX3C/ee6Hdi16v2UUy25HV2Q5bcflsiDyT/g5RwbPQ/GIT1vLkeRb+ITBw==",
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.1.1.tgz",
+ "integrity": "sha512-pSIwfrT1a6sIoDASCSpFwOasEwKTZWDw/iBdtnqKO7v6FeOzYJ7U53cPzYFVR3geGGXgVHaH+CdngrrAzqUGxg==",
+ "license": "MIT",
"peerDependencies": {
"@types/react": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
@@ -1131,6 +1132,7 @@
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.1.0.tgz",
"integrity": "sha512-200UD8zylvEyL8Bx+z76RJnASR2gRMuxlgFCPAe/Q/679a/r0eK3MBVYMb7vZODZcffZBdob1EGnky78xmVvcA==",
+ "license": "MIT",
"dependencies": {
"@radix-ui/react-compose-refs": "1.1.0",
"@radix-ui/react-primitive": "2.0.0",
@@ -1155,6 +1157,7 @@
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.1.0.tgz",
"integrity": "sha512-EJUrI8yYh7WOjNOqpoJaf1jlFIH2LvtgAl+YcFqNCa+4hj64ZXmPkAKOFs/ukjz3byN6bdb/AVUqHkI8/uWWMA==",
+ "license": "MIT",
"dependencies": {
"@radix-ui/react-use-layout-effect": "1.1.0"
},
@@ -1169,28 +1172,29 @@
}
},
"node_modules/@radix-ui/react-menu": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-2.1.1.tgz",
- "integrity": "sha512-oa3mXRRVjHi6DZu/ghuzdylyjaMXLymx83irM7hTxutQbD+7IhPKdMdRHD26Rm+kHRrWcrUkkRPv5pd47a2xFQ==",
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-2.1.2.tgz",
+ "integrity": "sha512-lZ0R4qR2Al6fZ4yCCZzu/ReTFrylHFxIqy7OezIpWF4bL0o9biKo0pFIvkaew3TyZ9Fy5gYVrR5zCGZBVbO1zg==",
+ "license": "MIT",
"dependencies": {
"@radix-ui/primitive": "1.1.0",
"@radix-ui/react-collection": "1.1.0",
"@radix-ui/react-compose-refs": "1.1.0",
- "@radix-ui/react-context": "1.1.0",
+ "@radix-ui/react-context": "1.1.1",
"@radix-ui/react-direction": "1.1.0",
- "@radix-ui/react-dismissable-layer": "1.1.0",
- "@radix-ui/react-focus-guards": "1.1.0",
+ "@radix-ui/react-dismissable-layer": "1.1.1",
+ "@radix-ui/react-focus-guards": "1.1.1",
"@radix-ui/react-focus-scope": "1.1.0",
"@radix-ui/react-id": "1.1.0",
"@radix-ui/react-popper": "1.2.0",
- "@radix-ui/react-portal": "1.1.1",
- "@radix-ui/react-presence": "1.1.0",
+ "@radix-ui/react-portal": "1.1.2",
+ "@radix-ui/react-presence": "1.1.1",
"@radix-ui/react-primitive": "2.0.0",
"@radix-ui/react-roving-focus": "1.1.0",
"@radix-ui/react-slot": "1.1.0",
"@radix-ui/react-use-callback-ref": "1.1.0",
"aria-hidden": "^1.1.1",
- "react-remove-scroll": "2.5.7"
+ "react-remove-scroll": "2.6.0"
},
"peerDependencies": {
"@types/react": "*",
@@ -1211,6 +1215,7 @@
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.2.0.tgz",
"integrity": "sha512-ZnRMshKF43aBxVWPWvbj21+7TQCvhuULWJ4gNIKYpRlQt5xGRhLx66tMp8pya2UkGHTSlhpXwmjqltDYHhw7Vg==",
+ "license": "MIT",
"dependencies": {
"@floating-ui/react-dom": "^2.0.0",
"@radix-ui/react-arrow": "1.1.0",
@@ -1238,10 +1243,26 @@
}
}
},
+ "node_modules/@radix-ui/react-popper/node_modules/@radix-ui/react-context": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.0.tgz",
+ "integrity": "sha512-OKrckBy+sMEgYM/sMmqmErVn0kZqrHPJze+Ql3DzYsDDp0hl0L62nx/2122/Bvps1qz645jlcu2tD9lrRSdf8A==",
+ "license": "MIT",
+ "peerDependencies": {
+ "@types/react": "*",
+ "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
+ },
+ "peerDependenciesMeta": {
+ "@types/react": {
+ "optional": true
+ }
+ }
+ },
"node_modules/@radix-ui/react-portal": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.1.1.tgz",
- "integrity": "sha512-A3UtLk85UtqhzFqtoC8Q0KvR2GbXF3mtPgACSazajqq6A41mEQgo53iPzY4i6BwDxlIFqWIhiQ2G729n+2aw/g==",
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.1.2.tgz",
+ "integrity": "sha512-WeDYLGPxJb/5EGBoedyJbT0MpoULmwnIPMJMSldkuiMsBAv7N1cRdsTWZWht9vpPOiN3qyiGAtbK2is47/uMFg==",
+ "license": "MIT",
"dependencies": {
"@radix-ui/react-primitive": "2.0.0",
"@radix-ui/react-use-layout-effect": "1.1.0"
@@ -1262,9 +1283,10 @@
}
},
"node_modules/@radix-ui/react-presence": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.1.0.tgz",
- "integrity": "sha512-Gq6wuRN/asf9H/E/VzdKoUtT8GC9PQc9z40/vEr0VCJ4u5XvvhWIrSsCB6vD2/cH7ugTdSfYq9fLJCcM00acrQ==",
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.1.1.tgz",
+ "integrity": "sha512-IeFXVi4YS1K0wVZzXNrbaaUvIJ3qdY+/Ih4eHFhWA9SwGR9UDX7Ck8abvL57C4cv3wwMvUE0OG69Qc3NCcTe/A==",
+ "license": "MIT",
"dependencies": {
"@radix-ui/react-compose-refs": "1.1.0",
"@radix-ui/react-use-layout-effect": "1.1.0"
@@ -1288,6 +1310,7 @@
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.0.0.tgz",
"integrity": "sha512-ZSpFm0/uHa8zTvKBDjLFWLo8dkr4MBsiDLz0g3gMUwqgLHz9rTaRRGYDgvZPtBJgYCBKXkS9fzmoySgr8CO6Cw==",
+ "license": "MIT",
"dependencies": {
"@radix-ui/react-slot": "1.1.0"
},
@@ -1310,6 +1333,7 @@
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-1.1.0.tgz",
"integrity": "sha512-EA6AMGeq9AEeQDeSH0aZgG198qkfHSbvWTf1HvoDmOB5bBG/qTxjYMWUKMnYiV6J/iP/J8MEFSuB2zRU2n7ODA==",
+ "license": "MIT",
"dependencies": {
"@radix-ui/primitive": "1.1.0",
"@radix-ui/react-collection": "1.1.0",
@@ -1336,10 +1360,26 @@
}
}
},
+ "node_modules/@radix-ui/react-roving-focus/node_modules/@radix-ui/react-context": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.0.tgz",
+ "integrity": "sha512-OKrckBy+sMEgYM/sMmqmErVn0kZqrHPJze+Ql3DzYsDDp0hl0L62nx/2122/Bvps1qz645jlcu2tD9lrRSdf8A==",
+ "license": "MIT",
+ "peerDependencies": {
+ "@types/react": "*",
+ "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
+ },
+ "peerDependenciesMeta": {
+ "@types/react": {
+ "optional": true
+ }
+ }
+ },
"node_modules/@radix-ui/react-slot": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.1.0.tgz",
"integrity": "sha512-FUCf5XMfmW4dtYl69pdS4DbxKy8nj4M7SafBgPllysxmdachynNflAdp/gCsnYWNDnge6tI9onzMp5ARYc1KNw==",
+ "license": "MIT",
"dependencies": {
"@radix-ui/react-compose-refs": "1.1.0"
},
@@ -1354,18 +1394,19 @@
}
},
"node_modules/@radix-ui/react-tooltip": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.1.2.tgz",
- "integrity": "sha512-9XRsLwe6Yb9B/tlnYCPVUd/TFS4J7HuOZW345DCeC6vKIxQGMZdx21RK4VoZauPD5frgkXTYVS5y90L+3YBn4w==",
+ "version": "1.1.3",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.1.3.tgz",
+ "integrity": "sha512-Z4w1FIS0BqVFI2c1jZvb/uDVJijJjJ2ZMuPV81oVgTZ7g3BZxobplnMVvXtFWgtozdvYJ+MFWtwkM5S2HnAong==",
+ "license": "MIT",
"dependencies": {
"@radix-ui/primitive": "1.1.0",
"@radix-ui/react-compose-refs": "1.1.0",
- "@radix-ui/react-context": "1.1.0",
- "@radix-ui/react-dismissable-layer": "1.1.0",
+ "@radix-ui/react-context": "1.1.1",
+ "@radix-ui/react-dismissable-layer": "1.1.1",
"@radix-ui/react-id": "1.1.0",
"@radix-ui/react-popper": "1.2.0",
- "@radix-ui/react-portal": "1.1.1",
- "@radix-ui/react-presence": "1.1.0",
+ "@radix-ui/react-portal": "1.1.2",
+ "@radix-ui/react-presence": "1.1.1",
"@radix-ui/react-primitive": "2.0.0",
"@radix-ui/react-slot": "1.1.0",
"@radix-ui/react-use-controllable-state": "1.1.0",
@@ -1390,6 +1431,7 @@
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.1.0.tgz",
"integrity": "sha512-CasTfvsy+frcFkbXtSJ2Zu9JHpN8TYKxkgJGWbjiZhFivxaeW7rMeZt7QELGVLaYVfFMsKHjb7Ak0nMEe+2Vfw==",
+ "license": "MIT",
"peerDependencies": {
"@types/react": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
@@ -1404,6 +1446,7 @@
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.1.0.tgz",
"integrity": "sha512-MtfMVJiSr2NjzS0Aa90NPTnvTSg6C/JLCV7ma0W6+OMV78vd8OyRpID+Ng9LxzsPbLeuBnWBA1Nq30AtBIDChw==",
+ "license": "MIT",
"dependencies": {
"@radix-ui/react-use-callback-ref": "1.1.0"
},
@@ -1421,6 +1464,7 @@
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.1.0.tgz",
"integrity": "sha512-L7vwWlR1kTTQ3oh7g1O0CBF3YCyyTj8NmhLR+phShpyA50HCfBFKVJTpshm9PzLiKmehsrQzTYTpX9HvmC9rhw==",
+ "license": "MIT",
"dependencies": {
"@radix-ui/react-use-callback-ref": "1.1.0"
},
@@ -1438,6 +1482,7 @@
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.1.0.tgz",
"integrity": "sha512-+FPE0rOdziWSrH9athwI1R0HDVbWlEhd+FR+aSDk4uWGmSJ9Z54sdZVDQPZAinJhJXwfT+qnj969mCsT2gfm5w==",
+ "license": "MIT",
"peerDependencies": {
"@types/react": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
@@ -1452,6 +1497,7 @@
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-1.1.0.tgz",
"integrity": "sha512-0Fmkebhr6PiseyZlYAOtLS+nb7jLmpqTrJyv61Pe68MKYW6OWdRE2kI70TaYY27u7H0lajqM3hSMMLFq18Z7nQ==",
+ "license": "MIT",
"dependencies": {
"@radix-ui/rect": "1.1.0"
},
@@ -1469,6 +1515,7 @@
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-1.1.0.tgz",
"integrity": "sha512-XW3/vWuIXHa+2Uwcc2ABSfcCledmXhhQPlGbfcRXbiUQI5Icjcg19BGCZVKKInYbvUCut/ufbbLLPFC5cbb1hw==",
+ "license": "MIT",
"dependencies": {
"@radix-ui/react-use-layout-effect": "1.1.0"
},
@@ -1486,6 +1533,7 @@
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@radix-ui/react-visually-hidden/-/react-visually-hidden-1.1.0.tgz",
"integrity": "sha512-N8MDZqtgCgG5S3aV60INAB475osJousYpZ4cTJ2cFbMpdHS5Y6loLTH8LPtkj2QN0x93J30HT/M3qJXM0+lyeQ==",
+ "license": "MIT",
"dependencies": {
"@radix-ui/react-primitive": "2.0.0"
},
@@ -1507,224 +1555,276 @@
"node_modules/@radix-ui/rect": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@radix-ui/rect/-/rect-1.1.0.tgz",
- "integrity": "sha512-A9+lCBZoaMJlVKcRBz2YByCG+Cp2t6nAnMnNba+XiWxnj6r4JUFqfsgwocMBZU9LPtdxC6wB56ySYpc7LQIoJg=="
+ "integrity": "sha512-A9+lCBZoaMJlVKcRBz2YByCG+Cp2t6nAnMnNba+XiWxnj6r4JUFqfsgwocMBZU9LPtdxC6wB56ySYpc7LQIoJg==",
+ "license": "MIT"
},
"node_modules/@rollup/rollup-android-arm-eabi": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.22.4.tgz",
- "integrity": "sha512-Fxamp4aEZnfPOcGA8KSNEohV8hX7zVHOemC8jVBoBUHu5zpJK/Eu3uJwt6BMgy9fkvzxDaurgj96F/NiLukF2w==",
+ "version": "4.25.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.25.0.tgz",
+ "integrity": "sha512-CC/ZqFZwlAIbU1wUPisHyV/XRc5RydFrNLtgl3dGYskdwPZdt4HERtKm50a/+DtTlKeCq9IXFEWR+P6blwjqBA==",
"cpu": [
"arm"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"android"
]
},
"node_modules/@rollup/rollup-android-arm64": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.22.4.tgz",
- "integrity": "sha512-VXoK5UMrgECLYaMuGuVTOx5kcuap1Jm8g/M83RnCHBKOqvPPmROFJGQaZhGccnsFtfXQ3XYa4/jMCJvZnbJBdA==",
+ "version": "4.25.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.25.0.tgz",
+ "integrity": "sha512-/Y76tmLGUJqVBXXCfVS8Q8FJqYGhgH4wl4qTA24E9v/IJM0XvJCGQVSW1QZ4J+VURO9h8YCa28sTFacZXwK7Rg==",
"cpu": [
"arm64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"android"
]
},
"node_modules/@rollup/rollup-darwin-arm64": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.22.4.tgz",
- "integrity": "sha512-xMM9ORBqu81jyMKCDP+SZDhnX2QEVQzTcC6G18KlTQEzWK8r/oNZtKuZaCcHhnsa6fEeOBionoyl5JsAbE/36Q==",
+ "version": "4.25.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.25.0.tgz",
+ "integrity": "sha512-YVT6L3UrKTlC0FpCZd0MGA7NVdp7YNaEqkENbWQ7AOVOqd/7VzyHpgIpc1mIaxRAo1ZsJRH45fq8j4N63I/vvg==",
"cpu": [
"arm64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"darwin"
]
},
"node_modules/@rollup/rollup-darwin-x64": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.22.4.tgz",
- "integrity": "sha512-aJJyYKQwbHuhTUrjWjxEvGnNNBCnmpHDvrb8JFDbeSH3m2XdHcxDd3jthAzvmoI8w/kSjd2y0udT+4okADsZIw==",
+ "version": "4.25.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.25.0.tgz",
+ "integrity": "sha512-ZRL+gexs3+ZmmWmGKEU43Bdn67kWnMeWXLFhcVv5Un8FQcx38yulHBA7XR2+KQdYIOtD0yZDWBCudmfj6lQJoA==",
"cpu": [
"x64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"darwin"
]
},
+ "node_modules/@rollup/rollup-freebsd-arm64": {
+ "version": "4.25.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.25.0.tgz",
+ "integrity": "sha512-xpEIXhiP27EAylEpreCozozsxWQ2TJbOLSivGfXhU4G1TBVEYtUPi2pOZBnvGXHyOdLAUUhPnJzH3ah5cqF01g==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ]
+ },
+ "node_modules/@rollup/rollup-freebsd-x64": {
+ "version": "4.25.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.25.0.tgz",
+ "integrity": "sha512-sC5FsmZGlJv5dOcURrsnIK7ngc3Kirnx3as2XU9uER+zjfyqIjdcMVgzy4cOawhsssqzoAX19qmxgJ8a14Qrqw==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ]
+ },
"node_modules/@rollup/rollup-linux-arm-gnueabihf": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.22.4.tgz",
- "integrity": "sha512-j63YtCIRAzbO+gC2L9dWXRh5BFetsv0j0va0Wi9epXDgU/XUi5dJKo4USTttVyK7fGw2nPWK0PbAvyliz50SCQ==",
+ "version": "4.25.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.25.0.tgz",
+ "integrity": "sha512-uD/dbLSs1BEPzg564TpRAQ/YvTnCds2XxyOndAO8nJhaQcqQGFgv/DAVko/ZHap3boCvxnzYMa3mTkV/B/3SWA==",
"cpu": [
"arm"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-arm-musleabihf": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.22.4.tgz",
- "integrity": "sha512-dJnWUgwWBX1YBRsuKKMOlXCzh2Wu1mlHzv20TpqEsfdZLb3WoJW2kIEsGwLkroYf24IrPAvOT/ZQ2OYMV6vlrg==",
+ "version": "4.25.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.25.0.tgz",
+ "integrity": "sha512-ZVt/XkrDlQWegDWrwyC3l0OfAF7yeJUF4fq5RMS07YM72BlSfn2fQQ6lPyBNjt+YbczMguPiJoCfaQC2dnflpQ==",
"cpu": [
"arm"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-arm64-gnu": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.22.4.tgz",
- "integrity": "sha512-AdPRoNi3NKVLolCN/Sp4F4N1d98c4SBnHMKoLuiG6RXgoZ4sllseuGioszumnPGmPM2O7qaAX/IJdeDU8f26Aw==",
+ "version": "4.25.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.25.0.tgz",
+ "integrity": "sha512-qboZ+T0gHAW2kkSDPHxu7quaFaaBlynODXpBVnPxUgvWYaE84xgCKAPEYE+fSMd3Zv5PyFZR+L0tCdYCMAtG0A==",
"cpu": [
"arm64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-arm64-musl": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.22.4.tgz",
- "integrity": "sha512-Gl0AxBtDg8uoAn5CCqQDMqAx22Wx22pjDOjBdmG0VIWX3qUBHzYmOKh8KXHL4UpogfJ14G4wk16EQogF+v8hmA==",
+ "version": "4.25.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.25.0.tgz",
+ "integrity": "sha512-ndWTSEmAaKr88dBuogGH2NZaxe7u2rDoArsejNslugHZ+r44NfWiwjzizVS1nUOHo+n1Z6qV3X60rqE/HlISgw==",
"cpu": [
"arm64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-powerpc64le-gnu": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.22.4.tgz",
- "integrity": "sha512-3aVCK9xfWW1oGQpTsYJJPF6bfpWfhbRnhdlyhak2ZiyFLDaayz0EP5j9V1RVLAAxlmWKTDfS9wyRyY3hvhPoOg==",
+ "version": "4.25.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.25.0.tgz",
+ "integrity": "sha512-BVSQvVa2v5hKwJSy6X7W1fjDex6yZnNKy3Kx1JGimccHft6HV0THTwNtC2zawtNXKUu+S5CjXslilYdKBAadzA==",
"cpu": [
"ppc64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-riscv64-gnu": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.22.4.tgz",
- "integrity": "sha512-ePYIir6VYnhgv2C5Xe9u+ico4t8sZWXschR6fMgoPUK31yQu7hTEJb7bCqivHECwIClJfKgE7zYsh1qTP3WHUA==",
+ "version": "4.25.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.25.0.tgz",
+ "integrity": "sha512-G4hTREQrIdeV0PE2JruzI+vXdRnaK1pg64hemHq2v5fhv8C7WjVaeXc9P5i4Q5UC06d/L+zA0mszYIKl+wY8oA==",
"cpu": [
"riscv64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-s390x-gnu": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.22.4.tgz",
- "integrity": "sha512-GqFJ9wLlbB9daxhVlrTe61vJtEY99/xB3C8e4ULVsVfflcpmR6c8UZXjtkMA6FhNONhj2eA5Tk9uAVw5orEs4Q==",
+ "version": "4.25.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.25.0.tgz",
+ "integrity": "sha512-9T/w0kQ+upxdkFL9zPVB6zy9vWW1deA3g8IauJxojN4bnz5FwSsUAD034KpXIVX5j5p/rn6XqumBMxfRkcHapQ==",
"cpu": [
"s390x"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-x64-gnu": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.22.4.tgz",
- "integrity": "sha512-87v0ol2sH9GE3cLQLNEy0K/R0pz1nvg76o8M5nhMR0+Q+BBGLnb35P0fVz4CQxHYXaAOhE8HhlkaZfsdUOlHwg==",
+ "version": "4.25.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.25.0.tgz",
+ "integrity": "sha512-ThcnU0EcMDn+J4B9LD++OgBYxZusuA7iemIIiz5yzEcFg04VZFzdFjuwPdlURmYPZw+fgVrFzj4CA64jSTG4Ig==",
"cpu": [
"x64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-x64-musl": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.22.4.tgz",
- "integrity": "sha512-UV6FZMUgePDZrFjrNGIWzDo/vABebuXBhJEqrHxrGiU6HikPy0Z3LfdtciIttEUQfuDdCn8fqh7wiFJjCNwO+g==",
+ "version": "4.25.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.25.0.tgz",
+ "integrity": "sha512-zx71aY2oQxGxAT1JShfhNG79PnjYhMC6voAjzpu/xmMjDnKNf6Nl/xv7YaB/9SIa9jDYf8RBPWEnjcdlhlv1rQ==",
"cpu": [
"x64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-win32-arm64-msvc": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.22.4.tgz",
- "integrity": "sha512-BjI+NVVEGAXjGWYHz/vv0pBqfGoUH0IGZ0cICTn7kB9PyjrATSkX+8WkguNjWoj2qSr1im/+tTGRaY+4/PdcQw==",
+ "version": "4.25.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.25.0.tgz",
+ "integrity": "sha512-JT8tcjNocMs4CylWY/CxVLnv8e1lE7ff1fi6kbGocWwxDq9pj30IJ28Peb+Y8yiPNSF28oad42ApJB8oUkwGww==",
"cpu": [
"arm64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@rollup/rollup-win32-ia32-msvc": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.22.4.tgz",
- "integrity": "sha512-SiWG/1TuUdPvYmzmYnmd3IEifzR61Tragkbx9D3+R8mzQqDBz8v+BvZNDlkiTtI9T15KYZhP0ehn3Dld4n9J5g==",
+ "version": "4.25.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.25.0.tgz",
+ "integrity": "sha512-dRLjLsO3dNOfSN6tjyVlG+Msm4IiZnGkuZ7G5NmpzwF9oOc582FZG05+UdfTbz5Jd4buK/wMb6UeHFhG18+OEg==",
"cpu": [
"ia32"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@rollup/rollup-win32-x64-msvc": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.22.4.tgz",
- "integrity": "sha512-j8pPKp53/lq9lMXN57S8cFz0MynJk8OWNuUnXct/9KCpKU7DgU3bYMJhwWmcqC0UU29p8Lr0/7KEVcaM6bf47Q==",
+ "version": "4.25.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.25.0.tgz",
+ "integrity": "sha512-/RqrIFtLB926frMhZD0a5oDa4eFIbyNEwLLloMTEjmqfwZWXywwVVOVmwTsuyhC9HKkVEZcOOi+KV4U9wmOdlg==",
"cpu": [
"x64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"win32"
]
},
+ "node_modules/@scarf/scarf": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/@scarf/scarf/-/scarf-1.4.0.tgz",
+ "integrity": "sha512-xxeapPiUXdZAE3che6f3xogoJPeZgig6omHEy1rIY5WVsB3H2BHNnZH+gHG6x91SCWyQCzWGsuL2Hh3ClO5/qQ==",
+ "hasInstallScript": true,
+ "license": "Apache-2.0"
+ },
"node_modules/@swagger-api/apidom-ast": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ast/-/apidom-ast-1.0.0-alpha.9.tgz",
- "integrity": "sha512-SAOQrFSFwgDiI4QSIPDwAIJEb4Za+8bu45sNojgV3RMtCz+n4Agw66iqGsDib5YSI/Cg1h4AKFovT3iWdfGWfw==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ast/-/apidom-ast-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-f4Y9t1oBlnsvMoLPCykzn5LRrmARiaPzorocQkMFTkYUPb7RKA4zCuWi67hH4iDVsVvkPutgew19XyJiI3OF9Q==",
"license": "Apache-2.0",
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-error": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-error": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -1732,14 +1832,14 @@
}
},
"node_modules/@swagger-api/apidom-core": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-core/-/apidom-core-1.0.0-alpha.9.tgz",
- "integrity": "sha512-vGl8BWRf6ODl39fxElcIOjRE2QG5AJhn8tTNMqjjHB/2WppNBuxOVStYZeVJoWfK03OPK8v4Fp/TAcaP9+R7DQ==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-core/-/apidom-core-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-4uXIN8cLigD1SZUDhmrEwW+1zbrB6bbD9Hlpo/BF74t/Nh4ZoEOUXv1oR/8QXB9AsIkdO65FdDHyaPzyGbjMiQ==",
"license": "Apache-2.0",
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-ast": "^1.0.0-alpha.9",
- "@swagger-api/apidom-error": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-ast": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-error": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"minim": "~0.23.8",
"ramda": "~0.30.0",
@@ -1749,39 +1849,39 @@
}
},
"node_modules/@swagger-api/apidom-error": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-error/-/apidom-error-1.0.0-alpha.9.tgz",
- "integrity": "sha512-FU/2sFSgsICB9HYFELJ79caRpXXzlAV41QTHsAM46WfRehbzZUQpOBQm4jRi3qJGSa/Jk+mQ7Vt8HLRFMpJFfg==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-error/-/apidom-error-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-ydHNOKTdp9jaeW2yBvdZazXNCVFPbzC2Dy3dtDWU3MwUtSryoefT9OUQFWL7NxzChFRneNhBEcVl4NRocitXeA==",
"license": "Apache-2.0",
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7"
}
},
"node_modules/@swagger-api/apidom-json-pointer": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-json-pointer/-/apidom-json-pointer-1.0.0-alpha.9.tgz",
- "integrity": "sha512-/W8Ktbgbs29zdhed6KHTFk0qmuIRbvEFi8wu2MHGQ5UT4i99Bdu2OyUiayhnpejWztfQxDgL08pjrQPEwgY8Yg==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-json-pointer/-/apidom-json-pointer-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-Xo0v4Jxp0ZiAm+OOL2PSLyjiw5OAkCMxI0nN9+vOw1/mfXcC+tdb30QQ9WNtF7O9LExjznfFID/NnDEYqBRDwA==",
"license": "Apache-2.0",
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-error": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-error": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-ns-api-design-systems": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-api-design-systems/-/apidom-ns-api-design-systems-1.0.0-alpha.9.tgz",
- "integrity": "sha512-aduC2vbwGgn6ia9IkKpqBYBaKyIDGM/80M3oU3DFgaYIIwynzuwVpN1TkBOLIFy3mAzkWoYKUS0jdZJhMy/6Ug==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-api-design-systems/-/apidom-ns-api-design-systems-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-0i4KKNboHi7F8Nra2WNHDl9aOndyTcfKiBfdzSw3j+H5wYAHldeKg7zppqj5rVfwZL9pB5r7eFYZlowwGtmlLg==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-error": "^1.0.0-alpha.9",
- "@swagger-api/apidom-ns-openapi-3-1": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-error": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ns-openapi-3-1": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -1789,15 +1889,15 @@
}
},
"node_modules/@swagger-api/apidom-ns-asyncapi-2": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-asyncapi-2/-/apidom-ns-asyncapi-2-1.0.0-alpha.9.tgz",
- "integrity": "sha512-hZjxXJgMt517ADnAauWJh01k7WNRwkbWT5p6b7AXF2H3tl549A2hhLnIg3BBSE3GwB3Nv25GyrI3aA/1dFVC8A==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-asyncapi-2/-/apidom-ns-asyncapi-2-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-d1LLJ/9LQaT/4jJudFhy3xhpjdTA3pVwBBUqXGPgW2Fp21auTYJMBM9J91wvVUXMUQiVg95DohkCb6TNUYzqLw==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-ns-json-schema-draft-7": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ns-json-schema-draft-7": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -1805,14 +1905,14 @@
}
},
"node_modules/@swagger-api/apidom-ns-json-schema-draft-4": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-4/-/apidom-ns-json-schema-draft-4-1.0.0-alpha.9.tgz",
- "integrity": "sha512-OfX4UBb08C0xD5+F80dQAM2yt5lXxcURWkVEeCwxz7i23BB3nNEbnZXNV91Qo9eaJflPh8dO9iiHQxvfw5IgSg==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-4/-/apidom-ns-json-schema-draft-4-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-sNj4pAmxEfFYIqRcP9A7/gjNMaa7nu1pWT6gTMXtYROyo4XrChc3wit8F76WJEFIiEPLrPs2SrnnA5GIHM7EnA==",
"license": "Apache-2.0",
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-ast": "^1.0.0-alpha.9",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-ast": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -1820,16 +1920,16 @@
}
},
"node_modules/@swagger-api/apidom-ns-json-schema-draft-6": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-6/-/apidom-ns-json-schema-draft-6-1.0.0-alpha.9.tgz",
- "integrity": "sha512-qzUVRSSrnlYGMhK6w57o/RboNvy1FO0iFgEnTk56dD4wN49JRNuFqKI18IgXc1W2r9tTTG70nG1khe4cPE8TNg==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-6/-/apidom-ns-json-schema-draft-6-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-Okwi0ikBSIBhQwMvsoe1+8Ff55cwwp9hu88N/sTDBxI7lyX0xCGAlSrJ9tx4Z/uOn5X+IL9HCRuNlbFt4Bvi2w==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-error": "^1.0.0-alpha.9",
- "@swagger-api/apidom-ns-json-schema-draft-4": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-error": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ns-json-schema-draft-4": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -1837,16 +1937,16 @@
}
},
"node_modules/@swagger-api/apidom-ns-json-schema-draft-7": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-7/-/apidom-ns-json-schema-draft-7-1.0.0-alpha.9.tgz",
- "integrity": "sha512-Zml8Z8VCckdFjvTogaec1dabd85hg1+xZDseWcCuD0tYkaTY/sZ8zzI0dz6/4HsKCb58qjiWSa0w60N8Syr6WQ==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-7/-/apidom-ns-json-schema-draft-7-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-Y5p+iA1k8HR5d5cS1jtoADPKJLVg5czaHrs39UcMoMPhINqgqKGd2sYKtX7DnglcLARXe06pv0Qs9ERwCd5ayQ==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-error": "^1.0.0-alpha.9",
- "@swagger-api/apidom-ns-json-schema-draft-6": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-error": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ns-json-schema-draft-6": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -1854,16 +1954,16 @@
}
},
"node_modules/@swagger-api/apidom-ns-openapi-2": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-2/-/apidom-ns-openapi-2-1.0.0-alpha.9.tgz",
- "integrity": "sha512-WUZxt7Gs7P4EQsGtoD6cKAjf0uDJhkUxsIW9Bb4EAgO6tdp7LlXhbJ0fJ2QycCLY717SfJbvGLfhuSfTYo4Iow==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-2/-/apidom-ns-openapi-2-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-hVhpXIG5CXSqeLo7+d5VwN8b9X0BM8yMZCEIxVAu5050GlcHC3CeJVpy+2DEBkbvR9tzc2HfPGMpWyQpgnimhQ==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-error": "^1.0.0-alpha.9",
- "@swagger-api/apidom-ns-json-schema-draft-4": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-error": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ns-json-schema-draft-4": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -1871,15 +1971,15 @@
}
},
"node_modules/@swagger-api/apidom-ns-openapi-3-0": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-0/-/apidom-ns-openapi-3-0-1.0.0-alpha.9.tgz",
- "integrity": "sha512-7ra5uoZGrfCn1LabfJLueChPcYXyg24//LCYBtjTstyueqd5Vp7JCPeP5NnJSAaqVAP47r8ygceBPoxNp9k1EQ==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-0/-/apidom-ns-openapi-3-0-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-zF2tPojJBGmQ/GuX+QJ0BhBWmnC+ET8Zah9utKpYWFFjqG/Wl6YzWpyrEflXpfGFzDFgoo+R+/3QvzScbPssqg==",
"license": "Apache-2.0",
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-error": "^1.0.0-alpha.9",
- "@swagger-api/apidom-ns-json-schema-draft-4": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-error": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ns-json-schema-draft-4": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -1887,16 +1987,16 @@
}
},
"node_modules/@swagger-api/apidom-ns-openapi-3-1": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-1/-/apidom-ns-openapi-3-1-1.0.0-alpha.9.tgz",
- "integrity": "sha512-nQOwNQgf0C8EVjf2loAAl4ifRuVOdcqycvXUdcTpsUfHN3fbndR8IKpb26mQNmnACmqgmX+LkbMdW9b+K6089g==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-1/-/apidom-ns-openapi-3-1-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-/7o+/Z2LelLcOdDSeY8O467Tjmr4yp0c8T4l13+zoQlaJFCzoeJqUUzP/dyqLPxqSeSMOez7uXnYpii6F8uYcA==",
"license": "Apache-2.0",
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-ast": "^1.0.0-alpha.9",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-json-pointer": "^1.0.0-alpha.9",
- "@swagger-api/apidom-ns-openapi-3-0": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-ast": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-json-pointer": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ns-openapi-3-0": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -1904,15 +2004,15 @@
}
},
"node_modules/@swagger-api/apidom-ns-workflows-1": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-workflows-1/-/apidom-ns-workflows-1-1.0.0-alpha.9.tgz",
- "integrity": "sha512-yKo0p8OkQmDib93Kt1yqWmI7JsD6D9qUHxr/SCuAmNNWny1hxm7cZGoKJwJlGd0uAg84j4vmzWOlG3AsJbnT8g==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-workflows-1/-/apidom-ns-workflows-1-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-tem8H3DHvQNxUqbiLmepccjAyFffS41Z90ibugsw17xzCNIIr6kDwlhiSSGkl52C+IBqoUlE6kdV0afPr2WuUA==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-ns-openapi-3-1": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ns-openapi-3-1": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -1920,80 +2020,80 @@
}
},
"node_modules/@swagger-api/apidom-parser-adapter-api-design-systems-json": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-json/-/apidom-parser-adapter-api-design-systems-json-1.0.0-alpha.9.tgz",
- "integrity": "sha512-xfVMR4HrTzXU0HB4TtxwkNbUIa/cQrPa0BWutJZ0fMYMAtUox2s8GsFYnJfZP52XfpSHFM1VPclivorZqET14g==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-json/-/apidom-parser-adapter-api-design-systems-json-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-8yuL2w3G4zdBxyITLHKSFRwpgl8Rp4/bCR2GTznYKr5wYuN9RVSKAp2sGtuWHnynnpspodswu3AI1BVCLKBj1A==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-ns-api-design-systems": "^1.0.0-alpha.9",
- "@swagger-api/apidom-parser-adapter-json": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ns-api-design-systems": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-parser-adapter-json": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-api-design-systems-yaml": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-yaml/-/apidom-parser-adapter-api-design-systems-yaml-1.0.0-alpha.9.tgz",
- "integrity": "sha512-lJZkrhZ8qRTtc5fSLKefCv8j7Xzo8UBfMjpqTJhmETAtU8YfVV2i2znjgxJpm0QwV6FVQqGfK1+ASZQWPLiVcA==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-yaml/-/apidom-parser-adapter-api-design-systems-yaml-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-I+/tRdC6CK0GfjZgOaTfpjtehkFW7i1A1ixFOPtrwKA8v3oZ2eUW7dIjDMMC0yTt67j7enHlGTw6o2rZZGnjpA==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-ns-api-design-systems": "^1.0.0-alpha.9",
- "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ns-api-design-systems": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-asyncapi-json-2": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-json-2/-/apidom-parser-adapter-asyncapi-json-2-1.0.0-alpha.9.tgz",
- "integrity": "sha512-65nmKdPzw4C1bmtYn+3zoxXCI6Gnobr0StI9XE0YWiK+lpso7RH3Cgyl1yPZ0DBRVGzP+Fn9FVzmDNulEfR95w==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-json-2/-/apidom-parser-adapter-asyncapi-json-2-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-FX4buMibcnz0rsQKMBUrZM8cS1/s0pi3TV9HAsKPQY1mKssyeUEE/nlp6DBbYM6kNCEdq2ALvnPtZVwEJpxS3A==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-ns-asyncapi-2": "^1.0.0-alpha.9",
- "@swagger-api/apidom-parser-adapter-json": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ns-asyncapi-2": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-parser-adapter-json": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-asyncapi-yaml-2": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-yaml-2/-/apidom-parser-adapter-asyncapi-yaml-2-1.0.0-alpha.9.tgz",
- "integrity": "sha512-RLI4FpVB3vB6mIuT77yrsv5V2LMZ80dW9XpV+Fmbd4Jkdj+ysAFwT38cI4AsUMOxixpTDIXY1oWD7AjvylHhQQ==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-yaml-2/-/apidom-parser-adapter-asyncapi-yaml-2-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-JsPYRsaKCecY8UN2AHuHm6X0WgWfys6ypH8UPYic1n3XUfNPkTSOaUY87Vi04wJmy8pQ1F0wHeESY//Zb37aIA==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-ns-asyncapi-2": "^1.0.0-alpha.9",
- "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ns-asyncapi-2": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-json": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-json/-/apidom-parser-adapter-json-1.0.0-alpha.9.tgz",
- "integrity": "sha512-aOewp8/3zobf/O+5Jx8y7+bX3BPRfRlHIv15qp4YVTsLs6gLISWSzTO9JpWe9cR+AfhpsAalFq4t1LwIkmLk4A==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-json/-/apidom-parser-adapter-json-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-CTSgLG33GgC3POxLBCzlXyBBUz+EFGe62VICH012RIYDXHDmcr4dPmfHyj85LVJxLh7regQ+SGL4NwqQSxTY3A==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-ast": "^1.0.0-alpha.9",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-error": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-ast": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-error": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -2003,144 +2103,144 @@
}
},
"node_modules/@swagger-api/apidom-parser-adapter-openapi-json-2": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-2/-/apidom-parser-adapter-openapi-json-2-1.0.0-alpha.9.tgz",
- "integrity": "sha512-zgtsAfkplCFusX2P/saqdn10J8P3kQizCXxHLvxd2j0EhMJk2wfu4HYN5Pej/7/qf/OR1QZxqtacwebd4RfpXA==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-2/-/apidom-parser-adapter-openapi-json-2-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-YtPu2BansaTpW6MrIRJgZpa9V+MLl/DFqC2tHbGSO+u73PdWndONRgqzAAc5pBWR+u1RNgULrCK6sX7uPiFLVg==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-ns-openapi-2": "^1.0.0-alpha.9",
- "@swagger-api/apidom-parser-adapter-json": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ns-openapi-2": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-parser-adapter-json": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-openapi-json-3-0": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-0/-/apidom-parser-adapter-openapi-json-3-0-1.0.0-alpha.9.tgz",
- "integrity": "sha512-iPuHf0cAZSUhSv8mB0FnVgatTc26cVYohgqz2cvjoGofdqoh5KKIfxOkWlIhm+qGuBp71CfZUrPYPRsd0dHgeg==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-0/-/apidom-parser-adapter-openapi-json-3-0-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-zzZdK+xhj+sVh4z3vZrxdBrDitraD1szJPc3sUC0pukuCz3P7R/u+//6+GLE9UVjUakdbQI2cyKyUOIZX51+/g==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-ns-openapi-3-0": "^1.0.0-alpha.9",
- "@swagger-api/apidom-parser-adapter-json": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ns-openapi-3-0": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-parser-adapter-json": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-openapi-json-3-1": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-1/-/apidom-parser-adapter-openapi-json-3-1-1.0.0-alpha.9.tgz",
- "integrity": "sha512-jwkfO7tzZyyrAgok+O9fKFOv1q/5njMb9DBc3D/ZF3ZLTcnEw8uj4V2HkjKxUweH5k8ip/gc8ueKmO/i7p2fng==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-1/-/apidom-parser-adapter-openapi-json-3-1-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-i7HaRnU2kDtvDqM5Yv1sbYZghCeRhiVQEyaIIp59Zhc5SwLS3dSoD/kh0TeuKpaY5Lg0ISIM3SLRDcdaYUsGww==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-ns-openapi-3-1": "^1.0.0-alpha.9",
- "@swagger-api/apidom-parser-adapter-json": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ns-openapi-3-1": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-parser-adapter-json": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-openapi-yaml-2": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-2/-/apidom-parser-adapter-openapi-yaml-2-1.0.0-alpha.9.tgz",
- "integrity": "sha512-jEIDpjbjwFKXQXS/RHJeA4tthsguLoz+nJPYS3AOLfuSiby5QXsKTxgqHXxG/YJqF1xJbZL+5KcF8UyiDePumw==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-2/-/apidom-parser-adapter-openapi-yaml-2-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-QbqCTAvthqhZmFZKf9HBYnVt4kV7konYnauylVFIaE5KAzmZkcb30FtkAwmZfnyW3AURMzZcLfOgJRGHOjYSqA==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-ns-openapi-2": "^1.0.0-alpha.9",
- "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ns-openapi-2": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-openapi-yaml-3-0": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-0/-/apidom-parser-adapter-openapi-yaml-3-0-1.0.0-alpha.9.tgz",
- "integrity": "sha512-ieJL8dfIF8fmP3uJRNh/duJa3cCIIv6MzUe6o4uPT/oTDroy4qIATvnq9Dq/gtAv6rcPRpA9VhyghJ1DmjKsZQ==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-0/-/apidom-parser-adapter-openapi-yaml-3-0-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-ajVOqs8lNta7uXkFtU5k1zDJTjwV6Ki3uS+JwBvjuMHsF/i/WIZOmgI4g1Z3yQ1c0QI4dHJskq4WDyp2qW64aw==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-ns-openapi-3-0": "^1.0.0-alpha.9",
- "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ns-openapi-3-0": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-openapi-yaml-3-1": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-1/-/apidom-parser-adapter-openapi-yaml-3-1-1.0.0-alpha.9.tgz",
- "integrity": "sha512-EatIH7PZQSNDsRn9ompc62MYzboY7wAkjfYz+2FzBaSA8Vl5/+740qGQj22tu/xhwW4K72aV2NNL1m47QVF7hA==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-1/-/apidom-parser-adapter-openapi-yaml-3-1-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-ljYmbBFWjIcfN+MJr7JFh6NA/fgyu5gXDI6KUrg/sbWTKdUYP4iNLJPw8VLPBXHnExevjZCt1Ni74mmL4ZfyBg==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-ns-openapi-3-1": "^1.0.0-alpha.9",
- "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ns-openapi-3-1": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-workflows-json-1": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-workflows-json-1/-/apidom-parser-adapter-workflows-json-1-1.0.0-alpha.9.tgz",
- "integrity": "sha512-LylC2cQdAmvR7bXqwMwBt6FHTMVGinwIdI8pjl4EbPT9hCVm1rdED53caTYM4gCm+CJGRw20r4gb9vn3+N6RrA==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-workflows-json-1/-/apidom-parser-adapter-workflows-json-1-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-vd0H5IYX96AIhOLcU9SJnXDD6OV61i00JDDfJcFnf1K2NCB0D0Otk2V2z9LXqe51s3pZ7d/Dz0biDjYMsMKVww==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-ns-workflows-1": "^1.0.0-alpha.9",
- "@swagger-api/apidom-parser-adapter-json": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ns-workflows-1": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-parser-adapter-json": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-workflows-yaml-1": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-workflows-yaml-1/-/apidom-parser-adapter-workflows-yaml-1-1.0.0-alpha.9.tgz",
- "integrity": "sha512-TlA4+1ca33D7fWxO5jKBytSCv86IGI4Lze4JfrawWUXZ5efhi4LiNmW5TrGlZUyvL7yJtZcA4tn3betlj6jVwA==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-workflows-yaml-1/-/apidom-parser-adapter-workflows-yaml-1-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-lH0AiPetMLRDy38gcB6TmQnaKv6p1ePimnT4xqcVSHEnc/FsjMbyOE3x6DUENau2eeWFduAhofE9zvliW6iJaQ==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-ns-workflows-1": "^1.0.0-alpha.9",
- "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ns-workflows-1": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-yaml-1-2": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-yaml-1-2/-/apidom-parser-adapter-yaml-1-2-1.0.0-alpha.9.tgz",
- "integrity": "sha512-jSIHEB7lbh+MP3BhYIXFkeivDR01kugXN70e5FskW7oet2TIARsVEPheWKQFSP1U8bUZA4bsp9h9gOQ9xEeErw==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-yaml-1-2/-/apidom-parser-adapter-yaml-1-2-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-mW/W/Q8w4RCw41Y9vggPbsFg+gj0FxKdecVYzZ8TmgyM9oVN6/KZFegUYKlg1HDRAfjceKehE06aLLS5GXEJCA==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-ast": "^1.0.0-alpha.9",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
- "@swagger-api/apidom-error": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-ast": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-error": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -2150,15 +2250,15 @@
}
},
"node_modules/@swagger-api/apidom-reference": {
- "version": "1.0.0-alpha.9",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-reference/-/apidom-reference-1.0.0-alpha.9.tgz",
- "integrity": "sha512-KQ6wB5KplqdSsjxdA8BaQulj5zlF5VBCd5KP3RN/9vvixgsD/gyrVY59nisdzmPTqiL6yjhk612eQ96MgG8KiA==",
+ "version": "1.0.0-alpha.10",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-reference/-/apidom-reference-1.0.0-alpha.10.tgz",
+ "integrity": "sha512-aFG6EHC1NOa0IhawTiE8A8TffzmW0PSO5d+lpzvcJ0w7KbrYG6SFQF2L6lZppqGaIGWbmV0Mq3LDU9mgSVEqqQ==",
"license": "Apache-2.0",
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.9",
+ "@swagger-api/apidom-core": "^1.0.0-alpha.10",
"@types/ramda": "~0.30.0",
- "axios": "^1.4.0",
+ "axios": "^1.7.4",
"minimatch": "^7.4.3",
"process": "^0.11.10",
"ramda": "~0.30.0",
@@ -2204,14 +2304,15 @@
}
},
"node_modules/@swc/core": {
- "version": "1.7.26",
- "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.7.26.tgz",
- "integrity": "sha512-f5uYFf+TmMQyYIoxkn/evWhNGuUzC730dFwAKGwBVHHVoPyak1/GvJUm6i1SKl+2Hrj9oN0i3WSoWWZ4pgI8lw==",
+ "version": "1.9.2",
+ "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.9.2.tgz",
+ "integrity": "sha512-dYyEkO6mRYtZFpnOsnYzv9rY69fHAHoawYOjGOEcxk9WYtaJhowMdP/w6NcOKnz2G7GlZaenjkzkMa6ZeQeMsg==",
"dev": true,
"hasInstallScript": true,
+ "license": "Apache-2.0",
"dependencies": {
"@swc/counter": "^0.1.3",
- "@swc/types": "^0.1.12"
+ "@swc/types": "^0.1.15"
},
"engines": {
"node": ">=10"
@@ -2221,16 +2322,16 @@
"url": "https://opencollective.com/swc"
},
"optionalDependencies": {
- "@swc/core-darwin-arm64": "1.7.26",
- "@swc/core-darwin-x64": "1.7.26",
- "@swc/core-linux-arm-gnueabihf": "1.7.26",
- "@swc/core-linux-arm64-gnu": "1.7.26",
- "@swc/core-linux-arm64-musl": "1.7.26",
- "@swc/core-linux-x64-gnu": "1.7.26",
- "@swc/core-linux-x64-musl": "1.7.26",
- "@swc/core-win32-arm64-msvc": "1.7.26",
- "@swc/core-win32-ia32-msvc": "1.7.26",
- "@swc/core-win32-x64-msvc": "1.7.26"
+ "@swc/core-darwin-arm64": "1.9.2",
+ "@swc/core-darwin-x64": "1.9.2",
+ "@swc/core-linux-arm-gnueabihf": "1.9.2",
+ "@swc/core-linux-arm64-gnu": "1.9.2",
+ "@swc/core-linux-arm64-musl": "1.9.2",
+ "@swc/core-linux-x64-gnu": "1.9.2",
+ "@swc/core-linux-x64-musl": "1.9.2",
+ "@swc/core-win32-arm64-msvc": "1.9.2",
+ "@swc/core-win32-ia32-msvc": "1.9.2",
+ "@swc/core-win32-x64-msvc": "1.9.2"
},
"peerDependencies": {
"@swc/helpers": "*"
@@ -2242,13 +2343,14 @@
}
},
"node_modules/@swc/core-darwin-arm64": {
- "version": "1.7.26",
- "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.7.26.tgz",
- "integrity": "sha512-FF3CRYTg6a7ZVW4yT9mesxoVVZTrcSWtmZhxKCYJX9brH4CS/7PRPjAKNk6kzWgWuRoglP7hkjQcd6EpMcZEAw==",
+ "version": "1.9.2",
+ "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.9.2.tgz",
+ "integrity": "sha512-nETmsCoY29krTF2PtspEgicb3tqw7Ci5sInTI03EU5zpqYbPjoPH99BVTjj0OsF53jP5MxwnLI5Hm21lUn1d6A==",
"cpu": [
"arm64"
],
"dev": true,
+ "license": "Apache-2.0 AND MIT",
"optional": true,
"os": [
"darwin"
@@ -2258,13 +2360,14 @@
}
},
"node_modules/@swc/core-darwin-x64": {
- "version": "1.7.26",
- "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.7.26.tgz",
- "integrity": "sha512-az3cibZdsay2HNKmc4bjf62QVukuiMRh5sfM5kHR/JMTrLyS6vSw7Ihs3UTkZjUxkLTT8ro54LI6sV6sUQUbLQ==",
+ "version": "1.9.2",
+ "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.9.2.tgz",
+ "integrity": "sha512-9gD+bwBz8ZByjP6nZTXe/hzd0tySIAjpDHgkFiUrc+5zGF+rdTwhcNrzxNHJmy6mw+PW38jqII4uspFHUqqxuQ==",
"cpu": [
"x64"
],
"dev": true,
+ "license": "Apache-2.0 AND MIT",
"optional": true,
"os": [
"darwin"
@@ -2274,13 +2377,14 @@
}
},
"node_modules/@swc/core-linux-arm-gnueabihf": {
- "version": "1.7.26",
- "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.7.26.tgz",
- "integrity": "sha512-VYPFVJDO5zT5U3RpCdHE5v1gz4mmR8BfHecUZTmD2v1JeFY6fv9KArJUpjrHEEsjK/ucXkQFmJ0jaiWXmpOV9Q==",
+ "version": "1.9.2",
+ "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.9.2.tgz",
+ "integrity": "sha512-kYq8ief1Qrn+WmsTWAYo4r+Coul4dXN6cLFjiPZ29Cv5pyU+GFvSPAB4bEdMzwy99rCR0u2P10UExaeCjurjvg==",
"cpu": [
"arm"
],
"dev": true,
+ "license": "Apache-2.0",
"optional": true,
"os": [
"linux"
@@ -2290,13 +2394,14 @@
}
},
"node_modules/@swc/core-linux-arm64-gnu": {
- "version": "1.7.26",
- "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.7.26.tgz",
- "integrity": "sha512-YKevOV7abpjcAzXrhsl+W48Z9mZvgoVs2eP5nY+uoMAdP2b3GxC0Df1Co0I90o2lkzO4jYBpTMcZlmUXLdXn+Q==",
+ "version": "1.9.2",
+ "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.9.2.tgz",
+ "integrity": "sha512-n0W4XiXlmEIVqxt+rD3ZpkogsEWUk1jJ+i5bQNgB+1JuWh0fBE8c/blDgTQXa0GB5lTPVDZQussgdNOCnAZwiA==",
"cpu": [
"arm64"
],
"dev": true,
+ "license": "Apache-2.0 AND MIT",
"optional": true,
"os": [
"linux"
@@ -2306,13 +2411,14 @@
}
},
"node_modules/@swc/core-linux-arm64-musl": {
- "version": "1.7.26",
- "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.7.26.tgz",
- "integrity": "sha512-3w8iZICMkQQON0uIcvz7+Q1MPOW6hJ4O5ETjA0LSP/tuKqx30hIniCGOgPDnv3UTMruLUnQbtBwVCZTBKR3Rkg==",
+ "version": "1.9.2",
+ "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.9.2.tgz",
+ "integrity": "sha512-8xzrOmsyCC1zrx2Wzx/h8dVsdewO1oMCwBTLc1gSJ/YllZYTb04pNm6NsVbzUX2tKddJVRgSJXV10j/NECLwpA==",
"cpu": [
"arm64"
],
"dev": true,
+ "license": "Apache-2.0 AND MIT",
"optional": true,
"os": [
"linux"
@@ -2322,13 +2428,14 @@
}
},
"node_modules/@swc/core-linux-x64-gnu": {
- "version": "1.7.26",
- "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.7.26.tgz",
- "integrity": "sha512-c+pp9Zkk2lqb06bNGkR2Looxrs7FtGDMA4/aHjZcCqATgp348hOKH5WPvNLBl+yPrISuWjbKDVn3NgAvfvpH4w==",
+ "version": "1.9.2",
+ "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.9.2.tgz",
+ "integrity": "sha512-kZrNz/PjRQKcchWF6W292jk3K44EoVu1ad5w+zbS4jekIAxsM8WwQ1kd+yjUlN9jFcF8XBat5NKIs9WphJCVXg==",
"cpu": [
"x64"
],
"dev": true,
+ "license": "Apache-2.0 AND MIT",
"optional": true,
"os": [
"linux"
@@ -2338,13 +2445,14 @@
}
},
"node_modules/@swc/core-linux-x64-musl": {
- "version": "1.7.26",
- "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.7.26.tgz",
- "integrity": "sha512-PgtyfHBF6xG87dUSSdTJHwZ3/8vWZfNIXQV2GlwEpslrOkGqy+WaiiyE7Of7z9AvDILfBBBcJvJ/r8u980wAfQ==",
+ "version": "1.9.2",
+ "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.9.2.tgz",
+ "integrity": "sha512-TTIpR4rjMkhX1lnFR+PSXpaL83TrQzp9znRdp2TzYrODlUd/R20zOwSo9vFLCyH6ZoD47bccY7QeGZDYT3nlRg==",
"cpu": [
"x64"
],
"dev": true,
+ "license": "Apache-2.0 AND MIT",
"optional": true,
"os": [
"linux"
@@ -2354,13 +2462,14 @@
}
},
"node_modules/@swc/core-win32-arm64-msvc": {
- "version": "1.7.26",
- "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.7.26.tgz",
- "integrity": "sha512-9TNXPIJqFynlAOrRD6tUQjMq7KApSklK3R/tXgIxc7Qx+lWu8hlDQ/kVPLpU7PWvMMwC/3hKBW+p5f+Tms1hmA==",
+ "version": "1.9.2",
+ "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.9.2.tgz",
+ "integrity": "sha512-+Eg2d4icItKC0PMjZxH7cSYFLWk0aIp94LNmOw6tPq0e69ax6oh10upeq0D1fjWsKLmOJAWEvnXlayZcijEXDw==",
"cpu": [
"arm64"
],
"dev": true,
+ "license": "Apache-2.0 AND MIT",
"optional": true,
"os": [
"win32"
@@ -2370,13 +2479,14 @@
}
},
"node_modules/@swc/core-win32-ia32-msvc": {
- "version": "1.7.26",
- "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.7.26.tgz",
- "integrity": "sha512-9YngxNcG3177GYdsTum4V98Re+TlCeJEP4kEwEg9EagT5s3YejYdKwVAkAsJszzkXuyRDdnHUpYbTrPG6FiXrQ==",
+ "version": "1.9.2",
+ "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.9.2.tgz",
+ "integrity": "sha512-nLWBi4vZDdM/LkiQmPCakof8Dh1/t5EM7eudue04V1lIcqx9YHVRS3KMwEaCoHLGg0c312Wm4YgrWQd9vwZ5zQ==",
"cpu": [
"ia32"
],
"dev": true,
+ "license": "Apache-2.0 AND MIT",
"optional": true,
"os": [
"win32"
@@ -2386,13 +2496,14 @@
}
},
"node_modules/@swc/core-win32-x64-msvc": {
- "version": "1.7.26",
- "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.7.26.tgz",
- "integrity": "sha512-VR+hzg9XqucgLjXxA13MtV5O3C0bK0ywtLIBw/+a+O+Oc6mxFWHtdUeXDbIi5AiPbn0fjgVJMqYnyjGyyX8u0w==",
+ "version": "1.9.2",
+ "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.9.2.tgz",
+ "integrity": "sha512-ik/k+JjRJBFkXARukdU82tSVx0CbExFQoQ78qTO682esbYXzjdB5eLVkoUbwen299pnfr88Kn4kyIqFPTje8Xw==",
"cpu": [
"x64"
],
"dev": true,
+ "license": "Apache-2.0 AND MIT",
"optional": true,
"os": [
"win32"
@@ -2405,23 +2516,26 @@
"version": "0.1.3",
"resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz",
"integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==",
- "dev": true
+ "dev": true,
+ "license": "Apache-2.0"
},
"node_modules/@swc/types": {
- "version": "0.1.12",
- "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.12.tgz",
- "integrity": "sha512-wBJA+SdtkbFhHjTMYH+dEH1y4VpfGdAc2Kw/LK09i9bXd/K6j6PkDcFCEzb6iVfZMkPRrl/q0e3toqTAJdkIVA==",
+ "version": "0.1.15",
+ "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.15.tgz",
+ "integrity": "sha512-XKaZ+dzDIQ9Ot9o89oJQ/aluI17+VvUnIpYJTcZtvv1iYX6MzHh3Ik2CSR7MdPKpPwfZXHBeCingb2b4PoDVdw==",
"dev": true,
+ "license": "Apache-2.0",
"dependencies": {
"@swc/counter": "^0.1.3"
}
},
"node_modules/@tanstack/react-virtual": {
- "version": "3.10.4",
- "resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.10.4.tgz",
- "integrity": "sha512-Y2y1QJN3e5gNTG4wlZcoW2IAFrVCuho80oyeODKKFVSbAhJAXmkDNH3ZztM6EQij5ueqpqgz5FlsgKP9TGjImA==",
+ "version": "3.10.9",
+ "resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.10.9.tgz",
+ "integrity": "sha512-OXO2uBjFqA4Ibr2O3y0YMnkrRWGVNqcvHQXmGvMu6IK8chZl3PrDxFXdGZ2iZkSrKh3/qUYoFqYe+Rx23RoU0g==",
+ "license": "MIT",
"dependencies": {
- "@tanstack/virtual-core": "3.10.4"
+ "@tanstack/virtual-core": "3.10.9"
},
"funding": {
"type": "github",
@@ -2433,9 +2547,10 @@
}
},
"node_modules/@tanstack/virtual-core": {
- "version": "3.10.4",
- "resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.10.4.tgz",
- "integrity": "sha512-yHyli4RHVsI+eJ0RjmOsjA9RpHp3/Zah9t+iRjmFa72dq00TeG/NwuLYuCV6CB4RkWD4i5RD421j1eb6BdKgvQ==",
+ "version": "3.10.9",
+ "resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.10.9.tgz",
+ "integrity": "sha512-kBknKOKzmeR7lN+vSadaKWXaLS0SZZG+oqpQ/k80Q6g9REn6zRHS/ZYdrIzHnpHgy/eWs00SujveUN/GJT2qTw==",
+ "license": "MIT",
"funding": {
"type": "github",
"url": "https://github.com/sponsors/tannerlinsley"
@@ -2445,14 +2560,15 @@
"version": "5.60.15",
"resolved": "https://registry.npmjs.org/@types/codemirror/-/codemirror-5.60.15.tgz",
"integrity": "sha512-dTOvwEQ+ouKJ/rE9LT1Ue2hmP6H1mZv5+CCnNWu2qtiOe2LQa9lCprEY20HxiDmV/Bxh+dXjywmy5aKvoGjULA==",
+ "license": "MIT",
"dependencies": {
"@types/tern": "*"
}
},
"node_modules/@types/estree": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz",
- "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==",
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz",
+ "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==",
"license": "MIT"
},
"node_modules/@types/hast": {
@@ -2468,19 +2584,20 @@
"version": "7.0.15",
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz",
"integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==",
- "dev": true
+ "dev": true,
+ "license": "MIT"
},
"node_modules/@types/prop-types": {
- "version": "15.7.12",
- "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.12.tgz",
- "integrity": "sha512-5zvhXYtRNRluoE/jAp4GVsSduVUzNWKkOZrCDBWYtE7biZywwdC2AcEzg+cSMLFRfVgeAFqpfNabiPjxFddV1Q==",
- "devOptional": true,
+ "version": "15.7.13",
+ "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.13.tgz",
+ "integrity": "sha512-hCZTSvwbzWGvhqxp/RqVqwU999pBf2vp7hzIjiYOsl8wqOmUxkQ6ddw1cV3l8811+kdUFus/q4d1Y3E3SyEifA==",
+ "dev": true,
"license": "MIT"
},
"node_modules/@types/ramda": {
- "version": "0.30.1",
- "resolved": "https://registry.npmjs.org/@types/ramda/-/ramda-0.30.1.tgz",
- "integrity": "sha512-aoyF/ADPL6N+/NXXfhPWF+Qj6w1Cql59m9wX0Gi15uyF+bpzXeLd63HPdiTDE2bmLXfNcVufsDPKmbfOrOzTBA==",
+ "version": "0.30.2",
+ "resolved": "https://registry.npmjs.org/@types/ramda/-/ramda-0.30.2.tgz",
+ "integrity": "sha512-PyzHvjCalm2BRYjAU6nIB3TprYwMNOUY/7P/N8bSzp9W/yM2YrtGtAnnVtaCNSeOZ8DzKyFDvaqQs7LnWwwmBA==",
"license": "MIT",
"dependencies": {
"types-ramda": "^0.30.1"
@@ -2490,7 +2607,8 @@
"version": "18.3.12",
"resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.12.tgz",
"integrity": "sha512-D2wOSq/d6Agt28q7rSI3jhU7G6aiuzljDGZ2hTZHIkrTLUI+AF3WMeKkEZ9nN2fkBAlcktT6vcZjDFiIhMYEQw==",
- "devOptional": true,
+ "dev": true,
+ "license": "MIT",
"dependencies": {
"@types/prop-types": "*",
"csstype": "^3.0.2"
@@ -2500,7 +2618,8 @@
"version": "18.3.1",
"resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.1.tgz",
"integrity": "sha512-qW1Mfv8taImTthu4KoXgDfLuk4bydU6Q/TkADnDWWHwi4NX4BR+LWfTp2sVmTqRrsHvyDDTelgelxJ+SsejKKQ==",
- "devOptional": true,
+ "dev": true,
+ "license": "MIT",
"dependencies": {
"@types/react": "*"
}
@@ -2519,6 +2638,7 @@
"version": "0.23.9",
"resolved": "https://registry.npmjs.org/@types/tern/-/tern-0.23.9.tgz",
"integrity": "sha512-ypzHFE/wBzh+BlH6rrBgS5I/Z7RD21pGhZ2rltb/+ZrVM1awdZwjx7hE5XfuYgHWk9uvV5HLZN3SloevCAp3Bw==",
+ "license": "MIT",
"dependencies": {
"@types/estree": "*"
}
@@ -2536,16 +2656,17 @@
"license": "MIT"
},
"node_modules/@typescript-eslint/eslint-plugin": {
- "version": "8.12.2",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.12.2.tgz",
- "integrity": "sha512-gQxbxM8mcxBwaEmWdtLCIGLfixBMHhQjBqR8sVWNTPpcj45WlYL2IObS/DNMLH1DBP0n8qz+aiiLTGfopPEebw==",
+ "version": "8.14.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.14.0.tgz",
+ "integrity": "sha512-tqp8H7UWFaZj0yNO6bycd5YjMwxa6wIHOLZvWPkidwbgLCsBMetQoGj7DPuAlWa2yGO3H48xmPwjhsSPPCGU5w==",
"dev": true,
+ "license": "MIT",
"dependencies": {
"@eslint-community/regexpp": "^4.10.0",
- "@typescript-eslint/scope-manager": "8.12.2",
- "@typescript-eslint/type-utils": "8.12.2",
- "@typescript-eslint/utils": "8.12.2",
- "@typescript-eslint/visitor-keys": "8.12.2",
+ "@typescript-eslint/scope-manager": "8.14.0",
+ "@typescript-eslint/type-utils": "8.14.0",
+ "@typescript-eslint/utils": "8.14.0",
+ "@typescript-eslint/visitor-keys": "8.14.0",
"graphemer": "^1.4.0",
"ignore": "^5.3.1",
"natural-compare": "^1.4.0",
@@ -2568,63 +2689,17 @@
}
}
},
- "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": {
- "version": "8.12.2",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.12.2.tgz",
- "integrity": "sha512-gPLpLtrj9aMHOvxJkSbDBmbRuYdtiEbnvO25bCMza3DhMjTQw0u7Y1M+YR5JPbMsXXnSPuCf5hfq0nEkQDL/JQ==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.12.2",
- "@typescript-eslint/visitor-keys": "8.12.2"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": {
- "version": "8.12.2",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.12.2.tgz",
- "integrity": "sha512-VwDwMF1SZ7wPBUZwmMdnDJ6sIFk4K4s+ALKLP6aIQsISkPv8jhiw65sAK6SuWODN/ix+m+HgbYDkH+zLjrzvOA==",
- "dev": true,
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": {
- "version": "8.12.2",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.12.2.tgz",
- "integrity": "sha512-PChz8UaKQAVNHghsHcPyx1OMHoFRUEA7rJSK/mDhdq85bk+PLsUHUBqTQTFt18VJZbmxBovM65fezlheQRsSDA==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.12.2",
- "eslint-visitor-keys": "^3.4.3"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
"node_modules/@typescript-eslint/parser": {
- "version": "8.12.2",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.12.2.tgz",
- "integrity": "sha512-MrvlXNfGPLH3Z+r7Tk+Z5moZAc0dzdVjTgUgwsdGweH7lydysQsnSww3nAmsq8blFuRD5VRlAr9YdEFw3e6PBw==",
+ "version": "8.14.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.14.0.tgz",
+ "integrity": "sha512-2p82Yn9juUJq0XynBXtFCyrBDb6/dJombnz6vbo6mgQEtWHfvHbQuEa9kAOVIt1c9YFwi7H6WxtPj1kg+80+RA==",
"dev": true,
+ "license": "BSD-2-Clause",
"dependencies": {
- "@typescript-eslint/scope-manager": "8.12.2",
- "@typescript-eslint/types": "8.12.2",
- "@typescript-eslint/typescript-estree": "8.12.2",
- "@typescript-eslint/visitor-keys": "8.12.2",
+ "@typescript-eslint/scope-manager": "8.14.0",
+ "@typescript-eslint/types": "8.14.0",
+ "@typescript-eslint/typescript-estree": "8.14.0",
+ "@typescript-eslint/visitor-keys": "8.14.0",
"debug": "^4.3.4"
},
"engines": {
@@ -2643,90 +2718,15 @@
}
}
},
- "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": {
- "version": "8.12.2",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.12.2.tgz",
- "integrity": "sha512-gPLpLtrj9aMHOvxJkSbDBmbRuYdtiEbnvO25bCMza3DhMjTQw0u7Y1M+YR5JPbMsXXnSPuCf5hfq0nEkQDL/JQ==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.12.2",
- "@typescript-eslint/visitor-keys": "8.12.2"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": {
- "version": "8.12.2",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.12.2.tgz",
- "integrity": "sha512-VwDwMF1SZ7wPBUZwmMdnDJ6sIFk4K4s+ALKLP6aIQsISkPv8jhiw65sAK6SuWODN/ix+m+HgbYDkH+zLjrzvOA==",
- "dev": true,
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": {
- "version": "8.12.2",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.12.2.tgz",
- "integrity": "sha512-mME5MDwGe30Pq9zKPvyduyU86PH7aixwqYR2grTglAdB+AN8xXQ1vFGpYaUSJ5o5P/5znsSBeNcs5g5/2aQwow==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.12.2",
- "@typescript-eslint/visitor-keys": "8.12.2",
- "debug": "^4.3.4",
- "fast-glob": "^3.3.2",
- "is-glob": "^4.0.3",
- "minimatch": "^9.0.4",
- "semver": "^7.6.0",
- "ts-api-utils": "^1.3.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": {
- "version": "8.12.2",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.12.2.tgz",
- "integrity": "sha512-PChz8UaKQAVNHghsHcPyx1OMHoFRUEA7rJSK/mDhdq85bk+PLsUHUBqTQTFt18VJZbmxBovM65fezlheQRsSDA==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.12.2",
- "eslint-visitor-keys": "^3.4.3"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
"node_modules/@typescript-eslint/scope-manager": {
- "version": "8.11.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.11.0.tgz",
- "integrity": "sha512-Uholz7tWhXmA4r6epo+vaeV7yjdKy5QFCERMjs1kMVsLRKIrSdM6o21W2He9ftp5PP6aWOVpD5zvrvuHZC0bMQ==",
+ "version": "8.14.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.14.0.tgz",
+ "integrity": "sha512-aBbBrnW9ARIDn92Zbo7rguLnqQ/pOrUguVpbUwzOhkFg2npFDwTgPGqFqE0H5feXcOoJOfX3SxlJaKEVtq54dw==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/types": "8.11.0",
- "@typescript-eslint/visitor-keys": "8.11.0"
+ "@typescript-eslint/types": "8.14.0",
+ "@typescript-eslint/visitor-keys": "8.14.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2737,55 +2737,15 @@
}
},
"node_modules/@typescript-eslint/type-utils": {
- "version": "8.12.2",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.12.2.tgz",
- "integrity": "sha512-bwuU4TAogPI+1q/IJSKuD4shBLc/d2vGcRT588q+jzayQyjVK2X6v/fbR4InY2U2sgf8MEvVCqEWUzYzgBNcGQ==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/typescript-estree": "8.12.2",
- "@typescript-eslint/utils": "8.12.2",
- "debug": "^4.3.4",
- "ts-api-utils": "^1.3.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": {
- "version": "8.12.2",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.12.2.tgz",
- "integrity": "sha512-VwDwMF1SZ7wPBUZwmMdnDJ6sIFk4K4s+ALKLP6aIQsISkPv8jhiw65sAK6SuWODN/ix+m+HgbYDkH+zLjrzvOA==",
- "dev": true,
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": {
- "version": "8.12.2",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.12.2.tgz",
- "integrity": "sha512-mME5MDwGe30Pq9zKPvyduyU86PH7aixwqYR2grTglAdB+AN8xXQ1vFGpYaUSJ5o5P/5znsSBeNcs5g5/2aQwow==",
+ "version": "8.14.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.14.0.tgz",
+ "integrity": "sha512-Xcz9qOtZuGusVOH5Uk07NGs39wrKkf3AxlkK79RBK6aJC1l03CobXjJbwBPSidetAOV+5rEVuiT1VSBUOAsanQ==",
"dev": true,
+ "license": "MIT",
"dependencies": {
- "@typescript-eslint/types": "8.12.2",
- "@typescript-eslint/visitor-keys": "8.12.2",
+ "@typescript-eslint/typescript-estree": "8.14.0",
+ "@typescript-eslint/utils": "8.14.0",
"debug": "^4.3.4",
- "fast-glob": "^3.3.2",
- "is-glob": "^4.0.3",
- "minimatch": "^9.0.4",
- "semver": "^7.6.0",
"ts-api-utils": "^1.3.0"
},
"engines": {
@@ -2801,27 +2761,10 @@
}
}
},
- "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": {
- "version": "8.12.2",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.12.2.tgz",
- "integrity": "sha512-PChz8UaKQAVNHghsHcPyx1OMHoFRUEA7rJSK/mDhdq85bk+PLsUHUBqTQTFt18VJZbmxBovM65fezlheQRsSDA==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.12.2",
- "eslint-visitor-keys": "^3.4.3"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
"node_modules/@typescript-eslint/types": {
- "version": "8.11.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.11.0.tgz",
- "integrity": "sha512-tn6sNMHf6EBAYMvmPUaKaVeYvhUsrE6x+bXQTxjQRp360h1giATU0WvgeEys1spbvb5R+VpNOZ+XJmjD8wOUHw==",
+ "version": "8.14.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.14.0.tgz",
+ "integrity": "sha512-yjeB9fnO/opvLJFAsPNYlKPnEM8+z4og09Pk504dkqonT02AyL5Z9SSqlE0XqezS93v6CXn49VHvB2G7XSsl0g==",
"dev": true,
"license": "MIT",
"engines": {
@@ -2833,14 +2776,14 @@
}
},
"node_modules/@typescript-eslint/typescript-estree": {
- "version": "8.11.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.11.0.tgz",
- "integrity": "sha512-yHC3s1z1RCHoCz5t06gf7jH24rr3vns08XXhfEqzYpd6Hll3z/3g23JRi0jM8A47UFKNc3u/y5KIMx8Ynbjohg==",
+ "version": "8.14.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.14.0.tgz",
+ "integrity": "sha512-OPXPLYKGZi9XS/49rdaCbR5j/S14HazviBlUQFvSKz3npr3NikF+mrgK7CFVur6XEt95DZp/cmke9d5i3vtVnQ==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
- "@typescript-eslint/types": "8.11.0",
- "@typescript-eslint/visitor-keys": "8.11.0",
+ "@typescript-eslint/types": "8.14.0",
+ "@typescript-eslint/visitor-keys": "8.14.0",
"debug": "^4.3.4",
"fast-glob": "^3.3.2",
"is-glob": "^4.0.3",
@@ -2862,15 +2805,16 @@
}
},
"node_modules/@typescript-eslint/utils": {
- "version": "8.12.2",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.12.2.tgz",
- "integrity": "sha512-UTTuDIX3fkfAz6iSVa5rTuSfWIYZ6ATtEocQ/umkRSyC9O919lbZ8dcH7mysshrCdrAM03skJOEYaBugxN+M6A==",
+ "version": "8.14.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.14.0.tgz",
+ "integrity": "sha512-OGqj6uB8THhrHj0Fk27DcHPojW7zKwKkPmHXHvQ58pLYp4hy8CSUdTKykKeh+5vFqTTVmjz0zCOOPKRovdsgHA==",
"dev": true,
+ "license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.4.0",
- "@typescript-eslint/scope-manager": "8.12.2",
- "@typescript-eslint/types": "8.12.2",
- "@typescript-eslint/typescript-estree": "8.12.2"
+ "@typescript-eslint/scope-manager": "8.14.0",
+ "@typescript-eslint/types": "8.14.0",
+ "@typescript-eslint/typescript-estree": "8.14.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2883,89 +2827,14 @@
"eslint": "^8.57.0 || ^9.0.0"
}
},
- "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": {
- "version": "8.12.2",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.12.2.tgz",
- "integrity": "sha512-gPLpLtrj9aMHOvxJkSbDBmbRuYdtiEbnvO25bCMza3DhMjTQw0u7Y1M+YR5JPbMsXXnSPuCf5hfq0nEkQDL/JQ==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.12.2",
- "@typescript-eslint/visitor-keys": "8.12.2"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": {
- "version": "8.12.2",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.12.2.tgz",
- "integrity": "sha512-VwDwMF1SZ7wPBUZwmMdnDJ6sIFk4K4s+ALKLP6aIQsISkPv8jhiw65sAK6SuWODN/ix+m+HgbYDkH+zLjrzvOA==",
- "dev": true,
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": {
- "version": "8.12.2",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.12.2.tgz",
- "integrity": "sha512-mME5MDwGe30Pq9zKPvyduyU86PH7aixwqYR2grTglAdB+AN8xXQ1vFGpYaUSJ5o5P/5znsSBeNcs5g5/2aQwow==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.12.2",
- "@typescript-eslint/visitor-keys": "8.12.2",
- "debug": "^4.3.4",
- "fast-glob": "^3.3.2",
- "is-glob": "^4.0.3",
- "minimatch": "^9.0.4",
- "semver": "^7.6.0",
- "ts-api-utils": "^1.3.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": {
- "version": "8.12.2",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.12.2.tgz",
- "integrity": "sha512-PChz8UaKQAVNHghsHcPyx1OMHoFRUEA7rJSK/mDhdq85bk+PLsUHUBqTQTFt18VJZbmxBovM65fezlheQRsSDA==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.12.2",
- "eslint-visitor-keys": "^3.4.3"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
"node_modules/@typescript-eslint/visitor-keys": {
- "version": "8.11.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.11.0.tgz",
- "integrity": "sha512-EaewX6lxSjRJnc+99+dqzTeoDZUfyrA52d2/HRrkI830kgovWsmIiTfmr0NZorzqic7ga+1bS60lRBUgR3n/Bw==",
+ "version": "8.14.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.14.0.tgz",
+ "integrity": "sha512-vG0XZo8AdTH9OE6VFRwAZldNc7qtJ/6NLGWak+BtENuEUXGZgFpihILPiBvKXvJ2nFu27XNGC6rKiwuaoMbYzQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/types": "8.11.0",
+ "@typescript-eslint/types": "8.14.0",
"eslint-visitor-keys": "^3.4.3"
},
"engines": {
@@ -2981,6 +2850,7 @@
"resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-3.7.1.tgz",
"integrity": "sha512-vgWOY0i1EROUK0Ctg1hwhtC3SdcDjZcdit4Ups4aPkDcB1jYhmo+RMYWY87cmXMhvtD5uf8lV89j2w16vkdSVg==",
"dev": true,
+ "license": "MIT",
"dependencies": {
"@swc/core": "^1.7.26"
},
@@ -2993,6 +2863,7 @@
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz",
"integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==",
"dev": true,
+ "license": "MIT",
"bin": {
"acorn": "bin/acorn"
},
@@ -3059,6 +2930,7 @@
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.4.tgz",
"integrity": "sha512-y+CcFFwelSXpLZk/7fMB2mUbGtX9lKycf1MWJ7CaTIERyitVlyQx6C+sxcROU2BAJ24OiZyK+8wj2i8AlBoS3A==",
+ "license": "MIT",
"dependencies": {
"tslib": "^2.0.0"
},
@@ -3082,9 +2954,9 @@
}
},
"node_modules/axios": {
- "version": "1.7.4",
- "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.4.tgz",
- "integrity": "sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==",
+ "version": "1.7.7",
+ "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.7.tgz",
+ "integrity": "sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q==",
"license": "MIT",
"dependencies": {
"follow-redirects": "^1.15.6",
@@ -3250,25 +3122,29 @@
"node_modules/client-only": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/client-only/-/client-only-0.0.1.tgz",
- "integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA=="
+ "integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==",
+ "license": "MIT"
},
"node_modules/clsx": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz",
"integrity": "sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==",
+ "license": "MIT",
"engines": {
"node": ">=6"
}
},
"node_modules/codemirror": {
- "version": "5.65.17",
- "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-5.65.17.tgz",
- "integrity": "sha512-1zOsUx3lzAOu/gnMAZkQ9kpIHcPYOc9y1Fbm2UVk5UBPkdq380nhkelG0qUwm1f7wPvTbndu9ZYlug35EwAZRQ=="
+ "version": "5.65.18",
+ "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-5.65.18.tgz",
+ "integrity": "sha512-Gaz4gHnkbHMGgahNt3CA5HBk5lLQBqmD/pBgeB4kQU6OedZmqMBjlRF0LSrp2tJ4wlLNPm2FfaUd1pDy0mdlpA==",
+ "license": "MIT"
},
"node_modules/codemirror-graphql": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/codemirror-graphql/-/codemirror-graphql-2.1.1.tgz",
"integrity": "sha512-qVNd+H4OqkeBLDztB5bYllAmToxmZASOoELgbf+csNcoovIHKqSB/eppkzWI5jdQGd8bvLK1lTePfqXsCBFryw==",
+ "license": "MIT",
"dependencies": {
"@types/codemirror": "^0.0.90",
"graphql-language-service": "5.3.0"
@@ -3283,6 +3159,7 @@
"version": "0.0.90",
"resolved": "https://registry.npmjs.org/@types/codemirror/-/codemirror-0.0.90.tgz",
"integrity": "sha512-8Z9+tSg27NPRGubbUPUCrt5DDG/OWzLph5BvcDykwR5D7RyZh5mhHG0uS1ePKV1YFCA+/cwc4Ey2AJAEFfV3IA==",
+ "license": "MIT",
"dependencies": {
"@types/tern": "*"
}
@@ -3337,9 +3214,9 @@
"license": "MIT"
},
"node_modules/cookie": {
- "version": "0.6.0",
- "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz",
- "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==",
+ "version": "0.7.2",
+ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz",
+ "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
@@ -3355,9 +3232,9 @@
}
},
"node_modules/core-js-pure": {
- "version": "3.38.1",
- "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.38.1.tgz",
- "integrity": "sha512-BY8Etc1FZqdw1glX0XNOq2FDwfrg/VGqoZOZCdaL+UmdaqDwQwYXkMJT4t6In+zfEfOJDcM9T0KdbBeJg8KKCQ==",
+ "version": "3.39.0",
+ "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.39.0.tgz",
+ "integrity": "sha512-7fEcWwKI4rJinnK+wLTezeg2smbFFdSBP6E2kQZNbnzM2s1rpKQ6aaRteZSSg7FLU3P0HGGVo/gbpfanU36urg==",
"hasInstallScript": true,
"license": "MIT",
"funding": {
@@ -3366,9 +3243,9 @@
}
},
"node_modules/cross-spawn": {
- "version": "7.0.3",
- "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
- "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
+ "version": "7.0.5",
+ "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.5.tgz",
+ "integrity": "sha512-ZVJrKKYunU38/76t0RMOulHOnUcbU9GbpWKAOZ0mhjr7CX6FVrH+4FrAapSOekrgFQ3f/8gwMEuIft0aKq6Hug==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -3390,22 +3267,23 @@
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz",
"integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==",
- "devOptional": true,
+ "dev": true,
"license": "MIT"
},
"node_modules/debounce-promise": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/debounce-promise/-/debounce-promise-3.1.2.tgz",
- "integrity": "sha512-rZHcgBkbYavBeD9ej6sP56XfG53d51CD4dnaw989YX/nZ/ZJfgRx/9ePKmTNiUiyQvh4mtrMoS3OAWW+yoYtpg=="
+ "integrity": "sha512-rZHcgBkbYavBeD9ej6sP56XfG53d51CD4dnaw989YX/nZ/ZJfgRx/9ePKmTNiUiyQvh4mtrMoS3OAWW+yoYtpg==",
+ "license": "MIT"
},
"node_modules/debug": {
- "version": "4.3.6",
- "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz",
- "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==",
+ "version": "4.3.7",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz",
+ "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "ms": "2.1.2"
+ "ms": "^2.1.3"
},
"engines": {
"node": ">=6.0"
@@ -3479,12 +3357,13 @@
"node_modules/detect-node-es": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz",
- "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ=="
+ "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==",
+ "license": "MIT"
},
"node_modules/dompurify": {
- "version": "3.1.4",
- "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.1.4.tgz",
- "integrity": "sha512-2gnshi6OshmuKil8rMZuQCGiUF3cUxHY3NGDzUAdUx/NPEe5DVnO8BDoAQouvgwnx0R/+a6jUn36Z0FSdq8vww==",
+ "version": "3.1.6",
+ "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.1.6.tgz",
+ "integrity": "sha512-cTOAhc36AalkjtBpfG6O8JimdTMWNXjiePT2xQH/ppBGi/4uIpmj8eKyIkMJErXWARyINV/sB38yf8JCLF5pbQ==",
"license": "(MPL-2.0 OR Apache-2.0)"
},
"node_modules/drange": {
@@ -3510,6 +3389,7 @@
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
"integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==",
+ "license": "BSD-2-Clause",
"engines": {
"node": ">=0.12"
},
@@ -3574,6 +3454,7 @@
"resolved": "https://registry.npmjs.org/eslint/-/eslint-9.14.0.tgz",
"integrity": "sha512-c2FHsVBr87lnUtjP4Yhvk4yEhKrQavGafRA/Se1ouse8PfbfC/Qh9Mxa00yWsZRlqeUB9raXip0aiiUZkgnr9g==",
"dev": true,
+ "license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.2.0",
"@eslint-community/regexpp": "^4.12.1",
@@ -3634,6 +3515,7 @@
"resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-5.0.0.tgz",
"integrity": "sha512-hIOwI+5hYGpJEc4uPRmz2ulCjAGD/N13Lukkh8cLV0i2IRk/bdZDYjgLVHj+U9Z704kLIdIO6iueGvxNur0sgw==",
"dev": true,
+ "license": "MIT",
"engines": {
"node": ">=10"
},
@@ -3646,6 +3528,7 @@
"resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.14.tgz",
"integrity": "sha512-aXvzCTK7ZBv1e7fahFuR3Z/fyQQSIQ711yPgYRj+Oj64tyTgO4iQIDmYXDBqvSWQ/FA4OSCsXOStlF+noU0/NA==",
"dev": true,
+ "license": "MIT",
"peerDependencies": {
"eslint": ">=7"
}
@@ -3655,6 +3538,7 @@
"resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.2.0.tgz",
"integrity": "sha512-PHlWUfG6lvPc3yvP5A4PNyBL1W8fkDUccmI21JUu/+GKZBoH/W5u6usENXUrWFRsyoW5ACUjFGgAFQp5gUlb/A==",
"dev": true,
+ "license": "BSD-2-Clause",
"dependencies": {
"esrecurse": "^4.3.0",
"estraverse": "^5.2.0"
@@ -3679,12 +3563,6 @@
"url": "https://opencollective.com/eslint"
}
},
- "node_modules/eslint/node_modules/@types/estree": {
- "version": "1.0.6",
- "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz",
- "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==",
- "dev": true
- },
"node_modules/eslint/node_modules/brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
@@ -3701,6 +3579,7 @@
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz",
"integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==",
"dev": true,
+ "license": "Apache-2.0",
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
@@ -3726,6 +3605,7 @@
"resolved": "https://registry.npmjs.org/espree/-/espree-10.3.0.tgz",
"integrity": "sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==",
"dev": true,
+ "license": "BSD-2-Clause",
"dependencies": {
"acorn": "^8.14.0",
"acorn-jsx": "^5.3.2",
@@ -3743,6 +3623,7 @@
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz",
"integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==",
"dev": true,
+ "license": "Apache-2.0",
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
@@ -3768,6 +3649,7 @@
"resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz",
"integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==",
"dev": true,
+ "license": "BSD-2-Clause",
"dependencies": {
"estraverse": "^5.2.0"
},
@@ -3950,9 +3832,9 @@
"license": "ISC"
},
"node_modules/follow-redirects": {
- "version": "1.15.6",
- "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz",
- "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==",
+ "version": "1.15.9",
+ "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz",
+ "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==",
"funding": [
{
"type": "individual",
@@ -3970,9 +3852,9 @@
}
},
"node_modules/form-data": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
- "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz",
+ "integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==",
"license": "MIT",
"dependencies": {
"asynckit": "^0.4.0",
@@ -3995,6 +3877,7 @@
"version": "6.5.1",
"resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-6.5.1.tgz",
"integrity": "sha512-o1BGqqposwi7cgDrtg0dNONhkmPsUFDaLcKXigzuTFC5x58mE8iyTazxSudFzmT6MEyJKfjjU8ItoMe3W+3fiw==",
+ "license": "MIT",
"dependencies": {
"@motionone/dom": "10.12.0",
"framesync": "6.0.1",
@@ -4015,6 +3898,7 @@
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/framesync/-/framesync-6.0.1.tgz",
"integrity": "sha512-fUY88kXvGiIItgNC7wcTOl0SNRCVXMKSWW2Yzfmn7EKNc+MpCzcz9DhdHcdjbrtN3c6R4H5dTY2jiCpPdysEjA==",
+ "license": "MIT",
"dependencies": {
"tslib": "^2.1.0"
}
@@ -4045,6 +3929,7 @@
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz",
"integrity": "sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==",
+ "license": "MIT",
"engines": {
"node": ">=6"
}
@@ -4053,6 +3938,7 @@
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/get-value/-/get-value-3.0.1.tgz",
"integrity": "sha512-mKZj9JLQrwMBtj5wxi6MH8Z5eSKaERpAwjg43dPtlGI1ZVEgH/qC7T8/6R2OBSUA+zzHBZgICsVJaEIV2tKTDA==",
+ "license": "MIT",
"dependencies": {
"isobject": "^3.0.1"
},
@@ -4101,14 +3987,15 @@
"license": "MIT"
},
"node_modules/graphiql": {
- "version": "3.7.1",
- "resolved": "https://registry.npmjs.org/graphiql/-/graphiql-3.7.1.tgz",
- "integrity": "sha512-kmummedOrFYs0BI5evrVY0AerOYlaMt/Sc/e+Sta1x8X6vEMYWNeUUz/kKF2NQT5BcsR3FnNdFt1Gk2QMgueGQ==",
+ "version": "3.7.2",
+ "resolved": "https://registry.npmjs.org/graphiql/-/graphiql-3.7.2.tgz",
+ "integrity": "sha512-DL+KrX+aQdyzl+KwcqjlmdYdjyKegm7FcZJKkIQ1e56xn6Eoe8lw5F4t65gFex/45fHzv8e8CpaIcljxfJhO7A==",
+ "license": "MIT",
"dependencies": {
- "@graphiql/react": "^0.26.2"
+ "@graphiql/react": "^0.27.0"
},
"peerDependencies": {
- "graphql": "^15.5.0 || ^16.0.0 || ^17.0.0-alpha.2",
+ "graphql": "^15.5.0 || ^16.0.0 || ^17.0.0",
"react": "^16.8.0 || ^17 || ^18",
"react-dom": "^16.8.0 || ^17 || ^18"
}
@@ -4126,6 +4013,7 @@
"version": "5.3.0",
"resolved": "https://registry.npmjs.org/graphql-language-service/-/graphql-language-service-5.3.0.tgz",
"integrity": "sha512-gCQIIy7lM9CB1KPLEb+DNZLczA9zuTLEOJE2hEQZTFYInogdmMDRa6RAkvM4LL0LcgcS+3uPs6KtHlcjCqRbUg==",
+ "license": "MIT",
"dependencies": {
"debounce-promise": "^3.1.2",
"nullthrows": "^1.0.0",
@@ -4178,7 +4066,8 @@
"node_modules/hey-listen": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/hey-listen/-/hey-listen-1.0.8.tgz",
- "integrity": "sha512-COpmrF2NOg4TBWUJ5UVyaCU2A88wEMkUPK4hNqyCkqHbxT92BbvfjoSozkAIIm6XhicGlJHhFdullInrdhwU8Q=="
+ "integrity": "sha512-COpmrF2NOg4TBWUJ5UVyaCU2A88wEMkUPK4hNqyCkqHbxT92BbvfjoSozkAIIm6XhicGlJHhFdullInrdhwU8Q==",
+ "license": "MIT"
},
"node_modules/highlight.js": {
"version": "10.7.3",
@@ -4189,6 +4078,12 @@
"node": "*"
}
},
+ "node_modules/highlightjs-vue": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/highlightjs-vue/-/highlightjs-vue-1.0.0.tgz",
+ "integrity": "sha512-PDEfEF102G23vHmPhLyPboFCD+BkMGu+GuJe2d9/eH4FsCwvgBpnc9n0pGE+ffKdph38s6foEZiEjdgHdzp+IA==",
+ "license": "CC0-1.0"
+ },
"node_modules/ieee754": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz",
@@ -4358,6 +4253,7 @@
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz",
"integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==",
+ "license": "MIT",
"dependencies": {
"isobject": "^3.0.1"
},
@@ -4369,6 +4265,7 @@
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/is-primitive/-/is-primitive-3.0.1.tgz",
"integrity": "sha512-GljRxhWvlCNRfZyORiH77FwdFwGcMO620o37EOYC0ORWdq+WYNVqW0w2Juzew4M+L81l6/QS3t5gkkihyRqv9w==",
+ "license": "MIT",
"engines": {
"node": ">=0.10.0"
}
@@ -4384,6 +4281,7 @@
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz",
"integrity": "sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==",
+ "license": "MIT",
"engines": {
"node": ">=0.10.0"
}
@@ -4461,6 +4359,7 @@
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz",
"integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==",
+ "license": "MIT",
"dependencies": {
"uc.micro": "^2.0.0"
}
@@ -4530,6 +4429,7 @@
"version": "14.1.0",
"resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz",
"integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==",
+ "license": "MIT",
"dependencies": {
"argparse": "^2.0.1",
"entities": "^4.4.0",
@@ -4545,7 +4445,8 @@
"node_modules/mdurl": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz",
- "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w=="
+ "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==",
+ "license": "MIT"
},
"node_modules/merge2": {
"version": "1.4.1",
@@ -4561,6 +4462,7 @@
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/meros/-/meros-1.3.0.tgz",
"integrity": "sha512-2BNGOimxEz5hmjUG2FwoxCt5HN7BXdaWyFqEwxPTrJzVdABtrL4TiHTcsWSFAxPQ/tOnEaQEJh3qWq71QRMY+w==",
+ "license": "MIT",
"engines": {
"node": ">=13"
},
@@ -4667,16 +4569,16 @@
"optional": true
},
"node_modules/ms": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
- "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
+ "version": "2.1.3",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
+ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"dev": true,
"license": "MIT"
},
"node_modules/nan": {
- "version": "2.20.0",
- "resolved": "https://registry.npmjs.org/nan/-/nan-2.20.0.tgz",
- "integrity": "sha512-bk3gXBZDGILuuo/6sKtr0DQmSThYHLtNCdSdXk9YkxD/jK6X2vmCyyXBBxyqZ4XcnzTyYEAThfX3DCEnLf6igw==",
+ "version": "2.22.0",
+ "resolved": "https://registry.npmjs.org/nan/-/nan-2.22.0.tgz",
+ "integrity": "sha512-nbajikzWTMwsW+eSsNm3QwlOs7het9gGJU5dDZzRTQGk03vyBOauxgI4VakDzE0PtsGTmXPsXTbbjVhRwR5mpw==",
"license": "MIT",
"optional": true
},
@@ -4691,6 +4593,7 @@
"url": "https://github.com/sponsors/ai"
}
],
+ "license": "MIT",
"bin": {
"nanoid": "bin/nanoid.cjs"
},
@@ -4722,9 +4625,9 @@
}
},
"node_modules/node-abi": {
- "version": "3.67.0",
- "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.67.0.tgz",
- "integrity": "sha512-bLn/fU/ALVBE9wj+p4Y21ZJWYFjUXLXPi/IewyLZkx3ApxKDNBWCKdReeKOtD8dWpOdDCeMyLh6ZewzcLsG2Nw==",
+ "version": "3.71.0",
+ "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.71.0.tgz",
+ "integrity": "sha512-SZ40vRiy/+wRTf21hxkkEjPJZpARzUMVcJoQse2EF8qkUWbbO2z7vd5oA/H6bVH6SZQ5STGcu0KRDS7biNRfxw==",
"license": "MIT",
"optional": true,
"dependencies": {
@@ -4779,7 +4682,8 @@
"node_modules/nullthrows": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/nullthrows/-/nullthrows-1.1.1.tgz",
- "integrity": "sha512-2vPPEi+Z7WqML2jZYddDIfy5Dqb0r2fze2zTxNNknZaFpVHU3mFB3R+DWeJWGVx0ecvttSGlJTI+WG+8Z4cDWw=="
+ "integrity": "sha512-2vPPEi+Z7WqML2jZYddDIfy5Dqb0r2fze2zTxNNknZaFpVHU3mFB3R+DWeJWGVx0ecvttSGlJTI+WG+8Z4cDWw==",
+ "license": "MIT"
},
"node_modules/object-assign": {
"version": "4.1.1",
@@ -4926,10 +4830,11 @@
}
},
"node_modules/picocolors": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz",
- "integrity": "sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==",
- "dev": true
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
+ "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
+ "dev": true,
+ "license": "ISC"
},
"node_modules/picomatch": {
"version": "2.3.1",
@@ -4948,6 +4853,7 @@
"version": "11.0.3",
"resolved": "https://registry.npmjs.org/popmotion/-/popmotion-11.0.3.tgz",
"integrity": "sha512-Y55FLdj3UxkR7Vl3s7Qr4e9m0onSnP8W7d/xQLsoJM40vs6UKHFdygs6SWryasTZYqugMjm3BepCF4CWXDiHgA==",
+ "license": "MIT",
"dependencies": {
"framesync": "6.0.1",
"hey-listen": "^1.0.8",
@@ -4956,9 +4862,9 @@
}
},
"node_modules/postcss": {
- "version": "8.4.45",
- "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.45.tgz",
- "integrity": "sha512-7KTLTdzdZZYscUc65XmjFiB73vBhBfbPztCYdUNvlaso9PrzjzcmjqBPR0lNGkcVlcO4BjiO5rK/qNz+XAen1Q==",
+ "version": "8.4.49",
+ "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.49.tgz",
+ "integrity": "sha512-OCVPnIObs4N29kxTjzLfUryOkvZEq+pf8jTF0lg8E7uETuWHA+v7j3c/xJmiqpX450191LlmZfUKkXxkTry7nA==",
"dev": true,
"funding": [
{
@@ -4974,10 +4880,11 @@
"url": "https://github.com/sponsors/ai"
}
],
+ "license": "MIT",
"dependencies": {
"nanoid": "^3.3.7",
- "picocolors": "^1.0.1",
- "source-map-js": "^1.2.0"
+ "picocolors": "^1.1.1",
+ "source-map-js": "^1.2.1"
},
"engines": {
"node": "^10 || ^12 || >=14"
@@ -5069,9 +4976,9 @@
"license": "MIT"
},
"node_modules/pump": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz",
- "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==",
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.2.tgz",
+ "integrity": "sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw==",
"license": "MIT",
"optional": true,
"dependencies": {
@@ -5093,6 +5000,7 @@
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz",
"integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==",
+ "license": "MIT",
"engines": {
"node": ">=6"
}
@@ -5311,11 +5219,12 @@
}
},
"node_modules/react-remove-scroll": {
- "version": "2.5.7",
- "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.5.7.tgz",
- "integrity": "sha512-FnrTWO4L7/Bhhf3CYBNArEG/yROV0tKmTv7/3h9QCFvH6sndeFf1wPqOcbFVu5VAulS5dV1wGT3GZZ/1GawqiA==",
+ "version": "2.6.0",
+ "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.6.0.tgz",
+ "integrity": "sha512-I2U4JVEsQenxDAKaVa3VZ/JeJZe0/2DxPWL8Tj8yLKctQJQiZM52pn/GWFpSp8dftjM3pSAHVJZscAnC/y+ySQ==",
+ "license": "MIT",
"dependencies": {
- "react-remove-scroll-bar": "^2.3.4",
+ "react-remove-scroll-bar": "^2.3.6",
"react-style-singleton": "^2.2.1",
"tslib": "^2.1.0",
"use-callback-ref": "^1.3.0",
@@ -5338,6 +5247,7 @@
"version": "2.3.6",
"resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.6.tgz",
"integrity": "sha512-DtSYaao4mBmX+HDo5YWYdBWQwYIQQshUV/dVxFxK+KM26Wjwp1gZ6rv6OC3oujI6Bfu6Xyg3TwK533AQutsn/g==",
+ "license": "MIT",
"dependencies": {
"react-style-singleton": "^2.2.1",
"tslib": "^2.0.0"
@@ -5359,6 +5269,7 @@
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/react-style-singleton/-/react-style-singleton-2.2.1.tgz",
"integrity": "sha512-ZWj0fHEMyWkHzKYUr2Bs/4zU6XLmq9HsgBURm7g5pAVfyn49DgUiNgY2d4lXRlYSiCif9YBGpQleewkcqddc7g==",
+ "license": "MIT",
"dependencies": {
"get-nonce": "^1.0.0",
"invariant": "^2.2.4",
@@ -5378,13 +5289,14 @@
}
},
"node_modules/react-syntax-highlighter": {
- "version": "15.5.0",
- "resolved": "https://registry.npmjs.org/react-syntax-highlighter/-/react-syntax-highlighter-15.5.0.tgz",
- "integrity": "sha512-+zq2myprEnQmH5yw6Gqc8lD55QHnpKaU8TOcFeC/Lg/MQSs8UknEA0JC4nTZGFAXC2J2Hyj/ijJ7NlabyPi2gg==",
+ "version": "15.6.1",
+ "resolved": "https://registry.npmjs.org/react-syntax-highlighter/-/react-syntax-highlighter-15.6.1.tgz",
+ "integrity": "sha512-OqJ2/vL7lEeV5zTJyG7kmARppUjiB9h9udl4qHQjjgEos66z00Ia0OckwYfRxCSFrW8RJIBnsBwQsHZbVPspqg==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.3.1",
"highlight.js": "^10.4.1",
+ "highlightjs-vue": "^1.0.0",
"lowlight": "^1.17.0",
"prismjs": "^1.27.0",
"refractor": "^3.6.0"
@@ -5530,12 +5442,13 @@
}
},
"node_modules/rollup": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.22.4.tgz",
- "integrity": "sha512-vD8HJ5raRcWOyymsR6Z3o6+RzfEPCnVLMFJ6vRslO1jt4LO6dUo5Qnpg7y4RkZFM2DMe3WUirkI5c16onjrc6A==",
+ "version": "4.25.0",
+ "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.25.0.tgz",
+ "integrity": "sha512-uVbClXmR6wvx5R1M3Od4utyLUxrmOcEm3pAtMphn73Apq19PDtHpgZoEvqH2YnnaNUuvKmg2DgRd2Sqv+odyqg==",
"dev": true,
+ "license": "MIT",
"dependencies": {
- "@types/estree": "1.0.5"
+ "@types/estree": "1.0.6"
},
"bin": {
"rollup": "dist/bin/rollup"
@@ -5545,22 +5458,24 @@
"npm": ">=8.0.0"
},
"optionalDependencies": {
- "@rollup/rollup-android-arm-eabi": "4.22.4",
- "@rollup/rollup-android-arm64": "4.22.4",
- "@rollup/rollup-darwin-arm64": "4.22.4",
- "@rollup/rollup-darwin-x64": "4.22.4",
- "@rollup/rollup-linux-arm-gnueabihf": "4.22.4",
- "@rollup/rollup-linux-arm-musleabihf": "4.22.4",
- "@rollup/rollup-linux-arm64-gnu": "4.22.4",
- "@rollup/rollup-linux-arm64-musl": "4.22.4",
- "@rollup/rollup-linux-powerpc64le-gnu": "4.22.4",
- "@rollup/rollup-linux-riscv64-gnu": "4.22.4",
- "@rollup/rollup-linux-s390x-gnu": "4.22.4",
- "@rollup/rollup-linux-x64-gnu": "4.22.4",
- "@rollup/rollup-linux-x64-musl": "4.22.4",
- "@rollup/rollup-win32-arm64-msvc": "4.22.4",
- "@rollup/rollup-win32-ia32-msvc": "4.22.4",
- "@rollup/rollup-win32-x64-msvc": "4.22.4",
+ "@rollup/rollup-android-arm-eabi": "4.25.0",
+ "@rollup/rollup-android-arm64": "4.25.0",
+ "@rollup/rollup-darwin-arm64": "4.25.0",
+ "@rollup/rollup-darwin-x64": "4.25.0",
+ "@rollup/rollup-freebsd-arm64": "4.25.0",
+ "@rollup/rollup-freebsd-x64": "4.25.0",
+ "@rollup/rollup-linux-arm-gnueabihf": "4.25.0",
+ "@rollup/rollup-linux-arm-musleabihf": "4.25.0",
+ "@rollup/rollup-linux-arm64-gnu": "4.25.0",
+ "@rollup/rollup-linux-arm64-musl": "4.25.0",
+ "@rollup/rollup-linux-powerpc64le-gnu": "4.25.0",
+ "@rollup/rollup-linux-riscv64-gnu": "4.25.0",
+ "@rollup/rollup-linux-s390x-gnu": "4.25.0",
+ "@rollup/rollup-linux-x64-gnu": "4.25.0",
+ "@rollup/rollup-linux-x64-musl": "4.25.0",
+ "@rollup/rollup-win32-arm64-msvc": "4.25.0",
+ "@rollup/rollup-win32-ia32-msvc": "4.25.0",
+ "@rollup/rollup-win32-x64-msvc": "4.25.0",
"fsevents": "~2.3.2"
}
},
@@ -5654,6 +5569,7 @@
"https://paypal.me/jonathanschlinkert",
"https://jonschlinkert.dev/sponsor"
],
+ "license": "MIT",
"dependencies": {
"is-plain-object": "^2.0.4",
"is-primitive": "^3.0.1"
@@ -5760,6 +5676,7 @@
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
"integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==",
"dev": true,
+ "license": "BSD-3-Clause",
"engines": {
"node": ">=0.10.0"
}
@@ -5803,16 +5720,11 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
- "node_modules/style-mod": {
- "version": "4.1.2",
- "resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.2.tgz",
- "integrity": "sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw==",
- "peer": true
- },
"node_modules/style-value-types": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/style-value-types/-/style-value-types-5.0.0.tgz",
"integrity": "sha512-08yq36Ikn4kx4YU6RD7jWEv27v4V+PUsOGa4n/as8Et3CuODMJQ00ENeAVXAeydX4Z2j1XHZF1K2sX4mGl18fA==",
+ "license": "MIT",
"dependencies": {
"hey-listen": "^1.0.8",
"tslib": "^2.1.0"
@@ -5832,18 +5744,19 @@
}
},
"node_modules/swagger-client": {
- "version": "3.29.2",
- "resolved": "https://registry.npmjs.org/swagger-client/-/swagger-client-3.29.2.tgz",
- "integrity": "sha512-7dOIAodJeUsYbvWTpDODY2+bfJcZ34WG84TByMet76OJ/ZjOLHZtJSgMFxEvnh9+yR0qn8wvHUdfg27ylg2eiQ==",
+ "version": "3.31.0",
+ "resolved": "https://registry.npmjs.org/swagger-client/-/swagger-client-3.31.0.tgz",
+ "integrity": "sha512-hVYift5XB8nOgNJVl6cbNtVTVPT2Fdx2wCOcIvuAFcyq0Mwe6+70ezoZ5WfiaIAzzwWfq72jyaLeg8TViGNSmw==",
"license": "Apache-2.0",
"dependencies": {
"@babel/runtime-corejs3": "^7.22.15",
+ "@scarf/scarf": "=1.4.0",
"@swagger-api/apidom-core": ">=1.0.0-alpha.9 <1.0.0-beta.0",
"@swagger-api/apidom-error": ">=1.0.0-alpha.9 <1.0.0-beta.0",
"@swagger-api/apidom-json-pointer": ">=1.0.0-alpha.9 <1.0.0-beta.0",
"@swagger-api/apidom-ns-openapi-3-1": ">=1.0.0-alpha.9 <1.0.0-beta.0",
"@swagger-api/apidom-reference": ">=1.0.0-alpha.9 <1.0.0-beta.0",
- "cookie": "~0.6.0",
+ "cookie": "~0.7.2",
"deepmerge": "~4.3.0",
"fast-json-patch": "^3.0.0-1",
"js-yaml": "^4.1.0",
@@ -5852,22 +5765,24 @@
"node-fetch-commonjs": "^3.3.2",
"openapi-path-templating": "^1.5.1",
"openapi-server-url-templating": "^1.0.0",
+ "ramda": "^0.30.1",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/swagger-ui-react": {
- "version": "5.17.14",
- "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.17.14.tgz",
- "integrity": "sha512-mCXerZrbcn4ftPYifUF0+iKIRTHoVCv0HcJc/sXl9nCe3oeWdsjmOWVqKabzzAkAa0NwsbKNJFv2UL/Ivnf6VQ==",
+ "version": "5.18.2",
+ "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.18.2.tgz",
+ "integrity": "sha512-vpW7AmkRYdz578iq7C5WrPsg6reBgRzj5xL/fIYR6KTfvY3lvBchpzegFaqg09LWDoL3U2MZvIgOS/1Q9kSJ9g==",
"license": "Apache-2.0",
"dependencies": {
- "@babel/runtime-corejs3": "^7.24.5",
- "@braintree/sanitize-url": "=7.0.2",
+ "@babel/runtime-corejs3": "^7.24.7",
+ "@braintree/sanitize-url": "=7.0.4",
+ "@scarf/scarf": "=1.4.0",
"base64-js": "^1.5.1",
"classnames": "^2.5.1",
"css.escape": "1.5.1",
"deep-extend": "0.6.0",
- "dompurify": "=3.1.4",
+ "dompurify": "=3.1.6",
"ieee754": "^1.2.1",
"immutable": "^3.x.x",
"js-file-download": "^0.4.12",
@@ -5886,10 +5801,10 @@
"redux": "^5.0.1",
"redux-immutable": "^4.0.0",
"remarkable": "^2.0.1",
- "reselect": "^5.1.0",
+ "reselect": "^5.1.1",
"serialize-error": "^8.1.0",
"sha.js": "^2.4.11",
- "swagger-client": "^3.28.1",
+ "swagger-client": "^3.31.0",
"url-parse": "^1.5.10",
"xml": "=1.0.1",
"xml-but-prettier": "^1.0.1",
@@ -5991,9 +5906,9 @@
}
},
"node_modules/ts-api-utils": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.3.0.tgz",
- "integrity": "sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ==",
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.4.0.tgz",
+ "integrity": "sha512-032cPxaEKwM+GT3vA5JXNzIaizx388rhsSW79vGRNGXfRRAdEAn2mvk36PvK5HnOchyWZ7afLEXqYCvPCrzuzQ==",
"dev": true,
"license": "MIT",
"engines": {
@@ -6016,9 +5931,9 @@
"license": "Apache-2.0"
},
"node_modules/tslib": {
- "version": "2.6.3",
- "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz",
- "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==",
+ "version": "2.8.1",
+ "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
+ "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
"license": "0BSD"
},
"node_modules/tunnel-agent": {
@@ -6073,6 +5988,7 @@
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.3.tgz",
"integrity": "sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==",
"dev": true,
+ "license": "Apache-2.0",
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
@@ -6084,7 +6000,8 @@
"node_modules/uc.micro": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz",
- "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A=="
+ "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==",
+ "license": "MIT"
},
"node_modules/unraw": {
"version": "3.0.0",
@@ -6116,6 +6033,7 @@
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.2.tgz",
"integrity": "sha512-elOQwe6Q8gqZgDA8mrh44qRTQqpIHDcZ3hXTLjBe1i4ph8XpNJnO+aQf3NaG+lriLopI4HMx9VjQLfPQ6vhnoA==",
+ "license": "MIT",
"dependencies": {
"tslib": "^2.0.0"
},
@@ -6136,6 +6054,7 @@
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/use-sidecar/-/use-sidecar-1.1.2.tgz",
"integrity": "sha512-epTbsLuzZ7lPClpz2TyryBfztm7m+28DlEv2ZCQ3MDr5ssiwyOwGH/e5F9CkfWjJ1t4clvI58yF822/GUkjjhw==",
+ "license": "MIT",
"dependencies": {
"detect-node-es": "^1.1.0",
"tslib": "^2.0.0"
@@ -6170,10 +6089,11 @@
"optional": true
},
"node_modules/vite": {
- "version": "5.4.10",
- "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.10.tgz",
- "integrity": "sha512-1hvaPshuPUtxeQ0hsVH3Mud0ZanOLwVTneA1EgbAM5LhaZEqyPWGRQ7BtaMvUrTDeEaC8pxtj6a6jku3x4z6SQ==",
+ "version": "5.4.11",
+ "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.11.tgz",
+ "integrity": "sha512-c7jFQRklXua0mTzneGW9QVyxFjUgwcihC4bXEtujIo2ouWCe1Ajt/amn2PCxYnhYfd5k09JX3SB7OYWFKYqj8Q==",
"dev": true,
+ "license": "MIT",
"dependencies": {
"esbuild": "^0.21.3",
"postcss": "^8.4.43",
@@ -6231,13 +6151,8 @@
"node_modules/vscode-languageserver-types": {
"version": "3.17.5",
"resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz",
- "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg=="
- },
- "node_modules/w3c-keyname": {
- "version": "2.2.8",
- "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz",
- "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==",
- "peer": true
+ "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==",
+ "license": "MIT"
},
"node_modules/web-streams-polyfill": {
"version": "3.3.3",
diff --git a/playground/package.json b/playground/package.json
index 3f16d6cc6e..61a6efd544 100644
--- a/playground/package.json
+++ b/playground/package.json
@@ -10,23 +10,23 @@
"preview": "vite preview"
},
"dependencies": {
- "graphiql": "^3.7.1",
+ "graphiql": "^3.7.2",
"graphql": "^16.9.0",
"react": "^18.3.1",
"react-dom": "^18.3.1",
- "swagger-ui-react": "^5.17.14"
+ "swagger-ui-react": "^5.18.2"
},
"devDependencies": {
"@types/react": "^18.3.12",
"@types/react-dom": "^18.3.1",
"@types/swagger-ui-react": "^4.18.3",
- "@typescript-eslint/eslint-plugin": "^8.12.2",
- "@typescript-eslint/parser": "^8.12.2",
+ "@typescript-eslint/eslint-plugin": "^8.13.0",
+ "@typescript-eslint/parser": "^8.13.0",
"@vitejs/plugin-react-swc": "^3.7.1",
"eslint": "^9.14.0",
"eslint-plugin-react-hooks": "^5.0.0",
"eslint-plugin-react-refresh": "^0.4.14",
"typescript": "^5.6.3",
- "vite": "^5.4.10"
+ "vite": "^5.4.11"
}
}
From 909c4af43338eda9283f4f45a1c5870e64e19c1e Mon Sep 17 00:00:00 2001
From: Chris Quigley
Date: Thu, 14 Nov 2024 12:48:03 -0500
Subject: [PATCH 22/47] feat: Support for descending fields CLI index creation
(#3237)
## Relevant issue(s)
Resolves #2460
## Description
Using the HTTP endpoint, it was possible to create a collection index,
with each field being either descending or ascending. This was possible
because one of the parameters was a boolean field called "Descending."
However, this functionality was not present in the CLI equivalent.
This feature adds support for creating descending fields.
For example, the following sorts name in ascending order, because
ascending is the default
`defradb client index create -c User --fields name`
The following sorts it in descending order:
`defradb client index create -c User --fields name:DESC`
And the following sorts multiple fields, in different combinations of
descending and ascending order:
`defradb client index create -c User --fields name:ASC,score:DESC`
## Tasks
- [x] I made sure the code is well commented, particularly
hard-to-understand areas.
- [x] I made sure the pull request title adheres to the conventional
commit style
## How has this been tested?
Specify the platform(s) on which this was tested:
- Windows
---
cli/errors.go | 16 +++-
cli/index_create.go | 36 ++++++++-
.../cli/defradb_client_index_create.md | 7 +-
tests/clients/cli/wrapper_collection.go | 16 +++-
.../index/create_unique_composite_test.go | 76 +++++++++++++++++++
5 files changed, 142 insertions(+), 9 deletions(-)
diff --git a/cli/errors.go b/cli/errors.go
index f084ed21b0..ff283de5f9 100644
--- a/cli/errors.go
+++ b/cli/errors.go
@@ -17,9 +17,11 @@ import (
)
const (
- errInvalidLensConfig string = "invalid lens configuration"
- errSchemaVersionNotOfSchema string = "the given schema version is from a different schema"
- errRequiredFlag string = "the required flag [--%s|-%s] is %s"
+ errInvalidLensConfig string = "invalid lens configuration"
+ errSchemaVersionNotOfSchema string = "the given schema version is from a different schema"
+ errRequiredFlag string = "the required flag [--%s|-%s] is %s"
+ errInvalidAscensionOrder string = "invalid order: expected ASC or DESC"
+ errInvalidInxedFieldDescription string = "invalid or malformed field description"
)
var (
@@ -55,3 +57,11 @@ func NewErrSchemaVersionNotOfSchema(schemaRoot string, schemaVersionID string) e
errors.NewKV("SchemaVersionID", schemaVersionID),
)
}
+
+func NewErrInvalidAscensionOrder(fieldName string) error {
+ return errors.New(errInvalidAscensionOrder, errors.NewKV("Field", fieldName))
+}
+
+func NewErrInvalidInxedFieldDescription(fieldName string) error {
+ return errors.New(errInvalidInxedFieldDescription, errors.NewKV("Field", fieldName))
+}
diff --git a/cli/index_create.go b/cli/index_create.go
index 0d724da15b..e9f4350fa0 100644
--- a/cli/index_create.go
+++ b/cli/index_create.go
@@ -11,6 +11,8 @@
package cli
import (
+ "strings"
+
"github.com/spf13/cobra"
"github.com/sourcenetwork/defradb/client"
@@ -22,26 +24,51 @@ func MakeIndexCreateCommand() *cobra.Command {
var fieldsArg []string
var uniqueArg bool
var cmd = &cobra.Command{
- Use: "create -c --collection --fields [-n --name ] [--unique]",
+ Use: "create -c --collection --fields [-n --name ] [--unique]",
Short: "Creates a secondary index on a collection's field(s)",
Long: `Creates a secondary index on a collection's field(s).
The --name flag is optional. If not provided, a name will be generated automatically.
The --unique flag is optional. If provided, the index will be unique.
+If no order is specified for the field, the default value will be "ASC"
Example: create an index for 'Users' collection on 'name' field:
defradb client index create --collection Users --fields name
Example: create a named index for 'Users' collection on 'name' field:
- defradb client index create --collection Users --fields name --name UsersByName`,
+ defradb client index create --collection Users --fields name --name UsersByName
+
+Example: create a unique index for 'Users' collection on 'name' in ascending order, and 'age' in descending order:
+ defradb client index create --collection Users --fields name:ASC,age:DESC --unique
+`,
ValidArgs: []string{"collection", "fields", "name"},
RunE: func(cmd *cobra.Command, args []string) error {
store := mustGetContextStore(cmd)
var fields []client.IndexedFieldDescription
- for _, name := range fieldsArg {
- fields = append(fields, client.IndexedFieldDescription{Name: name})
+
+ for _, field := range fieldsArg {
+ // For each field, parse it into a field name and ascension order, separated by a colon
+ // If there is no colon, assume the ascension order is ASC by default
+ const asc = "ASC"
+ const desc = "DESC"
+ parts := strings.Split(field, ":")
+ fieldName := parts[0]
+ order := asc
+ if len(parts) == 2 {
+ order = strings.ToUpper(parts[1])
+ if order != asc && order != desc {
+ return NewErrInvalidAscensionOrder(field)
+ }
+ } else if len(parts) > 2 {
+ return NewErrInvalidInxedFieldDescription(field)
+ }
+ fields = append(fields, client.IndexedFieldDescription{
+ Name: fieldName,
+ Descending: order == desc,
+ })
}
+
desc := client.IndexDescription{
Name: nameArg,
Fields: fields,
@@ -51,6 +78,7 @@ Example: create a named index for 'Users' collection on 'name' field:
if err != nil {
return err
}
+
desc, err = col.CreateIndex(cmd.Context(), desc)
if err != nil {
return err
diff --git a/docs/website/references/cli/defradb_client_index_create.md b/docs/website/references/cli/defradb_client_index_create.md
index f37231771d..268cd9eb70 100644
--- a/docs/website/references/cli/defradb_client_index_create.md
+++ b/docs/website/references/cli/defradb_client_index_create.md
@@ -8,15 +8,20 @@ Creates a secondary index on a collection's field(s).
The --name flag is optional. If not provided, a name will be generated automatically.
The --unique flag is optional. If provided, the index will be unique.
+If no order is specified for the field, the default value will be "ASC"
Example: create an index for 'Users' collection on 'name' field:
defradb client index create --collection Users --fields name
Example: create a named index for 'Users' collection on 'name' field:
defradb client index create --collection Users --fields name --name UsersByName
+
+Example: create a unique index for 'Users' collection on 'name' in ascending order, and 'age' in descending order:
+ defradb client index create --collection Users --fields name:ASC,age:DESC --unique
+
```
-defradb client index create -c --collection --fields [-n --name ] [--unique] [flags]
+defradb client index create -c --collection --fields [-n --name ] [--unique] [flags]
```
### Options
diff --git a/tests/clients/cli/wrapper_collection.go b/tests/clients/cli/wrapper_collection.go
index cfa49b9e8e..eb9c5f5466 100644
--- a/tests/clients/cli/wrapper_collection.go
+++ b/tests/clients/cli/wrapper_collection.go
@@ -348,10 +348,24 @@ func (c *Collection) CreateIndex(
}
fields := make([]string, len(indexDesc.Fields))
+ orders := make([]bool, len(indexDesc.Fields))
+
for i := range indexDesc.Fields {
fields[i] = indexDesc.Fields[i].Name
+ orders[i] = indexDesc.Fields[i].Descending
+ }
+
+ orderedFields := make([]string, len(fields))
+
+ for i := range fields {
+ if orders[i] {
+ orderedFields[i] = fields[i] + ":DESC"
+ } else {
+ orderedFields[i] = fields[i] + ":ASC"
+ }
}
- args = append(args, "--fields", strings.Join(fields, ","))
+
+ args = append(args, "--fields", strings.Join(orderedFields, ","))
data, err := c.cmd.execute(ctx, args)
if err != nil {
diff --git a/tests/integration/index/create_unique_composite_test.go b/tests/integration/index/create_unique_composite_test.go
index 7a7e9fc5e0..88778d2e64 100644
--- a/tests/integration/index/create_unique_composite_test.go
+++ b/tests/integration/index/create_unique_composite_test.go
@@ -178,3 +178,79 @@ func TestUniqueCompositeIndexCreate_IfFieldValuesAreUnique_Succeed(t *testing.T)
testUtils.ExecuteTestCase(t, test)
}
+
+func TestUniqueCompositeIndexCreate_IfFieldValuesAreOrdered_Succeed(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "create unique composite index if all docs have unique fields combinations",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User {
+ name: String
+ age: Int
+ email: String
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "John",
+ "age": 21,
+ "email": "some@gmail.com"
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "John",
+ "age": 35,
+ "email": "another@gmail.com"
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Andy",
+ "age": 35,
+ "email": "different@gmail.com"
+ }`,
+ },
+ testUtils.CreateIndex{
+ CollectionID: 0,
+ Fields: []testUtils.IndexedField{{Name: "name", Descending: true}, {Name: "age", Descending: false}, {Name: "email"}},
+ IndexName: "name_age_unique_index",
+ Unique: true,
+ },
+ testUtils.GetIndexes{
+ CollectionID: 0,
+ ExpectedIndexes: []client.IndexDescription{
+ {
+ Name: "name_age_unique_index",
+ ID: 1,
+ Unique: true,
+ Fields: []client.IndexedFieldDescription{
+ {
+ Name: "name",
+ Descending: true,
+ },
+ {
+ Name: "age",
+ Descending: false,
+ },
+ {
+ Name: "email",
+ Descending: false,
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
From 198454b2bd22820b0aebdc0f6ff49659a3a40934 Mon Sep 17 00:00:00 2001
From: Islam Aliev
Date: Thu, 14 Nov 2024 21:35:59 +0100
Subject: [PATCH 23/47] refactor: Consolidate node-related fields into a struct
(#3232)
## Relevant issue(s)
Resolves #3208
## Description
All node-related fields are moved into a separate `nodeState` struct so
now we don't need to maintain all slices and node indexes and it's also
easier to reason about a node's state.
---
tests/integration/acp.go | 4 +-
tests/integration/db.go | 42 +++++++--
tests/integration/events.go | 76 ++++++++--------
tests/integration/lens.go | 2 +-
tests/integration/p2p.go | 14 +--
tests/integration/state.go | 66 +++++++-------
tests/integration/utils.go | 168 +++++++++++-------------------------
7 files changed, 167 insertions(+), 205 deletions(-)
diff --git a/tests/integration/acp.go b/tests/integration/acp.go
index b98be7a059..78a5a50997 100644
--- a/tests/integration/acp.go
+++ b/tests/integration/acp.go
@@ -304,7 +304,7 @@ func getCollectionAndDocInfo(s *state, collectionID, docInd, nodeID int) (string
collectionName := ""
docID := ""
if collectionID != -1 {
- collection := s.collections[nodeID][collectionID]
+ collection := s.nodes[nodeID].collections[collectionID]
if !collection.Description().Name.HasValue() {
require.Fail(s.t, "Expected non-empty collection name, but it was empty.", s.testCase.Description)
}
@@ -617,7 +617,7 @@ func getNodeAudience(s *state, nodeIndex int) immutable.Option[string] {
if nodeIndex >= len(s.nodes) {
return immutable.None[string]()
}
- switch client := s.nodes[nodeIndex].(type) {
+ switch client := s.nodes[nodeIndex].Client.(type) {
case *http.Wrapper:
return immutable.Some(strings.TrimPrefix(client.Host(), "http://"))
case *cli.Wrapper:
diff --git a/tests/integration/db.go b/tests/integration/db.go
index 784ff6952f..8a099e0a94 100644
--- a/tests/integration/db.go
+++ b/tests/integration/db.go
@@ -22,6 +22,7 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/crypto"
"github.com/sourcenetwork/defradb/internal/kms"
+ "github.com/sourcenetwork/defradb/net"
"github.com/sourcenetwork/defradb/node"
changeDetector "github.com/sourcenetwork/defradb/tests/change_detector"
)
@@ -140,7 +141,7 @@ func getDefaultNodeOpts() []node.Option {
// setupNode returns the database implementation for the current
// testing state. The database type on the test state is used to
// select the datastore implementation to use.
-func setupNode(s *state, opts ...node.Option) (*node.Node, string, error) {
+func setupNode(s *state, opts ...node.Option) (*nodeState, error) {
opts = append(getDefaultNodeOpts(), opts...)
switch acpType {
@@ -189,20 +190,51 @@ func setupNode(s *state, opts ...node.Option) (*node.Node, string, error) {
opts = append(opts, node.WithStoreType(node.MemoryStore))
default:
- return nil, "", fmt.Errorf("invalid database type: %v", s.dbt)
+ return nil, fmt.Errorf("invalid database type: %v", s.dbt)
}
if s.kms == PubSubKMSType {
opts = append(opts, node.WithKMS(kms.PubSubServiceType))
}
+ netOpts := make([]net.NodeOpt, 0)
+ for _, opt := range opts {
+ if opt, ok := opt.(net.NodeOpt); ok {
+ netOpts = append(netOpts, opt)
+ }
+ }
+
+ if s.isNetworkEnabled {
+ opts = append(opts, node.WithDisableP2P(false))
+ }
+
node, err := node.New(s.ctx, opts...)
if err != nil {
- return nil, "", err
+ return nil, err
}
+
err = node.Start(s.ctx)
if err != nil {
- return nil, "", err
+ return nil, err
}
- return node, path, nil
+
+ c, err := setupClient(s, node)
+ require.Nil(s.t, err)
+
+ eventState, err := newEventState(c.Events())
+ require.NoError(s.t, err)
+
+ st := &nodeState{
+ Client: c,
+ event: eventState,
+ p2p: newP2PState(),
+ dbPath: path,
+ netOpts: netOpts,
+ }
+
+ if node.Peer != nil {
+ st.peerInfo = node.Peer.PeerInfo()
+ }
+
+ return st, nil
}
diff --git a/tests/integration/events.go b/tests/integration/events.go
index bbe19ce391..0e28f3e3df 100644
--- a/tests/integration/events.go
+++ b/tests/integration/events.go
@@ -39,13 +39,13 @@ func waitForNetworkSetupEvents(s *state, nodeID int) {
for p2pTopicEvent && replicatorEvents > 0 {
select {
- case _, ok := <-s.nodeEvents[nodeID].replicator.Message():
+ case _, ok := <-s.nodes[nodeID].event.replicator.Message():
if !ok {
require.Fail(s.t, "subscription closed waiting for network setup events")
}
replicatorEvents--
- case _, ok := <-s.nodeEvents[nodeID].p2pTopic.Message():
+ case _, ok := <-s.nodes[nodeID].event.p2pTopic.Message():
if !ok {
require.Fail(s.t, "subscription closed waiting for network setup events")
}
@@ -63,7 +63,7 @@ func waitForNetworkSetupEvents(s *state, nodeID int) {
// Expected document heads will be updated for the targeted node.
func waitForReplicatorConfigureEvent(s *state, cfg ConfigureReplicator) {
select {
- case _, ok := <-s.nodeEvents[cfg.SourceNodeID].replicator.Message():
+ case _, ok := <-s.nodes[cfg.SourceNodeID].event.replicator.Message():
if !ok {
require.Fail(s.t, "subscription closed waiting for replicator event")
}
@@ -73,21 +73,21 @@ func waitForReplicatorConfigureEvent(s *state, cfg ConfigureReplicator) {
}
// all previous documents should be merged on the subscriber node
- for key, val := range s.nodeP2P[cfg.SourceNodeID].actualDocHeads {
- s.nodeP2P[cfg.TargetNodeID].expectedDocHeads[key] = val.cid
+ for key, val := range s.nodes[cfg.SourceNodeID].p2p.actualDocHeads {
+ s.nodes[cfg.TargetNodeID].p2p.expectedDocHeads[key] = val.cid
}
// update node connections and replicators
- s.nodeP2P[cfg.TargetNodeID].connections[cfg.SourceNodeID] = struct{}{}
- s.nodeP2P[cfg.SourceNodeID].connections[cfg.TargetNodeID] = struct{}{}
- s.nodeP2P[cfg.SourceNodeID].replicators[cfg.TargetNodeID] = struct{}{}
+ s.nodes[cfg.TargetNodeID].p2p.connections[cfg.SourceNodeID] = struct{}{}
+ s.nodes[cfg.SourceNodeID].p2p.connections[cfg.TargetNodeID] = struct{}{}
+ s.nodes[cfg.SourceNodeID].p2p.replicators[cfg.TargetNodeID] = struct{}{}
}
// waitForReplicatorConfigureEvent waits for a node to publish a
// replicator completed event on the local event bus.
func waitForReplicatorDeleteEvent(s *state, cfg DeleteReplicator) {
select {
- case _, ok := <-s.nodeEvents[cfg.SourceNodeID].replicator.Message():
+ case _, ok := <-s.nodes[cfg.SourceNodeID].event.replicator.Message():
if !ok {
require.Fail(s.t, "subscription closed waiting for replicator event")
}
@@ -96,9 +96,9 @@ func waitForReplicatorDeleteEvent(s *state, cfg DeleteReplicator) {
require.Fail(s.t, "timeout waiting for replicator event")
}
- delete(s.nodeP2P[cfg.TargetNodeID].connections, cfg.SourceNodeID)
- delete(s.nodeP2P[cfg.SourceNodeID].connections, cfg.TargetNodeID)
- delete(s.nodeP2P[cfg.SourceNodeID].replicators, cfg.TargetNodeID)
+ delete(s.nodes[cfg.TargetNodeID].p2p.connections, cfg.SourceNodeID)
+ delete(s.nodes[cfg.SourceNodeID].p2p.connections, cfg.TargetNodeID)
+ delete(s.nodes[cfg.SourceNodeID].p2p.replicators, cfg.TargetNodeID)
}
// waitForSubscribeToCollectionEvent waits for a node to publish a
@@ -107,7 +107,7 @@ func waitForReplicatorDeleteEvent(s *state, cfg DeleteReplicator) {
// Expected document heads will be updated for the subscriber node.
func waitForSubscribeToCollectionEvent(s *state, action SubscribeToCollection) {
select {
- case _, ok := <-s.nodeEvents[action.NodeID].p2pTopic.Message():
+ case _, ok := <-s.nodes[action.NodeID].event.p2pTopic.Message():
if !ok {
require.Fail(s.t, "subscription closed waiting for p2p topic event")
}
@@ -121,7 +121,7 @@ func waitForSubscribeToCollectionEvent(s *state, action SubscribeToCollection) {
if collectionIndex == NonExistentCollectionID {
continue // don't track non existent collections
}
- s.nodeP2P[action.NodeID].peerCollections[collectionIndex] = struct{}{}
+ s.nodes[action.NodeID].p2p.peerCollections[collectionIndex] = struct{}{}
}
}
@@ -129,7 +129,7 @@ func waitForSubscribeToCollectionEvent(s *state, action SubscribeToCollection) {
// p2p topic completed event on the local event bus.
func waitForUnsubscribeToCollectionEvent(s *state, action UnsubscribeToCollection) {
select {
- case _, ok := <-s.nodeEvents[action.NodeID].p2pTopic.Message():
+ case _, ok := <-s.nodes[action.NodeID].event.p2pTopic.Message():
if !ok {
require.Fail(s.t, "subscription closed waiting for p2p topic event")
}
@@ -142,7 +142,7 @@ func waitForUnsubscribeToCollectionEvent(s *state, action UnsubscribeToCollectio
if collectionIndex == NonExistentCollectionID {
continue // don't track non existent collections
}
- delete(s.nodeP2P[action.NodeID].peerCollections, collectionIndex)
+ delete(s.nodes[action.NodeID].p2p.peerCollections, collectionIndex)
}
}
@@ -160,7 +160,8 @@ func waitForUpdateEvents(
continue // node is not selected
}
- if _, ok := s.closedNodes[i]; ok {
+ node := s.nodes[i]
+ if node.closed {
continue // node is closed
}
@@ -172,7 +173,7 @@ func waitForUpdateEvents(
for len(expect) > 0 {
var evt event.Update
select {
- case msg, ok := <-s.nodeEvents[i].update.Message():
+ case msg, ok := <-node.event.update.Message():
if !ok {
require.Fail(s.t, "subscription closed waiting for update event", "Node %d", i)
}
@@ -195,7 +196,7 @@ func waitForUpdateEvents(
// we only need to update the network state if the nodes
// are configured for networking
- if i < len(s.nodeConfigs) {
+ if s.isNetworkEnabled {
updateNetworkState(s, i, evt)
}
}
@@ -208,15 +209,16 @@ func waitForUpdateEvents(
// from running forever.
func waitForMergeEvents(s *state, action WaitForSync) {
for nodeID := 0; nodeID < len(s.nodes); nodeID++ {
- if _, ok := s.closedNodes[nodeID]; ok {
+ node := s.nodes[nodeID]
+ if node.closed {
continue // node is closed
}
- expect := s.nodeP2P[nodeID].expectedDocHeads
+ expect := node.p2p.expectedDocHeads
// remove any docs that are already merged
// up to the expected document head
- for key, val := range s.nodeP2P[nodeID].actualDocHeads {
+ for key, val := range node.p2p.actualDocHeads {
if head, ok := expect[key]; ok && head.String() == val.cid.String() {
delete(expect, key)
}
@@ -228,7 +230,7 @@ func waitForMergeEvents(s *state, action WaitForSync) {
require.Fail(s.t, "doc index %d out of range", docIndex)
}
docID := s.docIDs[0][docIndex].String()
- actual, hasActual := s.nodeP2P[nodeID].actualDocHeads[docID]
+ actual, hasActual := node.p2p.actualDocHeads[docID]
if !hasActual || !actual.decrypted {
expectDecrypted[docID] = struct{}{}
}
@@ -243,7 +245,7 @@ func waitForMergeEvents(s *state, action WaitForSync) {
for len(expect) > 0 || len(expectDecrypted) > 0 {
var evt event.MergeComplete
select {
- case msg, ok := <-s.nodeEvents[nodeID].merge.Message():
+ case msg, ok := <-node.event.merge.Message():
if !ok {
require.Fail(s.t, "subscription closed waiting for merge complete event")
}
@@ -262,7 +264,7 @@ func waitForMergeEvents(s *state, action WaitForSync) {
if ok && head.String() == evt.Merge.Cid.String() {
delete(expect, evt.Merge.DocID)
}
- s.nodeP2P[nodeID].actualDocHeads[evt.Merge.DocID] = docHeadState{cid: evt.Merge.Cid, decrypted: evt.Decrypted}
+ node.p2p.actualDocHeads[evt.Merge.DocID] = docHeadState{cid: evt.Merge.Cid, decrypted: evt.Decrypted}
}
}
}
@@ -272,31 +274,33 @@ func waitForMergeEvents(s *state, action WaitForSync) {
func updateNetworkState(s *state, nodeID int, evt event.Update) {
// find the correct collection index for this update
collectionID := -1
- for i, c := range s.collections[nodeID] {
+ for i, c := range s.nodes[nodeID].collections {
if c.SchemaRoot() == evt.SchemaRoot {
collectionID = i
}
}
+ node := s.nodes[nodeID]
+
// update the actual document head on the node that updated it
// as the node created the document, it is already decrypted
- s.nodeP2P[nodeID].actualDocHeads[evt.DocID] = docHeadState{cid: evt.Cid, decrypted: true}
+ node.p2p.actualDocHeads[evt.DocID] = docHeadState{cid: evt.Cid, decrypted: true}
// update the expected document heads of replicator targets
- for id := range s.nodeP2P[nodeID].replicators {
+ for id := range node.p2p.replicators {
// replicator target nodes push updates to source nodes
- s.nodeP2P[id].expectedDocHeads[evt.DocID] = evt.Cid
+ s.nodes[id].p2p.expectedDocHeads[evt.DocID] = evt.Cid
}
// update the expected document heads of connected nodes
- for id := range s.nodeP2P[nodeID].connections {
+ for id := range node.p2p.connections {
// connected nodes share updates of documents they have in common
- if _, ok := s.nodeP2P[id].actualDocHeads[evt.DocID]; ok {
- s.nodeP2P[id].expectedDocHeads[evt.DocID] = evt.Cid
+ if _, ok := s.nodes[id].p2p.actualDocHeads[evt.DocID]; ok {
+ s.nodes[id].p2p.expectedDocHeads[evt.DocID] = evt.Cid
}
// peer collection subscribers receive updates from any other subscriber node
- if _, ok := s.nodeP2P[id].peerCollections[collectionID]; ok {
- s.nodeP2P[id].expectedDocHeads[evt.DocID] = evt.Cid
+ if _, ok := s.nodes[id].p2p.peerCollections[collectionID]; ok {
+ s.nodes[id].p2p.expectedDocHeads[evt.DocID] = evt.Cid
}
}
@@ -325,9 +329,9 @@ func getEventsForUpdateDoc(s *state, action UpdateDoc) map[string]struct{} {
func getEventsForCreateDoc(s *state, action CreateDoc) map[string]struct{} {
var collection client.Collection
if action.NodeID.HasValue() {
- collection = s.collections[action.NodeID.Value()][action.CollectionID]
+ collection = s.nodes[action.NodeID.Value()].collections[action.CollectionID]
} else {
- collection = s.collections[0][action.CollectionID]
+ collection = s.nodes[0].collections[action.CollectionID]
}
docs, err := parseCreateDocs(action, collection)
diff --git a/tests/integration/lens.go b/tests/integration/lens.go
index c361c55342..fb002f076f 100644
--- a/tests/integration/lens.go
+++ b/tests/integration/lens.go
@@ -59,7 +59,7 @@ func configureMigration(
) {
_, nodes := getNodesWithIDs(action.NodeID, s.nodes)
for _, node := range nodes {
- txn := getTransaction(s, node, action.TransactionID, action.ExpectedError)
+ txn := getTransaction(s, node.Client, action.TransactionID, action.ExpectedError)
ctx := db.SetContextTxn(s.ctx, txn)
err := node.SetMigration(ctx, action.LensConfig)
diff --git a/tests/integration/p2p.go b/tests/integration/p2p.go
index 87e224dce4..0123fc1787 100644
--- a/tests/integration/p2p.go
+++ b/tests/integration/p2p.go
@@ -156,8 +156,8 @@ func connectPeers(
err := sourceNode.Connect(s.ctx, targetNode.PeerInfo())
require.NoError(s.t, err)
- s.nodeP2P[cfg.SourceNodeID].connections[cfg.TargetNodeID] = struct{}{}
- s.nodeP2P[cfg.TargetNodeID].connections[cfg.SourceNodeID] = struct{}{}
+ s.nodes[cfg.SourceNodeID].p2p.connections[cfg.TargetNodeID] = struct{}{}
+ s.nodes[cfg.TargetNodeID].p2p.connections[cfg.SourceNodeID] = struct{}{}
// Bootstrap triggers a bunch of async stuff for which we have no good way of waiting on. It must be
// allowed to complete before documentation begins or it will not even try and sync it. So for now, we
@@ -219,7 +219,7 @@ func subscribeToCollection(
continue
}
- col := s.collections[action.NodeID][collectionIndex]
+ col := s.nodes[action.NodeID].collections[collectionIndex]
schemaRoots = append(schemaRoots, col.SchemaRoot())
}
@@ -253,7 +253,7 @@ func unsubscribeToCollection(
continue
}
- col := s.collections[action.NodeID][collectionIndex]
+ col := s.nodes[action.NodeID].collections[collectionIndex]
schemaRoots = append(schemaRoots, col.SchemaRoot())
}
@@ -281,7 +281,7 @@ func getAllP2PCollections(
) {
expectedCollections := []string{}
for _, collectionIndex := range action.ExpectedCollectionIDs {
- col := s.collections[action.NodeID][collectionIndex]
+ col := s.nodes[action.NodeID].collections[collectionIndex]
expectedCollections = append(expectedCollections, col.SchemaRoot())
}
@@ -294,8 +294,8 @@ func getAllP2PCollections(
// reconnectPeers makes sure that all peers are connected after a node restart action.
func reconnectPeers(s *state) {
- for i, n := range s.nodeP2P {
- for j := range n.connections {
+ for i, n := range s.nodes {
+ for j := range n.p2p.connections {
sourceNode := s.nodes[i]
targetNode := s.nodes[j]
diff --git a/tests/integration/state.go b/tests/integration/state.go
index e7130f2ebd..c163a2d9d3 100644
--- a/tests/integration/state.go
+++ b/tests/integration/state.go
@@ -114,6 +114,30 @@ func newEventState(bus *event.Bus) (*eventState, error) {
}, nil
}
+// nodeState contains all testing state for a node.
+type nodeState struct {
+ // The node's client active in this test.
+ clients.Client
+ // event contains all event node subscriptions.
+ event *eventState
+ // p2p contains p2p states for the node.
+ p2p *p2pState
+ // The network configurations for the nodes
+ netOpts []net.NodeOpt
+ // The path to any file-based databases active in this test.
+ dbPath string
+ // Collections by index present in the test.
+ // Indexes matches that of collectionNames.
+ collections []client.Collection
+ // Indexes, by index, by collection index.
+ indexes [][]client.IndexDescription
+ // indicates if the node is closed.
+ closed bool
+ // peerInfo contains the peer information for the node.
+ peerInfo peer.AddrInfo
+}
+
+// state contains all testing state.
type state struct {
// The test context.
ctx context.Context
@@ -124,6 +148,7 @@ type state struct {
// The TestCase currently being executed.
testCase TestCase
+ // The type of KMS currently being tested.
kms KMSType
// The type of database currently being tested.
@@ -153,30 +178,11 @@ type state struct {
// These channels will receive a function which asserts results of any subscription requests.
subscriptionResultsChans []chan func()
- // nodeEvents contains all event node subscriptions.
- nodeEvents []*eventState
-
- // The addresses of any nodes configured.
- nodeAddresses []peer.AddrInfo
-
- // The configurations for any nodes
- nodeConfigs [][]net.NodeOpt
-
// The nodes active in this test.
- nodes []clients.Client
-
- // closedNodes contains the indexes of nodes that have been closed.
- closedNodes map[int]struct{}
-
- // nodeP2P contains p2p states for all nodes
- nodeP2P []*p2pState
+ nodes []*nodeState
- // The paths to any file-based databases active in this test.
- dbPaths []string
-
- // Collections by index, by nodeID present in the test.
- // Indexes matches that of collectionNames.
- collections [][]client.Collection
+ // The ACP options to share between each node.
+ acpOptions []node.ACPOpt
// The names of the collections active in this test.
// Indexes matches that of initial collections.
@@ -196,17 +202,14 @@ type state struct {
// Valid Cid string values by [UniqueCid] ID.
cids map[any]string
- // Indexes, by index, by collection index, by node index.
- indexes [][][]client.IndexDescription
-
// isBench indicates wether the test is currently being benchmarked.
isBench bool
// The SourceHub address used to pay for SourceHub transactions.
sourcehubAddress string
- // The ACP options to share between each node.
- acpOptions []node.ACPOpt
+ // isNetworkEnabled indicates whether the network is enabled.
+ isNetworkEnabled bool
}
// newState returns a new fresh state for the given testCase.
@@ -230,19 +233,10 @@ func newState(
allActionsDone: make(chan struct{}),
identities: map[identityRef]*identityHolder{},
subscriptionResultsChans: []chan func(){},
- nodeEvents: []*eventState{},
- nodeAddresses: []peer.AddrInfo{},
- nodeConfigs: [][]net.NodeOpt{},
- nodeP2P: []*p2pState{},
- nodes: []clients.Client{},
- closedNodes: map[int]struct{}{},
- dbPaths: []string{},
- collections: [][]client.Collection{},
collectionNames: collectionNames,
collectionIndexesByRoot: map[uint32]int{},
docIDs: [][]client.DocID{},
cids: map[any]string{},
- indexes: [][][]client.IndexDescription{},
isBench: false,
}
}
diff --git a/tests/integration/utils.go b/tests/integration/utils.go
index f827ac0130..39c9ea9624 100644
--- a/tests/integration/utils.go
+++ b/tests/integration/utils.go
@@ -41,7 +41,6 @@ import (
"github.com/sourcenetwork/defradb/net"
"github.com/sourcenetwork/defradb/node"
changeDetector "github.com/sourcenetwork/defradb/tests/change_detector"
- "github.com/sourcenetwork/defradb/tests/clients"
"github.com/sourcenetwork/defradb/tests/gen"
"github.com/sourcenetwork/defradb/tests/predefined"
)
@@ -442,7 +441,7 @@ func createGenerateDocs(s *state, docs []gen.GeneratedDoc, nodeID immutable.Opti
func generateDocs(s *state, action GenerateDocs) {
nodeIDs, _ := getNodesWithIDs(action.NodeID, s.nodes)
firstNodesID := nodeIDs[0]
- collections := s.collections[firstNodesID]
+ collections := s.nodes[firstNodesID].collections
defs := make([]client.CollectionDefinition, 0, len(collections))
for _, collection := range collections {
if len(action.ForCollections) == 0 || slices.Contains(action.ForCollections, collection.Name().Value()) {
@@ -459,7 +458,7 @@ func generateDocs(s *state, action GenerateDocs) {
func generatePredefinedDocs(s *state, action CreatePredefinedDocs) {
nodeIDs, _ := getNodesWithIDs(action.NodeID, s.nodes)
firstNodesID := nodeIDs[0]
- collections := s.collections[firstNodesID]
+ collections := s.nodes[firstNodesID].collections
defs := make([]client.CollectionDefinition, 0, len(collections))
for _, col := range collections {
defs = append(defs, col.Definition())
@@ -577,10 +576,10 @@ func closeNodes(
s *state,
action Close,
) {
- nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
- for i, node := range nodes {
+ _, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ for _, node := range nodes {
node.Close()
- s.closedNodes[nodeIDs[i]] = struct{}{}
+ node.closed = true
}
}
@@ -594,7 +593,7 @@ func closeNodes(
// greater than 0. For example if requesting a node with nodeID=2 then the resulting output will contain only
// one element (at index 0) caller might accidentally assume that this node belongs to node 0. Therefore, the
// caller should always use the returned IDs, instead of guessing the IDs based on node indexes.
-func getNodesWithIDs(nodeID immutable.Option[int], nodes []clients.Client) ([]int, []clients.Client) {
+func getNodesWithIDs(nodeID immutable.Option[int], nodes []*nodeState) ([]int, []*nodeState) {
if !nodeID.HasValue() {
indexes := make([]int, len(nodes))
for i := range nodes {
@@ -603,7 +602,7 @@ func getNodesWithIDs(nodeID immutable.Option[int], nodes []clients.Client) ([]in
return indexes, nodes
}
- return []int{nodeID.Value()}, []clients.Client{nodes[nodeID.Value()]}
+ return []int{nodeID.Value()}, []*nodeState{nodes[nodeID.Value()]}
}
func calculateLenForFlattenedActions(testCase *TestCase) int {
@@ -711,83 +710,44 @@ ActionLoop:
func setStartingNodes(
s *state,
) {
- hasExplicitNode := false
for _, action := range s.testCase.Actions {
switch action.(type) {
case ConfigureNode:
- hasExplicitNode = true
+ s.isNetworkEnabled = true
}
}
// If nodes have not been explicitly configured via actions, setup a default one.
- if !hasExplicitNode {
- node, path, err := setupNode(s)
- require.Nil(s.t, err)
-
- c, err := setupClient(s, node)
+ if !s.isNetworkEnabled {
+ st, err := setupNode(s)
require.Nil(s.t, err)
-
- eventState, err := newEventState(c.Events())
- require.NoError(s.t, err)
-
- s.nodes = append(s.nodes, c)
- s.nodeEvents = append(s.nodeEvents, eventState)
- s.nodeP2P = append(s.nodeP2P, newP2PState())
- s.dbPaths = append(s.dbPaths, path)
+ s.nodes = append(s.nodes, st)
}
}
func startNodes(s *state, action Start) {
- _, nodes := getNodesWithIDs(action.NodeID, s.nodes)
+ nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
// We need to restart the nodes in reverse order, to avoid dial backoff issues.
for i := len(nodes) - 1; i >= 0; i-- {
- nodeIndex := i
- if action.NodeID.HasValue() {
- nodeIndex = action.NodeID.Value()
- }
+ nodeIndex := nodeIDs[i]
originalPath := databaseDir
- databaseDir = s.dbPaths[nodeIndex]
- node, _, err := setupNode(s, db.WithNodeIdentity(getIdentity(s, NodeIdentity(nodeIndex))))
- require.NoError(s.t, err)
- databaseDir = originalPath
-
- if len(s.nodeConfigs) == 0 {
- // If there are no explicit node configuration actions the node will be
- // basic (i.e. no P2P stuff) and can be yielded now.
- c, err := setupClient(s, node)
- require.NoError(s.t, err)
- s.nodes[nodeIndex] = c
-
- eventState, err := newEventState(c.Events())
- require.NoError(s.t, err)
- s.nodeEvents[nodeIndex] = eventState
- continue
+ databaseDir = s.nodes[nodeIndex].dbPath
+ opts := []node.Option{db.WithNodeIdentity(getIdentity(s, NodeIdentity(nodeIndex)))}
+ for _, opt := range s.nodes[nodeIndex].netOpts {
+ opts = append(opts, opt)
}
-
- // We need to make sure the node is configured with its old address, otherwise
- // a new one may be selected and reconnection to it will fail.
var addresses []string
- for _, addr := range s.nodeAddresses[nodeIndex].Addrs {
+ for _, addr := range s.nodes[nodeIndex].peerInfo.Addrs {
addresses = append(addresses, addr.String())
}
-
- nodeOpts := s.nodeConfigs[nodeIndex]
- nodeOpts = append(nodeOpts, net.WithListenAddresses(addresses...))
-
- node.Peer, err = net.NewPeer(s.ctx, node.DB.Blockstore(), node.DB.Encstore(), node.DB.Events(), nodeOpts...)
+ opts = append(opts, net.WithListenAddresses(addresses...))
+ node, err := setupNode(s, opts...)
require.NoError(s.t, err)
+ databaseDir = originalPath
+ node.p2p = s.nodes[nodeIndex].p2p
+ s.nodes[nodeIndex] = node
- c, err := setupClient(s, node)
- require.NoError(s.t, err)
- s.nodes[nodeIndex] = c
-
- eventState, err := newEventState(c.Events())
- require.NoError(s.t, err)
- s.nodeEvents[nodeIndex] = eventState
-
- delete(s.closedNodes, nodeIndex)
-
- waitForNetworkSetupEvents(s, i)
+ waitForNetworkSetupEvents(s, nodeIndex)
}
// If the db was restarted we need to refresh the collection definitions as the old instances
@@ -814,10 +774,8 @@ func restartNodes(
func refreshCollections(
s *state,
) {
- s.collections = make([][]client.Collection, len(s.nodes))
-
- for nodeID, node := range s.nodes {
- s.collections[nodeID] = make([]client.Collection, len(s.collectionNames))
+ for _, node := range s.nodes {
+ node.collections = make([]client.Collection, len(s.collectionNames))
allCollections, err := node.GetCollections(s.ctx, client.CollectionFetchOptions{})
require.Nil(s.t, err)
@@ -838,7 +796,7 @@ func refreshCollections(
for _, collection := range allCollections {
if index, ok := s.collectionIndexesByRoot[collection.Description().RootID]; ok {
- s.collections[nodeID][index] = collection
+ node.collections[index] = collection
}
}
}
@@ -864,35 +822,23 @@ func configureNode(
netNodeOpts := action()
netNodeOpts = append(netNodeOpts, net.WithPrivateKey(privateKey))
- nodeOpts := []node.Option{node.WithDisableP2P(false), db.WithRetryInterval([]time.Duration{time.Millisecond * 1})}
+ nodeOpts := []node.Option{db.WithRetryInterval([]time.Duration{time.Millisecond * 1})}
for _, opt := range netNodeOpts {
nodeOpts = append(nodeOpts, opt)
}
nodeOpts = append(nodeOpts, db.WithNodeIdentity(getIdentity(s, NodeIdentity(len(s.nodes)))))
- node, path, err := setupNode(s, nodeOpts...) //disable change detector, or allow it?
+ node, err := setupNode(s, nodeOpts...) //disable change detector, or allow it?
require.NoError(s.t, err)
- s.nodeAddresses = append(s.nodeAddresses, node.Peer.PeerInfo())
- s.nodeConfigs = append(s.nodeConfigs, netNodeOpts)
-
- c, err := setupClient(s, node)
- require.NoError(s.t, err)
-
- eventState, err := newEventState(c.Events())
- require.NoError(s.t, err)
-
- s.nodes = append(s.nodes, c)
- s.nodeEvents = append(s.nodeEvents, eventState)
- s.nodeP2P = append(s.nodeP2P, newP2PState())
- s.dbPaths = append(s.dbPaths, path)
+ s.nodes = append(s.nodes, node)
}
func refreshDocuments(
s *state,
startActionIndex int,
) {
- if len(s.collections) == 0 {
+ if len(s.nodes) == 0 {
// This should only be possible at the moment for P2P testing, for which the
// change detector is currently disabled. We'll likely need some fancier logic
// here if/when we wish to enable it.
@@ -902,9 +848,9 @@ func refreshDocuments(
// For now just do the initial setup using the collections on the first node,
// this may need to become more involved at a later date depending on testing
// requirements.
- s.docIDs = make([][]client.DocID, len(s.collections[0]))
+ s.docIDs = make([][]client.DocID, len(s.nodes[0].collections))
- for i := range s.collections[0] {
+ for i := range s.nodes[0].collections {
s.docIDs[i] = []client.DocID{}
}
@@ -917,7 +863,7 @@ func refreshDocuments(
// Just use the collection from the first relevant node, as all will be the same for this
// purpose.
firstNodesID := nodeIDs[0]
- collection := s.collections[firstNodesID][action.CollectionID]
+ collection := s.nodes[firstNodesID].collections[action.CollectionID]
if action.DocMap != nil {
substituteRelations(s, action)
@@ -939,16 +885,10 @@ func refreshDocuments(
func refreshIndexes(
s *state,
) {
- if len(s.collections) == 0 {
- return
- }
-
- s.indexes = make([][][]client.IndexDescription, len(s.collections))
-
- for i, nodeCols := range s.collections {
- s.indexes[i] = make([][]client.IndexDescription, len(nodeCols))
+ for _, node := range s.nodes {
+ node.indexes = make([][]client.IndexDescription, len(node.collections))
- for j, col := range nodeCols {
+ for i, col := range node.collections {
if col == nil {
continue
}
@@ -957,7 +897,7 @@ func refreshIndexes(
continue
}
- s.indexes[i][j] = colIndexes
+ node.indexes[i] = colIndexes
}
}
}
@@ -966,7 +906,7 @@ func getIndexes(
s *state,
action GetIndexes,
) {
- if len(s.collections) == 0 {
+ if len(s.nodes) == 0 {
return
}
@@ -974,7 +914,7 @@ func getIndexes(
nodeIDs, _ := getNodesWithIDs(action.NodeID, s.nodes)
for _, nodeID := range nodeIDs {
- collections := s.collections[nodeID]
+ collections := s.nodes[nodeID].collections
err := withRetryOnNode(
s.nodes[nodeID],
func() error {
@@ -1259,7 +1199,7 @@ func createDoc(
nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
for index, node := range nodes {
nodeID := nodeIDs[index]
- collection := s.collections[nodeID][action.CollectionID]
+ collection := s.nodes[nodeID].collections[action.CollectionID]
err := withRetryOnNode(
node,
func() error {
@@ -1449,7 +1389,7 @@ func deleteDoc(
nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
for index, node := range nodes {
nodeID := nodeIDs[index]
- collection := s.collections[nodeID][action.CollectionID]
+ collection := s.nodes[nodeID].collections[action.CollectionID]
ctx := getContextWithIdentity(s.ctx, s, action.Identity, nodeID)
err := withRetryOnNode(
node,
@@ -1493,7 +1433,7 @@ func updateDoc(
nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
for index, node := range nodes {
nodeID := nodeIDs[index]
- collection := s.collections[nodeID][action.CollectionID]
+ collection := s.nodes[nodeID].collections[action.CollectionID]
err := withRetryOnNode(
node,
func() error {
@@ -1596,7 +1536,7 @@ func updateWithFilter(s *state, action UpdateWithFilter) {
nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
for index, node := range nodes {
nodeID := nodeIDs[index]
- collection := s.collections[nodeID][action.CollectionID]
+ collection := s.nodes[nodeID].collections[action.CollectionID]
ctx := getContextWithIdentity(s.ctx, s, action.Identity, nodeID)
err := withRetryOnNode(
node,
@@ -1621,18 +1561,10 @@ func createIndex(
s *state,
action CreateIndex,
) {
- if action.CollectionID >= len(s.indexes) {
- // Expand the slice if required, so that the index can be accessed by collection index
- s.indexes = append(
- s.indexes,
- make([][][]client.IndexDescription, action.CollectionID-len(s.indexes)+1)...,
- )
- }
-
nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
for index, node := range nodes {
nodeID := nodeIDs[index]
- collection := s.collections[nodeID][action.CollectionID]
+ collection := s.nodes[nodeID].collections[action.CollectionID]
indexDesc := client.IndexDescription{
Name: action.IndexName,
}
@@ -1659,8 +1591,8 @@ func createIndex(
if err != nil {
return err
}
- s.indexes[nodeID][action.CollectionID] = append(
- s.indexes[nodeID][action.CollectionID],
+ s.nodes[nodeID].indexes[action.CollectionID] = append(
+ s.nodes[nodeID].indexes[action.CollectionID],
desc,
)
return nil
@@ -1684,10 +1616,10 @@ func dropIndex(
nodeIDs, nodes := getNodesWithIDs(action.NodeID, s.nodes)
for index, node := range nodes {
nodeID := nodeIDs[index]
- collection := s.collections[nodeID][action.CollectionID]
+ collection := s.nodes[nodeID].collections[action.CollectionID]
indexName := action.IndexName
if indexName == "" {
- indexName = s.indexes[nodeID][action.CollectionID][action.IndexID].Name
+ indexName = s.nodes[nodeID].indexes[action.CollectionID][action.IndexID].Name
}
err := withRetryOnNode(
@@ -1764,7 +1696,7 @@ func backupImport(
// about this in our tests so we just retry a few times until it works (or the
// retry limit is breached - important incase this is a different error)
func withRetryOnNode(
- node clients.Client,
+ node client.DB,
action func() error,
) error {
for i := 0; i < node.MaxTxnRetries(); i++ {
From 8e24ee42170f821bc0e64301b0bfc0a489a79755 Mon Sep 17 00:00:00 2001
From: Fred Carle
Date: Fri, 15 Nov 2024 14:58:22 -0500
Subject: [PATCH 24/47] fix(i): Remove flakyness of AES decrypt test (#3241)
## Relevant issue(s)
Resolves #3240
## Description
This PR fixes a flaky test where the modification of the ciphertext was
supposed to cause it to fail decryption. The modification sometimes turned
out to be the same as the actual ciphertext.
---
crypto/aes_test.go | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/crypto/aes_test.go b/crypto/aes_test.go
index 7218ca24b2..7d3375c236 100644
--- a/crypto/aes_test.go
+++ b/crypto/aes_test.go
@@ -149,9 +149,10 @@ func TestDecryptAES(t *testing.T) {
errorContains: "message authentication failed",
},
{
- name: "Tampered ciphertext",
- nonce: validNonce,
- cipherText: append([]byte{0}, validCiphertext[AESNonceSize+1:]...),
+ name: "Tampered ciphertext",
+ nonce: validNonce,
+ // Flip a byte in the ciphertext to corrupt it.
+ cipherText: append([]byte{^validCiphertext[AESNonceSize]}, validCiphertext[AESNonceSize+1:]...),
key: validKey,
additionalData: validAAD,
expectError: true,
From 27893cfac3229f6cd7c665615377b157c0b72507 Mon Sep 17 00:00:00 2001
From: AndrewSisley
Date: Mon, 18 Nov 2024 15:59:33 -0500
Subject: [PATCH 25/47] feat(i): Handle collection commits over P2P (#3247)
## Relevant issue(s)
Resolves #3212
## Description
Handles the syncing of collection commits over P2P.
---
internal/db/merge.go | 149 +++++-----
internal/db/merge_test.go | 16 +-
internal/db/messages.go | 31 ++-
net/peer.go | 14 +-
net/server.go | 53 ++--
tests/integration/acp.go | 6 +-
tests/integration/events.go | 80 ++++--
.../commits/branchables/peer_index_test.go | 68 +++++
.../query/commits/branchables/peer_test.go | 115 ++++++--
.../commits/branchables/peer_update_test.go | 260 ++++++++++++++++++
tests/integration/state.go | 20 +-
tests/integration/utils.go | 11 +-
12 files changed, 657 insertions(+), 166 deletions(-)
create mode 100644 tests/integration/query/commits/branchables/peer_index_test.go
create mode 100644 tests/integration/query/commits/branchables/peer_update_test.go
diff --git a/internal/db/merge.go b/internal/db/merge.go
index 47db8740b1..c361aa8a2b 100644
--- a/internal/db/merge.go
+++ b/internal/db/merge.go
@@ -28,6 +28,7 @@ import (
"github.com/sourcenetwork/defradb/event"
"github.com/sourcenetwork/defradb/internal/core"
coreblock "github.com/sourcenetwork/defradb/internal/core/block"
+ "github.com/sourcenetwork/defradb/internal/core/crdt"
"github.com/sourcenetwork/defradb/internal/db/base"
"github.com/sourcenetwork/defradb/internal/encryption"
"github.com/sourcenetwork/defradb/internal/keys"
@@ -35,30 +36,29 @@ import (
merklecrdt "github.com/sourcenetwork/defradb/internal/merkle/crdt"
)
-func (db *db) executeMerge(ctx context.Context, dagMerge event.Merge) error {
+func (db *db) executeMerge(ctx context.Context, col *collection, dagMerge event.Merge) error {
ctx, txn, err := ensureContextTxn(ctx, db, false)
if err != nil {
return err
}
defer txn.Discard(ctx)
- col, err := getCollectionFromRootSchema(ctx, db, dagMerge.SchemaRoot)
- if err != nil {
- return err
- }
-
- docID, err := client.NewDocIDFromString(dagMerge.DocID)
- if err != nil {
- return err
+ var key keys.HeadstoreKey
+ if dagMerge.DocID != "" {
+ key = keys.HeadstoreDocKey{
+ DocID: dagMerge.DocID,
+ FieldID: core.COMPOSITE_NAMESPACE,
+ }
+ } else {
+ key = keys.NewHeadstoreColKey(col.Description().RootID)
}
- dsKey := base.MakeDataStoreKeyWithCollectionAndDocID(col.Description(), docID.String())
- mp, err := db.newMergeProcessor(txn, col, dsKey)
+ mt, err := getHeadsAsMergeTarget(ctx, txn, key)
if err != nil {
return err
}
- mt, err := getHeadsAsMergeTarget(ctx, txn, dsKey.WithFieldID(core.COMPOSITE_NAMESPACE))
+ mp, err := db.newMergeProcessor(txn, col)
if err != nil {
return err
}
@@ -73,9 +73,15 @@ func (db *db) executeMerge(ctx context.Context, dagMerge event.Merge) error {
return err
}
- err = syncIndexedDoc(ctx, docID, col)
- if err != nil {
- return err
+ for docID := range mp.docIDs {
+ docID, err := client.NewDocIDFromString(docID)
+ if err != nil {
+ return err
+ }
+ err = syncIndexedDoc(ctx, docID, col)
+ if err != nil {
+ return err
+ }
}
err = txn.Commit(ctx)
@@ -94,39 +100,39 @@ func (db *db) executeMerge(ctx context.Context, dagMerge event.Merge) error {
// mergeQueue is synchronization source to ensure that concurrent
// document merges do not cause transaction conflicts.
type mergeQueue struct {
- docs map[string]chan struct{}
+ keys map[string]chan struct{}
mutex sync.Mutex
}
func newMergeQueue() *mergeQueue {
return &mergeQueue{
- docs: make(map[string]chan struct{}),
+ keys: make(map[string]chan struct{}),
}
}
-// add adds a docID to the queue. If the docID is already in the queue, it will
-// wait for the docID to be removed from the queue. For every add call, done must
-// be called to remove the docID from the queue. Otherwise, subsequent add calls will
+// add adds a key to the queue. If the key is already in the queue, it will
+// wait for the key to be removed from the queue. For every add call, done must
+// be called to remove the key from the queue. Otherwise, subsequent add calls will
// block forever.
-func (m *mergeQueue) add(docID string) {
+func (m *mergeQueue) add(key string) {
m.mutex.Lock()
- done, ok := m.docs[docID]
+ done, ok := m.keys[key]
if !ok {
- m.docs[docID] = make(chan struct{})
+ m.keys[key] = make(chan struct{})
}
m.mutex.Unlock()
if ok {
<-done
- m.add(docID)
+ m.add(key)
}
}
-func (m *mergeQueue) done(docID string) {
+func (m *mergeQueue) done(key string) {
m.mutex.Lock()
defer m.mutex.Unlock()
- done, ok := m.docs[docID]
+ done, ok := m.keys[key]
if ok {
- delete(m.docs, docID)
+ delete(m.keys, key)
close(done)
}
}
@@ -135,9 +141,11 @@ type mergeProcessor struct {
txn datastore.Txn
blockLS linking.LinkSystem
encBlockLS linking.LinkSystem
- mCRDTs map[string]merklecrdt.MerkleCRDT
col *collection
- dsKey keys.DataStoreKey
+
+ // docIDs contains all docIDs that have been merged so far by the mergeProcessor
+ docIDs map[string]struct{}
+
// composites is a list of composites that need to be merged.
composites *list.List
// missingEncryptionBlocks is a list of blocks that we failed to fetch
@@ -149,7 +157,6 @@ type mergeProcessor struct {
func (db *db) newMergeProcessor(
txn datastore.Txn,
col *collection,
- dsKey keys.DataStoreKey,
) (*mergeProcessor, error) {
blockLS := cidlink.DefaultLinkSystem()
blockLS.SetReadStorage(txn.Blockstore().AsIPLDStorage())
@@ -161,9 +168,8 @@ func (db *db) newMergeProcessor(
txn: txn,
blockLS: blockLS,
encBlockLS: encBlockLS,
- mCRDTs: make(map[string]merklecrdt.MerkleCRDT),
col: col,
- dsKey: dsKey,
+ docIDs: make(map[string]struct{}),
composites: list.New(),
missingEncryptionBlocks: make(map[cidlink.Link]struct{}),
availableEncryptionBlocks: make(map[cidlink.Link]*coreblock.Encryption),
@@ -375,7 +381,7 @@ func (mp *mergeProcessor) processBlock(
}
if canRead {
- crdt, err := mp.initCRDTForType(dagBlock.Delta.GetFieldName())
+ crdt, err := mp.initCRDTForType(dagBlock.Delta)
if err != nil {
return err
}
@@ -435,50 +441,59 @@ func decryptBlock(
return newBlock, nil
}
-func (mp *mergeProcessor) initCRDTForType(field string) (merklecrdt.MerkleCRDT, error) {
- mcrdt, exists := mp.mCRDTs[field]
- if exists {
- return mcrdt, nil
- }
-
+func (mp *mergeProcessor) initCRDTForType(crdt crdt.CRDT) (merklecrdt.MerkleCRDT, error) {
schemaVersionKey := keys.CollectionSchemaVersionKey{
SchemaVersionID: mp.col.Schema().VersionID,
CollectionID: mp.col.ID(),
}
- if field == "" {
- mcrdt = merklecrdt.NewMerkleCompositeDAG(
+ switch {
+ case crdt.IsComposite():
+ docID := string(crdt.GetDocID())
+ mp.docIDs[docID] = struct{}{}
+
+ return merklecrdt.NewMerkleCompositeDAG(
mp.txn,
schemaVersionKey,
- mp.dsKey.WithFieldID(core.COMPOSITE_NAMESPACE),
- )
- mp.mCRDTs[field] = mcrdt
- return mcrdt, nil
- }
+ base.MakeDataStoreKeyWithCollectionAndDocID(mp.col.Description(), docID).WithFieldID(core.COMPOSITE_NAMESPACE),
+ ), nil
- fd, ok := mp.col.Definition().GetFieldByName(field)
- if !ok {
- // If the field is not part of the schema, we can safely ignore it.
- return nil, nil
+ case crdt.IsCollection():
+ return merklecrdt.NewMerkleCollection(
+ mp.txn,
+ schemaVersionKey,
+ keys.NewHeadstoreColKey(mp.col.Description().RootID),
+ ), nil
+
+ default:
+ docID := string(crdt.GetDocID())
+ mp.docIDs[docID] = struct{}{}
+
+ field := crdt.GetFieldName()
+ fd, ok := mp.col.Definition().GetFieldByName(field)
+ if !ok {
+ // If the field is not part of the schema, we can safely ignore it.
+ return nil, nil
+ }
+
+ return merklecrdt.FieldLevelCRDTWithStore(
+ mp.txn,
+ schemaVersionKey,
+ fd.Typ,
+ fd.Kind,
+ base.MakeDataStoreKeyWithCollectionAndDocID(mp.col.Description(), docID).WithFieldID(fd.ID.String()),
+ field,
+ )
}
+}
- mcrdt, err := merklecrdt.FieldLevelCRDTWithStore(
- mp.txn,
- schemaVersionKey,
- fd.Typ,
- fd.Kind,
- mp.dsKey.WithFieldID(fd.ID.String()),
- field,
- )
+func getCollectionFromRootSchema(ctx context.Context, db *db, rootSchema string) (*collection, error) {
+ ctx, txn, err := ensureContextTxn(ctx, db, false)
if err != nil {
return nil, err
}
+ defer txn.Discard(ctx)
- mp.mCRDTs[field] = mcrdt
- return mcrdt, nil
-}
-
-func getCollectionFromRootSchema(ctx context.Context, db *db, rootSchema string) (*collection, error) {
cols, err := db.getCollections(
ctx,
client.CollectionFetchOptions{
@@ -498,8 +513,8 @@ func getCollectionFromRootSchema(ctx context.Context, db *db, rootSchema string)
// getHeadsAsMergeTarget retrieves the heads of the composite DAG for the given document
// and returns them as a merge target.
-func getHeadsAsMergeTarget(ctx context.Context, txn datastore.Txn, dsKey keys.DataStoreKey) (mergeTarget, error) {
- cids, err := getHeads(ctx, txn, dsKey)
+func getHeadsAsMergeTarget(ctx context.Context, txn datastore.Txn, key keys.HeadstoreKey) (mergeTarget, error) {
+ cids, err := getHeads(ctx, txn, key)
if err != nil {
return mergeTarget{}, err
@@ -520,8 +535,8 @@ func getHeadsAsMergeTarget(ctx context.Context, txn datastore.Txn, dsKey keys.Da
}
// getHeads retrieves the heads associated with the given datastore key.
-func getHeads(ctx context.Context, txn datastore.Txn, dsKey keys.DataStoreKey) ([]cid.Cid, error) {
- headset := clock.NewHeadSet(txn.Headstore(), dsKey.ToHeadStoreKey())
+func getHeads(ctx context.Context, txn datastore.Txn, key keys.HeadstoreKey) ([]cid.Cid, error) {
+ headset := clock.NewHeadSet(txn.Headstore(), key)
cids, _, err := headset.List(ctx)
if err != nil {
diff --git a/internal/db/merge_test.go b/internal/db/merge_test.go
index f9478be536..ee170bfe54 100644
--- a/internal/db/merge_test.go
+++ b/internal/db/merge_test.go
@@ -58,7 +58,7 @@ func TestMerge_SingleBranch_NoError(t *testing.T) {
compInfo2, err := d.generateCompositeUpdate(&lsys, map[string]any{"name": "Johny"}, compInfo)
require.NoError(t, err)
- err = db.executeMerge(ctx, event.Merge{
+ err = db.executeMerge(ctx, col.(*collection), event.Merge{
DocID: docID.String(),
Cid: compInfo2.link.Cid,
SchemaRoot: col.SchemaRoot(),
@@ -103,7 +103,7 @@ func TestMerge_DualBranch_NoError(t *testing.T) {
compInfo2, err := d.generateCompositeUpdate(&lsys, map[string]any{"name": "Johny"}, compInfo)
require.NoError(t, err)
- err = db.executeMerge(ctx, event.Merge{
+ err = db.executeMerge(ctx, col.(*collection), event.Merge{
DocID: docID.String(),
Cid: compInfo2.link.Cid,
SchemaRoot: col.SchemaRoot(),
@@ -113,7 +113,7 @@ func TestMerge_DualBranch_NoError(t *testing.T) {
compInfo3, err := d.generateCompositeUpdate(&lsys, map[string]any{"age": 30}, compInfo)
require.NoError(t, err)
- err = db.executeMerge(ctx, event.Merge{
+ err = db.executeMerge(ctx, col.(*collection), event.Merge{
DocID: docID.String(),
Cid: compInfo3.link.Cid,
SchemaRoot: col.SchemaRoot(),
@@ -161,7 +161,7 @@ func TestMerge_DualBranchWithOneIncomplete_CouldNotFindCID(t *testing.T) {
compInfo2, err := d.generateCompositeUpdate(&lsys, map[string]any{"name": "Johny"}, compInfo)
require.NoError(t, err)
- err = db.executeMerge(ctx, event.Merge{
+ err = db.executeMerge(ctx, col.(*collection), event.Merge{
DocID: docID.String(),
Cid: compInfo2.link.Cid,
SchemaRoot: col.SchemaRoot(),
@@ -180,7 +180,7 @@ func TestMerge_DualBranchWithOneIncomplete_CouldNotFindCID(t *testing.T) {
compInfo3, err := d.generateCompositeUpdate(&lsys, map[string]any{"name": "Johny"}, compInfoUnkown)
require.NoError(t, err)
- err = db.executeMerge(ctx, event.Merge{
+ err = db.executeMerge(ctx, col.(*collection), event.Merge{
DocID: docID.String(),
Cid: compInfo3.link.Cid,
SchemaRoot: col.SchemaRoot(),
@@ -304,15 +304,15 @@ func TestMergeQueue(t *testing.T) {
go q.add(testDocID)
// give time for the goroutine to block
time.Sleep(10 * time.Millisecond)
- require.Len(t, q.docs, 1)
+ require.Len(t, q.keys, 1)
q.done(testDocID)
// give time for the goroutine to add the docID
time.Sleep(10 * time.Millisecond)
q.mutex.Lock()
- require.Len(t, q.docs, 1)
+ require.Len(t, q.keys, 1)
q.mutex.Unlock()
q.done(testDocID)
q.mutex.Lock()
- require.Len(t, q.docs, 0)
+ require.Len(t, q.keys, 0)
q.mutex.Unlock()
}
diff --git a/internal/db/messages.go b/internal/db/messages.go
index 51efba982e..e980eb7d84 100644
--- a/internal/db/messages.go
+++ b/internal/db/messages.go
@@ -22,7 +22,9 @@ import (
)
func (db *db) handleMessages(ctx context.Context, sub *event.Subscription) {
- queue := newMergeQueue()
+ docIDQueue := newMergeQueue()
+ schemaRootQueue := newMergeQueue()
+
// This is used to ensure we only trigger loadAndPublishP2PCollections and loadAndPublishReplicators
// once per db instanciation.
loadOnce := sync.Once{}
@@ -37,17 +39,34 @@ func (db *db) handleMessages(ctx context.Context, sub *event.Subscription) {
switch evt := msg.Data.(type) {
case event.Merge:
go func() {
- // ensure only one merge per docID
- queue.add(evt.DocID)
- defer queue.done(evt.DocID)
+ col, err := getCollectionFromRootSchema(ctx, db, evt.SchemaRoot)
+ if err != nil {
+ log.ErrorContextE(
+ ctx,
+ "Failed to execute merge",
+ err,
+ corelog.Any("Event", evt))
+ return
+ }
+
+ if col.Description().IsBranchable {
+ // As collection commits link to document composite commits, all events
+ // received for branchable collections must be processed serially else
+ // they may otherwise cause a transaction conflict.
+ schemaRootQueue.add(evt.SchemaRoot)
+ defer schemaRootQueue.done(evt.SchemaRoot)
+ } else {
+ // ensure only one merge per docID
+ docIDQueue.add(evt.DocID)
+ defer docIDQueue.done(evt.DocID)
+ }
// retry the merge process if a conflict occurs
//
// conficts occur when a user updates a document
// while a merge is in progress.
- var err error
for i := 0; i < db.MaxTxnRetries(); i++ {
- err = db.executeMerge(ctx, evt)
+ err = db.executeMerge(ctx, col, evt)
if errors.Is(err, datastore.ErrTxnConflict) {
continue // retry merge
}
diff --git a/net/peer.go b/net/peer.go
index e4ebfe8573..d59d6fe150 100644
--- a/net/peer.go
+++ b/net/peer.go
@@ -255,9 +255,11 @@ func (p *Peer) handleMessageLoop() {
}
func (p *Peer) handleLog(evt event.Update) error {
- _, err := client.NewDocIDFromString(evt.DocID)
- if err != nil {
- return NewErrFailedToGetDocID(err)
+ if evt.DocID != "" {
+ _, err := client.NewDocIDFromString(evt.DocID)
+ if err != nil {
+ return NewErrFailedToGetDocID(err)
+ }
}
// push to each peer (replicator)
@@ -273,8 +275,10 @@ func (p *Peer) handleLog(evt event.Update) error {
Block: evt.Block,
}
- if err := p.server.publishLog(p.ctx, evt.DocID, req); err != nil {
- return NewErrPublishingToDocIDTopic(err, evt.Cid.String(), evt.DocID)
+ if evt.DocID != "" {
+ if err := p.server.publishLog(p.ctx, evt.DocID, req); err != nil {
+ return NewErrPublishingToDocIDTopic(err, evt.Cid.String(), evt.DocID)
+ }
}
if err := p.server.publishLog(p.ctx, evt.SchemaRoot, req); err != nil {
diff --git a/net/server.go b/net/server.go
index c83ba3f6be..0be9def0ce 100644
--- a/net/server.go
+++ b/net/server.go
@@ -110,9 +110,12 @@ func (s *server) PushLog(ctx context.Context, req *pushLogRequest) (*pushLogRepl
if err != nil {
return nil, err
}
- docID, err := client.NewDocIDFromString(req.DocID)
- if err != nil {
- return nil, err
+
+ if req.DocID != "" {
+ _, err := client.NewDocIDFromString(req.DocID)
+ if err != nil {
+ return nil, err
+ }
}
byPeer, err := libpeer.Decode(req.Creator)
if err != nil {
@@ -126,11 +129,11 @@ func (s *server) PushLog(ctx context.Context, req *pushLogRequest) (*pushLogRepl
log.InfoContext(ctx, "Received pushlog",
corelog.Any("PeerID", pid.String()),
corelog.Any("Creator", byPeer.String()),
- corelog.Any("DocID", docID.String()))
+ corelog.Any("DocID", req.DocID))
log.InfoContext(ctx, "Starting DAG sync",
corelog.Any("PeerID", pid.String()),
- corelog.Any("DocID", docID.String()))
+ corelog.Any("DocID", req.DocID))
err = syncDAG(ctx, s.peer.bserv, block)
if err != nil {
@@ -139,19 +142,19 @@ func (s *server) PushLog(ctx context.Context, req *pushLogRequest) (*pushLogRepl
log.InfoContext(ctx, "DAG sync complete",
corelog.Any("PeerID", pid.String()),
- corelog.Any("DocID", docID.String()))
+ corelog.Any("DocID", req.DocID))
// Once processed, subscribe to the DocID topic on the pubsub network unless we already
// subscribed to the collection.
- if !s.hasPubSubTopicAndSubscribed(req.SchemaRoot) {
- err = s.addPubSubTopic(docID.String(), true, nil)
+ if !s.hasPubSubTopicAndSubscribed(req.SchemaRoot) && req.DocID != "" {
+ _, err = s.addPubSubTopic(req.DocID, true, nil)
if err != nil {
return nil, err
}
}
s.peer.bus.Publish(event.NewMessage(event.MergeName, event.Merge{
- DocID: docID.String(),
+ DocID: req.DocID,
ByPeer: byPeer,
FromPeer: pid,
Cid: headCID,
@@ -172,9 +175,9 @@ func (s *server) GetHeadLog(
// addPubSubTopic subscribes to a topic on the pubsub network
// A custom message handler can be provided to handle incoming messages. If not provided,
// the default message handler will be used.
-func (s *server) addPubSubTopic(topic string, subscribe bool, handler rpc.MessageHandler) error {
+func (s *server) addPubSubTopic(topic string, subscribe bool, handler rpc.MessageHandler) (pubsubTopic, error) {
if s.peer.ps == nil {
- return nil
+ return pubsubTopic{}, nil
}
log.InfoContext(s.peer.ctx, "Adding pubsub topic",
@@ -188,16 +191,16 @@ func (s *server) addPubSubTopic(topic string, subscribe bool, handler rpc.Messag
// we need to close the existing topic and create a new one.
if !t.subscribed && subscribe {
if err := t.Close(); err != nil {
- return err
+ return pubsubTopic{}, err
}
} else {
- return nil
+ return t, nil
}
}
t, err := rpc.NewTopic(s.peer.ctx, s.peer.ps, s.peer.host.ID(), topic, subscribe)
if err != nil {
- return err
+ return pubsubTopic{}, err
}
if handler == nil {
@@ -206,15 +209,17 @@ func (s *server) addPubSubTopic(topic string, subscribe bool, handler rpc.Messag
t.SetEventHandler(s.pubSubEventHandler)
t.SetMessageHandler(handler)
- s.topics[topic] = pubsubTopic{
+ pst := pubsubTopic{
Topic: t,
subscribed: subscribe,
}
- return nil
+ s.topics[topic] = pst
+ return pst, nil
}
func (s *server) AddPubSubTopic(topicName string, handler rpc.MessageHandler) error {
- return s.addPubSubTopic(topicName, true, handler)
+ _, err := s.addPubSubTopic(topicName, true, handler)
+ return err
}
// hasPubSubTopicAndSubscribed checks if we are subscribed to a topic.
@@ -274,13 +279,23 @@ func (s *server) publishLog(ctx context.Context, topic string, req *pushLogReque
s.mu.Unlock()
if !ok {
subscribe := topic != req.SchemaRoot && !s.hasPubSubTopicAndSubscribed(req.SchemaRoot)
- err := s.addPubSubTopic(topic, subscribe, nil)
+ _, err := s.addPubSubTopic(topic, subscribe, nil)
if err != nil {
return errors.Wrap(fmt.Sprintf("failed to created single use topic %s", topic), err)
}
return s.publishLog(ctx, topic, req)
}
+ if topic == req.SchemaRoot && req.DocID == "" && !t.subscribed {
+ // If the push log request is scoped to the schema and not to a document, subscribe to the
+ // schema.
+ var err error
+ t, err = s.addPubSubTopic(topic, true, nil)
+ if err != nil {
+ return errors.Wrap(fmt.Sprintf("failed to created single use topic %s", topic), err)
+ }
+ }
+
log.InfoContext(ctx, "Publish log",
corelog.String("PeerID", s.peer.PeerID().String()),
corelog.String("Topic", topic))
@@ -356,7 +371,7 @@ func peerIDFromContext(ctx context.Context) (libpeer.ID, error) {
func (s *server) updatePubSubTopics(evt event.P2PTopic) {
for _, topic := range evt.ToAdd {
- err := s.addPubSubTopic(topic, true, nil)
+ _, err := s.addPubSubTopic(topic, true, nil)
if err != nil {
log.ErrorE("Failed to add pubsub topic.", err)
}
diff --git a/tests/integration/acp.go b/tests/integration/acp.go
index 78a5a50997..ce50637d4b 100644
--- a/tests/integration/acp.go
+++ b/tests/integration/acp.go
@@ -215,7 +215,11 @@ func addDocActorRelationshipACP(
}
if action.ExpectedError == "" && !action.ExpectedExistence {
- waitForUpdateEvents(s, actionNodeID, map[string]struct{}{docID: {}})
+ expect := map[string]struct{}{
+ docID: {},
+ }
+
+ waitForUpdateEvents(s, actionNodeID, action.CollectionID, expect)
}
}
diff --git a/tests/integration/events.go b/tests/integration/events.go
index 0e28f3e3df..12fc58f8b7 100644
--- a/tests/integration/events.go
+++ b/tests/integration/events.go
@@ -73,8 +73,8 @@ func waitForReplicatorConfigureEvent(s *state, cfg ConfigureReplicator) {
}
// all previous documents should be merged on the subscriber node
- for key, val := range s.nodes[cfg.SourceNodeID].p2p.actualDocHeads {
- s.nodes[cfg.TargetNodeID].p2p.expectedDocHeads[key] = val.cid
+ for key, val := range s.nodes[cfg.SourceNodeID].p2p.actualDAGHeads {
+ s.nodes[cfg.TargetNodeID].p2p.expectedDAGHeads[key] = val.cid
}
// update node connections and replicators
@@ -153,6 +153,7 @@ func waitForUnsubscribeToCollectionEvent(s *state, action UnsubscribeToCollectio
func waitForUpdateEvents(
s *state,
nodeID immutable.Option[int],
+ collectionIndex int,
docIDs map[string]struct{},
) {
for i := 0; i < len(s.nodes); i++ {
@@ -166,6 +167,11 @@ func waitForUpdateEvents(
}
expect := make(map[string]struct{}, len(docIDs))
+
+ col := node.collections[collectionIndex]
+ if col.Description().IsBranchable {
+ expect[col.SchemaRoot()] = struct{}{}
+ }
for k := range docIDs {
expect[k] = struct{}{}
}
@@ -183,16 +189,10 @@ func waitForUpdateEvents(
require.Fail(s.t, "timeout waiting for update event", "Node %d", i)
}
- if evt.DocID == "" {
- // Todo: This will almost certainly need to change once P2P for collection-level commits
- // is enabled. See: https://github.com/sourcenetwork/defradb/issues/3212
- continue
- }
-
// make sure the event is expected
- _, ok := expect[evt.DocID]
- require.True(s.t, ok, "unexpected document update", "Node %d", i)
- delete(expect, evt.DocID)
+ _, ok := expect[getUpdateEventKey(evt)]
+ require.True(s.t, ok, "unexpected document update", getUpdateEventKey(evt))
+ delete(expect, getUpdateEventKey(evt))
// we only need to update the network state if the nodes
// are configured for networking
@@ -203,7 +203,7 @@ func waitForUpdateEvents(
}
}
-// waitForMergeEvents waits for all expected document heads to be merged to all nodes.
+// waitForMergeEvents waits for all expected heads to be merged to all nodes.
//
// Will fail the test if an event is not received within the expected time interval to prevent tests
// from running forever.
@@ -214,11 +214,11 @@ func waitForMergeEvents(s *state, action WaitForSync) {
continue // node is closed
}
- expect := node.p2p.expectedDocHeads
+ expect := node.p2p.expectedDAGHeads
- // remove any docs that are already merged
- // up to the expected document head
- for key, val := range node.p2p.actualDocHeads {
+ // remove any heads that are already merged
+ // up to the expected head
+ for key, val := range node.p2p.actualDAGHeads {
if head, ok := expect[key]; ok && head.String() == val.cid.String() {
delete(expect, key)
}
@@ -230,13 +230,13 @@ func waitForMergeEvents(s *state, action WaitForSync) {
require.Fail(s.t, "doc index %d out of range", docIndex)
}
docID := s.docIDs[0][docIndex].String()
- actual, hasActual := node.p2p.actualDocHeads[docID]
+ actual, hasActual := node.p2p.actualDAGHeads[docID]
if !hasActual || !actual.decrypted {
expectDecrypted[docID] = struct{}{}
}
}
- // wait for all expected doc heads to be merged
+ // wait for all expected heads to be merged
//
// the order of merges does not matter as we only
// expect the latest head to eventually be merged
@@ -260,11 +260,11 @@ func waitForMergeEvents(s *state, action WaitForSync) {
delete(expectDecrypted, evt.Merge.DocID)
}
- head, ok := expect[evt.Merge.DocID]
+ head, ok := expect[getMergeEventKey(evt.Merge)]
if ok && head.String() == evt.Merge.Cid.String() {
- delete(expect, evt.Merge.DocID)
+ delete(expect, getMergeEventKey(evt.Merge))
}
- node.p2p.actualDocHeads[evt.Merge.DocID] = docHeadState{cid: evt.Merge.Cid, decrypted: evt.Decrypted}
+ node.p2p.actualDAGHeads[getMergeEventKey(evt.Merge)] = docHeadState{cid: evt.Merge.Cid, decrypted: evt.Decrypted}
}
}
}
@@ -284,23 +284,23 @@ func updateNetworkState(s *state, nodeID int, evt event.Update) {
// update the actual document head on the node that updated it
// as the node created the document, it is already decrypted
- node.p2p.actualDocHeads[evt.DocID] = docHeadState{cid: evt.Cid, decrypted: true}
+ node.p2p.actualDAGHeads[getUpdateEventKey(evt)] = docHeadState{cid: evt.Cid, decrypted: true}
// update the expected document heads of replicator targets
for id := range node.p2p.replicators {
// replicator target nodes push updates to source nodes
- s.nodes[id].p2p.expectedDocHeads[evt.DocID] = evt.Cid
+ s.nodes[id].p2p.expectedDAGHeads[getUpdateEventKey(evt)] = evt.Cid
}
// update the expected document heads of connected nodes
for id := range node.p2p.connections {
// connected nodes share updates of documents they have in common
- if _, ok := s.nodes[id].p2p.actualDocHeads[evt.DocID]; ok {
- s.nodes[id].p2p.expectedDocHeads[evt.DocID] = evt.Cid
+ if _, ok := s.nodes[id].p2p.actualDAGHeads[getUpdateEventKey(evt)]; ok {
+ s.nodes[id].p2p.expectedDAGHeads[getUpdateEventKey(evt)] = evt.Cid
}
// peer collection subscribers receive updates from any other subscriber node
if _, ok := s.nodes[id].p2p.peerCollections[collectionID]; ok {
- s.nodes[id].p2p.expectedDocHeads[evt.DocID] = evt.Cid
+ s.nodes[id].p2p.expectedDAGHeads[getUpdateEventKey(evt)] = evt.Cid
}
}
@@ -337,7 +337,7 @@ func getEventsForCreateDoc(s *state, action CreateDoc) map[string]struct{} {
docs, err := parseCreateDocs(action, collection)
require.NoError(s.t, err)
- expect := make(map[string]struct{})
+ expect := make(map[string]struct{}, action.CollectionID+1)
for _, doc := range docs {
expect[doc.ID().String()] = struct{}{}
@@ -361,7 +361,7 @@ func getEventsForUpdateWithFilter(
err := json.Unmarshal([]byte(action.Updater), &docPatch)
require.NoError(s.t, err)
- expect := make(map[string]struct{})
+ expect := make(map[string]struct{}, len(result.DocIDs))
for _, docID := range result.DocIDs {
expect[docID] = struct{}{}
@@ -369,3 +369,27 @@ func getEventsForUpdateWithFilter(
return expect
}
+
+// getUpdateEventKey gets the identifier to which this event is scoped to.
+//
+// For example, if this is scoped to a document, the document ID will be
+// returned. If it is scoped to a schema, the schema root will be returned.
+func getUpdateEventKey(evt event.Update) string {
+ if evt.DocID == "" {
+ return evt.SchemaRoot
+ }
+
+ return evt.DocID
+}
+
+// getMergeEventKey gets the identifier to which this event is scoped to.
+//
+// For example, if this is scoped to a document, the document ID will be
+// returned. If it is scoped to a schema, the schema root will be returned.
+func getMergeEventKey(evt event.Merge) string {
+ if evt.DocID == "" {
+ return evt.SchemaRoot
+ }
+
+ return evt.DocID
+}
diff --git a/tests/integration/query/commits/branchables/peer_index_test.go b/tests/integration/query/commits/branchables/peer_index_test.go
new file mode 100644
index 0000000000..ab03eb3c56
--- /dev/null
+++ b/tests/integration/query/commits/branchables/peer_index_test.go
@@ -0,0 +1,68 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package branchables
+
+import (
+ "testing"
+
+ "github.com/sourcenetwork/immutable"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQueryCommitsBranchables_SyncsIndexAcrossPeerConnection(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.RandomNetworkingConfig(),
+ testUtils.RandomNetworkingConfig(),
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable {
+ name: String @index
+ }
+ `,
+ },
+ testUtils.ConnectPeers{
+ SourceNodeID: 1,
+ TargetNodeID: 0,
+ },
+ testUtils.SubscribeToCollection{
+ NodeID: 1,
+ CollectionIDs: []int{0},
+ },
+ testUtils.CreateDoc{
+ NodeID: immutable.Some(0),
+ Doc: `{
+ "name": "John"
+ }`,
+ },
+ testUtils.WaitForSync{},
+ testUtils.Request{
+ // This query errors out if the document's index has not been correctly
+ // constructed
+ Request: `query {
+ Users (filter: {name: {_eq: "John"}}){
+ name
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "name": "John",
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/query/commits/branchables/peer_test.go b/tests/integration/query/commits/branchables/peer_test.go
index 81ff77a240..6d864ad9d1 100644
--- a/tests/integration/query/commits/branchables/peer_test.go
+++ b/tests/integration/query/commits/branchables/peer_test.go
@@ -18,8 +18,6 @@ import (
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
-// TODO: This test documents an unimplemented feature. Tracked by:
-// https://github.com/sourcenetwork/defradb/issues/3212
func TestQueryCommitsBranchables_SyncsAcrossPeerConnection(t *testing.T) {
test := testUtils.TestCase{
Actions: []any{
@@ -50,15 +48,14 @@ func TestQueryCommitsBranchables_SyncsAcrossPeerConnection(t *testing.T) {
},
testUtils.WaitForSync{},
testUtils.Request{
- NodeID: immutable.Some(0),
Request: `query {
- commits {
- cid
- links {
+ commits {
cid
+ links {
+ cid
+ }
}
- }
- }`,
+ }`,
Results: map[string]any{
"commits": []map[string]any{
{
@@ -91,35 +88,113 @@ func TestQueryCommitsBranchables_SyncsAcrossPeerConnection(t *testing.T) {
},
},
},
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryCommitsBranchables_SyncsMultipleAcrossPeerConnection(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.RandomNetworkingConfig(),
+ testUtils.RandomNetworkingConfig(),
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable {
+ name: String
+ age: Int
+ }
+ `,
+ },
+ testUtils.ConnectPeers{
+ SourceNodeID: 1,
+ TargetNodeID: 0,
+ },
+ testUtils.SubscribeToCollection{
+ NodeID: 1,
+ CollectionIDs: []int{0},
+ },
+ testUtils.CreateDoc{
+ NodeID: immutable.Some(0),
+ Doc: `{
+ "name": "John",
+ "age": 21
+ }`,
+ },
+ testUtils.CreateDoc{
+ NodeID: immutable.Some(0),
+ Doc: `{
+ "name": "Fred",
+ "age": 25
+ }`,
+ },
+ testUtils.WaitForSync{},
testUtils.Request{
- NodeID: immutable.Some(1),
Request: `query {
- commits {
- cid
- links {
+ commits {
cid
+ links {
+ cid
+ }
}
- }
- }`,
+ }`,
Results: map[string]any{
"commits": []map[string]any{
- // Note: The collection commit has not synced.
{
- "cid": testUtils.NewUniqueCid("age"),
+ "cid": testUtils.NewUniqueCid("collection, doc2 create"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("collection, doc1 create"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc2 create"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("collection, doc1 create"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("doc1 create"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc1 name"),
"links": []map[string]any{},
},
{
- "cid": testUtils.NewUniqueCid("name"),
+ "cid": testUtils.NewUniqueCid("doc1 age"),
"links": []map[string]any{},
},
{
- "cid": testUtils.NewUniqueCid("composite"),
+ "cid": testUtils.NewUniqueCid("doc1 create"),
"links": []map[string]any{
{
- "cid": testUtils.NewUniqueCid("age"),
+ "cid": testUtils.NewUniqueCid("doc1 name"),
},
{
- "cid": testUtils.NewUniqueCid("name"),
+ "cid": testUtils.NewUniqueCid("doc1 age"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc2 name"),
+ "links": []map[string]any{},
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc2 age"),
+ "links": []map[string]any{},
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc2 create"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("doc2 name"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc2 age"),
},
},
},
diff --git a/tests/integration/query/commits/branchables/peer_update_test.go b/tests/integration/query/commits/branchables/peer_update_test.go
new file mode 100644
index 0000000000..01789a3bc4
--- /dev/null
+++ b/tests/integration/query/commits/branchables/peer_update_test.go
@@ -0,0 +1,260 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package branchables
+
+import (
+ "testing"
+
+ "github.com/sourcenetwork/immutable"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQueryCommitsBranchables_HandlesConcurrentUpdatesAcrossPeerConnection(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.RandomNetworkingConfig(),
+ testUtils.RandomNetworkingConfig(),
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable {
+ name: String
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John"
+ }`,
+ },
+ testUtils.UpdateDoc{
+ NodeID: immutable.Some(0),
+ Doc: `{
+ "name": "Fred"
+ }`,
+ },
+ testUtils.UpdateDoc{
+ NodeID: immutable.Some(1),
+ Doc: `{
+ "name": "Shahzad"
+ }`,
+ },
+ testUtils.ConnectPeers{
+ SourceNodeID: 1,
+ TargetNodeID: 0,
+ },
+ testUtils.WaitForSync{},
+ testUtils.UpdateDoc{
+ // Update node 1 after the peer connection has been established, this will cause the `Shahzad` commit
+ // to be synced to node 0, as well as the related collection commits.
+ NodeID: immutable.Some(1),
+ Doc: `{
+ "name": "Chris"
+ }`,
+ },
+ testUtils.WaitForSync{},
+ testUtils.UpdateDoc{
+ // Update node 0 after `Chris` and `Shahzad` have synced to node 0. As this update happens after the peer
+ // connection has been established, this will cause the `Fred` and `Addo` doc commits, and their corresponding
+ // collection-level commits to sync to node 1.
+ //
+ // Now, all nodes should have a full history, including the 'offline' changes made before establishing the
+ // peer connection.
+ NodeID: immutable.Some(0),
+ Doc: `{
+ "name": "Addo"
+ }`,
+ },
+ testUtils.WaitForSync{},
+ testUtils.Request{
+ // Strong eventual consistency must now have been established across both nodes, the result of this query
+ // *must* exactly match across both nodes.
+ Request: `query {
+ commits {
+ cid
+ links {
+ cid
+ }
+ }
+ }`,
+ Results: map[string]any{
+ "commits": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("collection, node0 update3"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("collection, node1 update2"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("collection, node1 update1"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc, node0 update3"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("collection, node1 update1"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("collection, create"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc, node1 update1"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("collection, create"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("doc, create"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("collection, node1 update2"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("collection, node0 update1"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc, node1 update2"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("collection, node0 update1"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("collection, create"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc, node0 update1"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name, node0 update3"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("name, node1 update1"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name, node1 update2"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name, node1 update2"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("name, node0 update1"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name, node0 update1"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("name, create"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name, create"),
+ "links": []map[string]any{},
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name, node1 update1"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("name, create"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc, node0 update3"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("doc, node1 update2"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc, node1 update1"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name, node0 update3"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc, node1 update1"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("doc, create"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name, node1 update1"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc, create"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("name, create"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc, node1 update2"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("doc, node0 update1"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name, node1 update2"),
+ },
+ },
+ },
+ {
+ "cid": testUtils.NewUniqueCid("doc, node0 update1"),
+ "links": []map[string]any{
+ {
+ "cid": testUtils.NewUniqueCid("doc, create"),
+ },
+ {
+ "cid": testUtils.NewUniqueCid("name, node0 update1"),
+ },
+ },
+ },
+ },
+ },
+ },
+ testUtils.Request{
+ Request: `query {
+ Users {
+ name
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "name": "Addo",
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/state.go b/tests/integration/state.go
index c163a2d9d3..c495f80d9e 100644
--- a/tests/integration/state.go
+++ b/tests/integration/state.go
@@ -42,15 +42,21 @@ type p2pState struct {
// The map key is the node id of the subscriber.
peerCollections map[int]struct{}
- // actualDocHeads contains all document heads that exist on a node.
+ // actualDAGHeads contains all DAG heads that exist on a node.
//
// The map key is the doc id. The map value is the doc head.
- actualDocHeads map[string]docHeadState
+ //
+ // This tracks composite commits for documents, and collection commits for
+ // branchable collections
+ actualDAGHeads map[string]docHeadState
- // expectedDocHeads contains all document heads that are expected to exist on a node.
+ // expectedDAGHeads contains all DAG heads that are expected to exist on a node.
//
- // The map key is the doc id. The map value is the doc head.
- expectedDocHeads map[string]cid.Cid
+ // The map key is the doc id. The map value is the DAG head.
+ //
+ // This tracks composite commits for documents, and collection commits for
+ // branchable collections
+ expectedDAGHeads map[string]cid.Cid
}
// docHeadState contains the state of a document head.
@@ -68,8 +74,8 @@ func newP2PState() *p2pState {
connections: make(map[int]struct{}),
replicators: make(map[int]struct{}),
peerCollections: make(map[int]struct{}),
- actualDocHeads: make(map[string]docHeadState),
- expectedDocHeads: make(map[string]cid.Cid),
+ actualDAGHeads: make(map[string]docHeadState),
+ expectedDAGHeads: make(map[string]cid.Cid),
}
}
diff --git a/tests/integration/utils.go b/tests/integration/utils.go
index 39c9ea9624..3c0e9baffd 100644
--- a/tests/integration/utils.go
+++ b/tests/integration/utils.go
@@ -1226,7 +1226,7 @@ func createDoc(
s.docIDs[action.CollectionID] = append(s.docIDs[action.CollectionID], docIDs...)
if action.ExpectedError == "" {
- waitForUpdateEvents(s, action.NodeID, getEventsForCreateDoc(s, action))
+ waitForUpdateEvents(s, action.NodeID, action.CollectionID, getEventsForCreateDoc(s, action))
}
}
@@ -1404,10 +1404,11 @@ func deleteDoc(
assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
if action.ExpectedError == "" {
- docIDs := map[string]struct{}{
+ expect := map[string]struct{}{
docID.String(): {},
}
- waitForUpdateEvents(s, action.NodeID, docIDs)
+
+ waitForUpdateEvents(s, action.NodeID, action.CollectionID, expect)
}
}
@@ -1452,7 +1453,7 @@ func updateDoc(
assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
if action.ExpectedError == "" && !action.SkipLocalUpdateEvent {
- waitForUpdateEvents(s, action.NodeID, getEventsForUpdateDoc(s, action))
+ waitForUpdateEvents(s, action.NodeID, action.CollectionID, getEventsForUpdateDoc(s, action))
}
}
@@ -1552,7 +1553,7 @@ func updateWithFilter(s *state, action UpdateWithFilter) {
assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
if action.ExpectedError == "" && !action.SkipLocalUpdateEvent {
- waitForUpdateEvents(s, action.NodeID, getEventsForUpdateWithFilter(s, action, res))
+ waitForUpdateEvents(s, action.NodeID, action.CollectionID, getEventsForUpdateWithFilter(s, action, res))
}
}
From c45b07e3de2284d1622d1c389f7ce401cf89a0f9 Mon Sep 17 00:00:00 2001
From: AndrewSisley
Date: Wed, 20 Nov 2024 04:48:44 -0500
Subject: [PATCH 26/47] feat: Add support for cid-only time travel queries
(#3256)
## Relevant issue(s)
Resolves #3214
## Description
Adds support for cid-only time travel queries.
Also removes some dead code.
---
internal/db/fetcher/versioned.go | 21 +------
internal/planner/select.go | 13 +++--
.../request/graphql/schema/descriptions.go | 2 +-
.../integration/query/simple/with_cid_test.go | 58 +++++++++++++------
4 files changed, 54 insertions(+), 40 deletions(-)
diff --git a/internal/db/fetcher/versioned.go b/internal/db/fetcher/versioned.go
index 24f3ab8467..199ca38d21 100644
--- a/internal/db/fetcher/versioned.go
+++ b/internal/db/fetcher/versioned.go
@@ -153,33 +153,18 @@ func (vf *VersionedFetcher) Init(
// Start serializes the correct state according to the Key and CID.
func (vf *VersionedFetcher) Start(ctx context.Context, spans ...core.Span) error {
- if vf.col == nil {
- return client.NewErrUninitializeProperty("VersionedFetcher", "CollectionDescription")
- }
-
- if len(spans) != 1 {
- return ErrSingleSpanOnly
- }
-
// VersionedFetcher only ever recieves a headstore key
//nolint:forcetypeassert
prefix := spans[0].Start.(keys.HeadstoreDocKey)
- dk := prefix.DocID
- cid := prefix.Cid
- if dk == "" {
- return client.NewErrUninitializeProperty("Spans", "DocID")
- } else if !cid.Defined() {
- return client.NewErrUninitializeProperty("Spans", "CID")
- }
vf.ctx = ctx
vf.dsKey = keys.DataStoreKey{
CollectionRootID: vf.col.Description().RootID,
- DocID: dk,
+ DocID: prefix.DocID,
}
- if err := vf.seekTo(cid); err != nil {
- return NewErrFailedToSeek(cid, err)
+ if err := vf.seekTo(prefix.Cid); err != nil {
+ return NewErrFailedToSeek(prefix.Cid, err)
}
return vf.DocumentFetcher.Start(ctx)
diff --git a/internal/planner/select.go b/internal/planner/select.go
index f1b3d05867..56245666cf 100644
--- a/internal/planner/select.go
+++ b/internal/planner/select.go
@@ -255,19 +255,24 @@ func (n *selectNode) initSource() ([]aggregateNode, error) {
origScan.filter = n.filter
n.filter = nil
- // If we have both a DocID and a CID, then we need to run
- // a TimeTravel (History-Traversing Versioned) query, which means
- // we need to propagate the values to the underlying VersionedFetcher
+ // If we have a CID, then we need to run a TimeTravel (History-Traversing Versioned)
+ // query, which means we need to propagate the values to the underlying VersionedFetcher
if n.selectReq.Cid.HasValue() {
c, err := cid.Decode(n.selectReq.Cid.Value())
if err != nil {
return nil, err
}
+
+ var docID string
+ if len(n.selectReq.DocIDs.Value()) > 0 {
+ docID = n.selectReq.DocIDs.Value()[0]
+ }
+
origScan.Spans(
[]core.Span{
core.NewSpan(
keys.HeadstoreDocKey{
- DocID: n.selectReq.DocIDs.Value()[0],
+ DocID: docID,
Cid: c,
},
keys.HeadstoreDocKey{},
diff --git a/internal/request/graphql/schema/descriptions.go b/internal/request/graphql/schema/descriptions.go
index b667410c2c..07a6873d61 100644
--- a/internal/request/graphql/schema/descriptions.go
+++ b/internal/request/graphql/schema/descriptions.go
@@ -73,7 +73,7 @@ An optional set of docIDs for this field. Only documents with a docID
be ignored.
`
cidArgDescription string = `
-An optional value that specifies the commit ID of the document to return.
+An optional value that specifies the commit ID of a document to return.
This CID does not need to be the most recent for a document, if it
corresponds to an older version of a document the document will be returned
at the state it was in at the time of that commit. If a matching commit is
diff --git a/tests/integration/query/simple/with_cid_test.go b/tests/integration/query/simple/with_cid_test.go
index e4c07987e0..4bf6d5e224 100644
--- a/tests/integration/query/simple/with_cid_test.go
+++ b/tests/integration/query/simple/with_cid_test.go
@@ -13,10 +13,6 @@ package simple
import (
"testing"
- "github.com/sourcenetwork/immutable"
- "github.com/stretchr/testify/require"
-
- "github.com/sourcenetwork/defradb/tests/change_detector"
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -44,20 +40,45 @@ func TestQuerySimpleWithInvalidCid(t *testing.T) {
executeTestCase(t, test)
}
-// This test documents a bug:
-// https://github.com/sourcenetwork/defradb/issues/3214
func TestQuerySimpleWithCid(t *testing.T) {
- if change_detector.Enabled {
- t.Skipf("Change detector does not support requiring panics")
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users {
+ name: String
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John"
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users (
+ cid: "bafyreib7afkd5hepl45wdtwwpai433bhnbd3ps5m2rv3masctda7b6mmxe"
+ ) {
+ name
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "name": "John",
+ },
+ },
+ },
+ },
+ },
}
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQuerySimpleWithCid_MultipleDocs(t *testing.T) {
test := testUtils.TestCase{
- SupportedClientTypes: immutable.Some(
- []testUtils.ClientType{
- // The CLI/Http clients don't panic in this context
- testUtils.GoClientType,
- },
- ),
Actions: []any{
testUtils.SchemaUpdate{
Schema: `
@@ -71,6 +92,11 @@ func TestQuerySimpleWithCid(t *testing.T) {
"name": "John"
}`,
},
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "Fred"
+ }`,
+ },
testUtils.Request{
Request: `query {
Users (
@@ -90,7 +116,5 @@ func TestQuerySimpleWithCid(t *testing.T) {
},
}
- require.Panics(t, func() {
- testUtils.ExecuteTestCase(t, test)
- })
+ testUtils.ExecuteTestCase(t, test)
}
From fa0d92bd383bc9a491e64ef2e7c11ae1bda187bf Mon Sep 17 00:00:00 2001
From: AndrewSisley
Date: Wed, 20 Nov 2024 05:54:50 -0500
Subject: [PATCH 27/47] feat: Add support for branchable collection
time-traveling (#3260)
## Relevant issue(s)
Resolves #3257
## Description
Add support for branchable collection time-traveling.
Also fixes the docID param which was misbehaving (see commit `Create
document with actual docID not user provided value`).
---
internal/db/fetcher/versioned.go | 103 ++++++-----
internal/planner/select.go | 12 +-
.../request/graphql/schema/descriptions.go | 9 +-
.../query/simple/with_cid_branchable_test.go | 161 ++++++++++++++++++
.../simple/with_cid_doc_id_branchable_test.go | 67 ++++++++
.../query/simple/with_cid_doc_id_test.go | 4 +-
6 files changed, 287 insertions(+), 69 deletions(-)
create mode 100644 tests/integration/query/simple/with_cid_branchable_test.go
create mode 100644 tests/integration/query/simple/with_cid_doc_id_branchable_test.go
diff --git a/internal/db/fetcher/versioned.go b/internal/db/fetcher/versioned.go
index 199ca38d21..01afe7e2e2 100644
--- a/internal/db/fetcher/versioned.go
+++ b/internal/db/fetcher/versioned.go
@@ -16,7 +16,7 @@ import (
"fmt"
"github.com/ipfs/go-cid"
- ds "github.com/ipfs/go-datastore"
+ cidlink "github.com/ipld/go-ipld-prime/linking/cid"
"github.com/sourcenetwork/immutable"
@@ -89,15 +89,11 @@ type VersionedFetcher struct {
root datastore.Rootstore
store datastore.Txn
- dsKey keys.DataStoreKey
-
queuedCids *list.List
acp immutable.Option[acp.ACP]
col client.Collection
- // @todo index *client.IndexDescription
- mCRDTs map[client.FieldID]merklecrdt.MerkleCRDT
}
// Init initializes the VersionedFetcher.
@@ -116,7 +112,6 @@ func (vf *VersionedFetcher) Init(
vf.acp = acp
vf.col = col
vf.queuedCids = list.New()
- vf.mCRDTs = make(map[client.FieldID]merklecrdt.MerkleCRDT)
vf.txn = txn
// create store
@@ -158,10 +153,6 @@ func (vf *VersionedFetcher) Start(ctx context.Context, spans ...core.Span) error
prefix := spans[0].Start.(keys.HeadstoreDocKey)
vf.ctx = ctx
- vf.dsKey = keys.DataStoreKey{
- CollectionRootID: vf.col.Description().RootID,
- DocID: prefix.DocID,
- }
if err := vf.seekTo(prefix.Cid); err != nil {
return NewErrFailedToSeek(prefix.Cid, err)
@@ -170,11 +161,6 @@ func (vf *VersionedFetcher) Start(ctx context.Context, spans ...core.Span) error
return vf.DocumentFetcher.Start(ctx)
}
-// Rootstore returns the rootstore of the VersionedFetcher.
-func (vf *VersionedFetcher) Rootstore() ds.Datastore {
- return vf.root
-}
-
// Start a fetcher with the needed info (cid embedded in a span)
/*
@@ -324,56 +310,63 @@ func (vf *VersionedFetcher) merge(c cid.Cid) error {
return err
}
- link, err := block.GenerateLink()
- if err != nil {
- return err
- }
+ var mcrdt merklecrdt.MerkleCRDT
+ switch {
+ case block.Delta.IsCollection():
+ mcrdt = merklecrdt.NewMerkleCollection(
+ vf.store,
+ keys.NewCollectionSchemaVersionKey(vf.col.Description().SchemaVersionID, vf.col.Description().ID),
+ keys.NewHeadstoreColKey(vf.col.Description().RootID),
+ )
- // first arg 0 is the index for the composite DAG in the mCRDTs cache
- mcrdt, exists := vf.mCRDTs[0]
- if !exists {
+ case block.Delta.IsComposite():
mcrdt = merklecrdt.NewMerkleCompositeDAG(
vf.store,
- keys.CollectionSchemaVersionKey{},
- vf.dsKey.WithFieldID(core.COMPOSITE_NAMESPACE),
+ keys.NewCollectionSchemaVersionKey(block.Delta.GetSchemaVersionID(), vf.col.Description().RootID),
+ keys.DataStoreKey{
+ CollectionRootID: vf.col.Description().RootID,
+ DocID: string(block.Delta.GetDocID()),
+ FieldID: fmt.Sprint(core.COMPOSITE_NAMESPACE),
+ },
)
- vf.mCRDTs[0] = mcrdt
- }
- err = mcrdt.Clock().ProcessBlock(vf.ctx, block, link)
- if err != nil {
- return err
- }
-
- // handle subgraphs
- for _, l := range block.Links {
- // get node
- subBlock, err := vf.getDAGBlock(l.Link.Cid)
- if err != nil {
- return err
- }
- field, ok := vf.col.Definition().GetFieldByName(l.Name)
+ default:
+ field, ok := vf.col.Definition().GetFieldByName(block.Delta.GetFieldName())
if !ok {
- return client.NewErrFieldNotExist(l.Name)
+ return client.NewErrFieldNotExist(block.Delta.GetFieldName())
}
- mcrdt, exists := vf.mCRDTs[field.ID]
- if !exists {
- mcrdt, err = merklecrdt.FieldLevelCRDTWithStore(
- vf.store,
- keys.CollectionSchemaVersionKey{},
- field.Typ,
- field.Kind,
- vf.dsKey.WithFieldID(fmt.Sprint(field.ID)),
- field.Name,
- )
- if err != nil {
- return err
- }
- vf.mCRDTs[field.ID] = mcrdt
+ mcrdt, err = merklecrdt.FieldLevelCRDTWithStore(
+ vf.store,
+ keys.NewCollectionSchemaVersionKey(block.Delta.GetSchemaVersionID(), vf.col.Description().RootID),
+ field.Typ,
+ field.Kind,
+ keys.DataStoreKey{
+ CollectionRootID: vf.col.Description().RootID,
+ DocID: string(block.Delta.GetDocID()),
+ FieldID: fmt.Sprint(field.ID),
+ },
+ field.Name,
+ )
+ if err != nil {
+ return err
}
+ }
- err = mcrdt.Clock().ProcessBlock(vf.ctx, subBlock, l.Link)
+ err = mcrdt.Clock().ProcessBlock(
+ vf.ctx,
+ block,
+ cidlink.Link{
+ Cid: c,
+ },
+ )
+ if err != nil {
+ return err
+ }
+
+ // handle subgraphs
+ for _, l := range block.AllLinks() {
+ err = vf.merge(l.Cid)
if err != nil {
return err
}
diff --git a/internal/planner/select.go b/internal/planner/select.go
index 56245666cf..6078d67650 100644
--- a/internal/planner/select.go
+++ b/internal/planner/select.go
@@ -263,17 +263,15 @@ func (n *selectNode) initSource() ([]aggregateNode, error) {
return nil, err
}
- var docID string
- if len(n.selectReq.DocIDs.Value()) > 0 {
- docID = n.selectReq.DocIDs.Value()[0]
- }
-
+ // This exists because the fetcher interface demands a []Span, yet the versioned
+ // fetcher type (that will be the only one consuming this []Span) does not use it
+ // as either a span, or even a prefix. And with this design limitation this is
+ // currently the least bad way of passing the cid in to the fetcher.
origScan.Spans(
[]core.Span{
core.NewSpan(
keys.HeadstoreDocKey{
- DocID: docID,
- Cid: c,
+ Cid: c,
},
keys.HeadstoreDocKey{},
),
diff --git a/internal/request/graphql/schema/descriptions.go b/internal/request/graphql/schema/descriptions.go
index 07a6873d61..b7c0373abc 100644
--- a/internal/request/graphql/schema/descriptions.go
+++ b/internal/request/graphql/schema/descriptions.go
@@ -73,11 +73,10 @@ An optional set of docIDs for this field. Only documents with a docID
be ignored.
`
cidArgDescription string = `
-An optional value that specifies the commit ID of a document to return.
- This CID does not need to be the most recent for a document, if it
- corresponds to an older version of a document the document will be returned
- at the state it was in at the time of that commit. If a matching commit is
- not found then an empty set will be returned.
+An optional value that specifies the commit ID of a document or a branchable collection.
+ This CID does not need to be the most recent. If it corresponds to an older version
+ the document(s) will be returned at the state they were in at the time of that commit.
+ If a matching commit is not found then an empty set will be returned.
`
singleFieldFilterArgDescription string = `
An optional filter for this join, if the related record does
diff --git a/tests/integration/query/simple/with_cid_branchable_test.go b/tests/integration/query/simple/with_cid_branchable_test.go
new file mode 100644
index 0000000000..58590a5d5a
--- /dev/null
+++ b/tests/integration/query/simple/with_cid_branchable_test.go
@@ -0,0 +1,161 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package simple
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQuerySimpleWithCidOfBranchableCollection_FirstCid(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable {
+ name: String
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "Fred"
+ }`,
+ },
+ testUtils.UpdateDoc{
+ Doc: `{
+ "name": "Freddddd"
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John"
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users (
+ cid: "bafyreiewwsnu2ld5qlntamdm77ayb7xtmxz3p5difvaaakaome7zbtpo4u"
+ ) {
+ name
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "name": "Fred",
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQuerySimpleWithCidOfBranchableCollection_MiddleCid(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable {
+ name: String
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "Fred"
+ }`,
+ },
+ testUtils.UpdateDoc{
+ Doc: `{
+ "name": "Freddddd"
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John"
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users (
+ cid: "bafyreifpamlyhcbriztgbhds5ctgi5rm6w5wcar2py7246lo6j5v7iusxm"
+ ) {
+ name
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "name": "Freddddd",
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQuerySimpleWithCidOfBranchableCollection_LastCid(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable {
+ name: String
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "Fred"
+ }`,
+ },
+ testUtils.UpdateDoc{
+ Doc: `{
+ "name": "Freddddd"
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John"
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users (
+ cid: "bafyreigmt6ytph32jjxts2bij7fkne5ntionldsnklp35vcamvvl2x3a5i"
+ ) {
+ name
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "name": "Freddddd",
+ },
+ {
+ "name": "John",
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/query/simple/with_cid_doc_id_branchable_test.go b/tests/integration/query/simple/with_cid_doc_id_branchable_test.go
new file mode 100644
index 0000000000..18b8a55ad6
--- /dev/null
+++ b/tests/integration/query/simple/with_cid_doc_id_branchable_test.go
@@ -0,0 +1,67 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package simple
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQuerySimpleWithCidOfBranchableCollectionAndDocID(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users @branchable {
+ name: String
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "Fred"
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John"
+ }`,
+ },
+ testUtils.UpdateDoc{
+ Doc: `{
+ "name": "Freddddd"
+ }`,
+ },
+ testUtils.Request{
+ // This is the cid of the collection-commit when the second doc (John) is created.
+ // Without the docID param both John and Fred should be returned.
+ Request: `query {
+ Users (
+ cid: "bafyreiboen2mw2unu4fty2pyyd5nicqi57vcdahrrag6bjm54md5myj54u",
+ docID: "bae-3a7df128-bfa9-559a-a9c5-96f2bf6d1038"
+ ) {
+ name
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "name": "Fred",
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/query/simple/with_cid_doc_id_test.go b/tests/integration/query/simple/with_cid_doc_id_test.go
index 8c6476b1e5..29a630ac19 100644
--- a/tests/integration/query/simple/with_cid_doc_id_test.go
+++ b/tests/integration/query/simple/with_cid_doc_id_test.go
@@ -335,7 +335,7 @@ func TestCidAndDocIDQuery_ContainsPNCounterWithIntKind_NoError(t *testing.T) {
Request: `query {
Users (
cid: "bafyreihsqayh6zvmjrvmma3sjmrb4bkeiyy6l56nt6y2t2tm4xajkif3gu",
- docID: "bae-d8cb53d4-ac5a-5c55-8306-64df633d400d"
+ docID: "bae-bc5464e4-26a6-5307-b516-aada0abeb089"
) {
name
points
@@ -389,7 +389,7 @@ func TestCidAndDocIDQuery_ContainsPNCounterWithFloatKind_NoError(t *testing.T) {
Request: `query {
Users (
cid: "bafyreigkdjnvkpqfjoqoke3aqc3b6ibb45xjuxx5djpk7c6tart2lw3dcm",
- docID: "bae-d420ebcd-023a-5800-ae2e-8ea89442318e"
+ docID: "bae-2c7c40a7-92c1-5ed4-8a00-9e8595514945"
) {
name
points
From 3fa579ec10bdfdbc738cb2e80b2266a71913ec77 Mon Sep 17 00:00:00 2001
From: Fred Carle
Date: Wed, 20 Nov 2024 14:08:49 -0500
Subject: [PATCH 28/47] fix: Prevent over span (#3258)
## Relevant issue(s)
Resolves #3242
## Description
This PR solves the situation where deleted documents in the immediate
next collection by ID were returned on a full collection query. The
reason for this behaviour was due to the fetcher start method redefining
the spans based on wanting deleted docs or not and was defining an end
key that might have been "prefix-ended" based on a shorter prefix (i.e.
collection instead of instance type). ~~The solution was to always
redefine the end key as the prefix end of the start key.~~
To fix this we removed the concept of spans and replaced it with a list
of prefixes. This results in the fetcher being asked, for example, for
all docs in collection 1 with a prefix of `/data/1` instead of a span
from `/data/1` to `/data/2`. Furthermore, when the fetcher checks if it
need to get deleted docs or non-deleted docs, the resulting prefix
becomes `/data/1/< v or d >` instead of the span from `/data/1/< v or d
>` to `/data/2/< v or d >` (the span is wrong).
The first commit documents the bug with an integration test.
---
internal/core/data.go | 228 ------
internal/core/data_test.go | 704 ------------------
internal/db/collection_get.go | 3 +-
internal/db/collection_index.go | 7 +-
internal/db/fetcher/errors.go | 1 -
internal/db/fetcher/fetcher.go | 99 +--
internal/db/fetcher/indexer.go | 8 +-
internal/db/fetcher/mocks/fetcher.go | 30 +-
internal/db/fetcher/versioned.go | 10 +-
internal/lens/fetcher.go | 4 +-
internal/planner/arbitrary_join.go | 7 +-
internal/planner/average.go | 12 +-
internal/planner/commit.go | 28 +-
internal/planner/count.go | 3 +-
internal/planner/create.go | 15 +-
internal/planner/delete.go | 6 +-
internal/planner/explain.go | 2 +-
internal/planner/group.go | 5 +-
internal/planner/lens.go | 5 +-
internal/planner/limit.go | 9 +-
internal/planner/max.go | 15 +-
internal/planner/min.go | 15 +-
internal/planner/multi.go | 10 +-
internal/planner/operation.go | 5 +-
internal/planner/order.go | 3 +-
internal/planner/pipe.go | 9 +-
internal/planner/planner.go | 5 +-
internal/planner/scan.go | 44 +-
internal/planner/select.go | 34 +-
internal/planner/sum.go | 3 +-
internal/planner/top.go | 5 +-
internal/planner/type_join.go | 13 +-
internal/planner/update.go | 4 +-
internal/planner/upsert.go | 8 +-
internal/planner/values.go | 7 +-
internal/planner/view.go | 6 +-
.../integration/explain/default/basic_test.go | 7 +-
.../explain/default/dagscan_test.go | 28 +-
.../explain/default/delete_test.go | 52 +-
.../default/group_with_doc_id_child_test.go | 7 +-
.../explain/default/group_with_doc_id_test.go | 19 +-
.../default/group_with_filter_child_test.go | 14 +-
.../explain/default/group_with_filter_test.go | 7 +-
.../explain/default/top_with_average_test.go | 14 +-
.../explain/default/top_with_count_test.go | 14 +-
.../explain/default/top_with_max_test.go | 14 +-
.../explain/default/top_with_min_test.go | 14 +-
.../explain/default/top_with_sum_test.go | 14 +-
.../explain/default/type_join_many_test.go | 14 +-
.../explain/default/type_join_one_test.go | 35 +-
.../explain/default/type_join_test.go | 42 +-
.../type_join_with_filter_doc_id_test.go | 19 +-
.../default/type_join_with_filter_test.go | 14 +-
.../explain/default/update_test.go | 38 +-
.../explain/default/upsert_test.go | 7 +-
.../explain/default/with_average_join_test.go | 42 +-
.../explain/default/with_average_test.go | 7 +-
.../explain/default/with_count_join_test.go | 42 +-
.../explain/default/with_count_test.go | 7 +-
.../default/with_filter_doc_id_test.go | 45 +-
.../explain/default/with_filter_test.go | 49 +-
.../explain/default/with_max_join_test.go | 56 +-
.../explain/default/with_max_test.go | 7 +-
.../explain/default/with_min_join_test.go | 56 +-
.../explain/default/with_min_test.go | 7 +-
.../explain/default/with_sum_join_test.go | 56 +-
.../explain/default/with_sum_test.go | 7 +-
.../integration/explain/simple/basic_test.go | 7 +-
.../inline_array/with_max_doc_id_test.go | 2 +-
.../inline_array/with_min_doc_id_test.go | 2 +-
tests/integration/query/simple/simple_test.go | 78 ++
.../migrations/query/with_doc_id_test.go | 2 +-
72 files changed, 514 insertions(+), 1703 deletions(-)
delete mode 100644 internal/core/data.go
delete mode 100644 internal/core/data_test.go
diff --git a/internal/core/data.go b/internal/core/data.go
deleted file mode 100644
index d84186826c..0000000000
--- a/internal/core/data.go
+++ /dev/null
@@ -1,228 +0,0 @@
-// Copyright 2022 Democratized Data Foundation
-//
-// Use of this software is governed by the Business Source License
-// included in the file licenses/BSL.txt.
-//
-// As of the Change Date specified in that file, in accordance with
-// the Business Source License, use of this software will be governed
-// by the Apache License, Version 2.0, included in the file
-// licenses/APL.txt.
-
-package core
-
-import (
- "strings"
-
- "github.com/sourcenetwork/defradb/internal/keys"
-)
-
-// Span is a range of keys from [Start, End).
-type Span struct {
- // Start represents the starting key of the Span.
- Start keys.Walkable
-
- // End represents the ending key of the Span.
- End keys.Walkable
-}
-
-// NewSpan creates a new Span from the provided start and end keys.
-func NewSpan(start, end keys.Walkable) Span {
- return Span{
- Start: start,
- End: end,
- }
-}
-
-// SpanComparisonResult is the result of comparing two spans.
-type SpanComparisonResult uint
-
-const (
- Before SpanComparisonResult = iota
- StartBeforeEndEqualToStart
- StartBeforeEndWithin
- StartBeforeEndEqual
- StartBeforeEndAfter
- StartEqualEndWithin
- Equal
- StartEqualEndAfter
- StartWithinEndWithin
- StartWithinEndAfter
- StartWithinEndEqual
- StartEqualToEndEndAfter
- After
-)
-
-// Compares two spans returning how the compare to each other.
-// If the end of one span is adjacent to the other (with no gap possible)
-// then those ends are considered equal.
-func (this Span) Compare(other Span) SpanComparisonResult {
- if this == other {
- return Equal
- }
-
- thisStart := this.Start.ToString()
- thisEnd := this.End.ToString()
- otherStart := other.Start.ToString()
- otherEnd := other.End.ToString()
-
- if thisStart < otherStart {
- if thisEnd == otherStart || isAdjacent(this.End, other.Start) {
- return StartBeforeEndEqualToStart
- }
-
- if thisEnd < otherStart {
- return Before
- }
-
- if thisEnd < otherEnd || strings.HasPrefix(thisEnd, otherEnd) {
- return StartBeforeEndWithin
- }
-
- if thisEnd == otherEnd {
- return StartBeforeEndEqual
- }
-
- if thisEnd > otherEnd {
- return StartBeforeEndAfter
- }
- }
-
- if thisStart == otherStart {
- if thisEnd < otherEnd || strings.HasPrefix(thisEnd, otherEnd) {
- return StartEqualEndWithin
- }
-
- if thisEnd == otherEnd {
- return Equal
- }
-
- if thisEnd > otherEnd {
- return StartEqualEndAfter
- }
- }
-
- if thisStart < otherEnd {
- if thisEnd < otherEnd || strings.HasPrefix(thisEnd, otherEnd) {
- return StartWithinEndWithin
- }
-
- if thisEnd == otherEnd {
- return StartWithinEndEqual
- }
-
- if thisEnd > otherEnd {
- return StartWithinEndAfter
- }
- }
-
- if thisStart == otherEnd || isAdjacent(this.Start, other.End) {
- return StartEqualToEndEndAfter
- }
-
- return After
-}
-
-func isAdjacent(this keys.Walkable, other keys.Walkable) bool {
- return len(this.ToString()) == len(other.ToString()) &&
- (this.PrefixEnd().ToString() == other.ToString() ||
- this.ToString() == other.PrefixEnd().ToString())
-}
-
-// Merges an unordered, potentially overlapping and/or duplicated collection of Spans into
-// a unique set in ascending order, where overlapping spans are merged into a single span.
-// Will handle spans with keys of different lengths, where one might be a prefix of another.
-// Adjacent spans will also be merged.
-func MergeAscending(spans []Span) []Span {
- if len(spans) <= 1 {
- return spans
- }
-
- uniqueSpans := []Span{}
-
- for _, span := range spans {
- uniqueSpanFound := false
-
- i := 0
- for i < len(uniqueSpans) {
- uniqueSpan := uniqueSpans[i]
- switch span.Compare(uniqueSpan) {
- case Before:
- // Shift all remaining unique spans one place to the right
- newArray := make([]Span, len(uniqueSpans)+1)
- for j := len(uniqueSpans); j > i; j-- {
- newArray[j] = uniqueSpans[i]
- }
-
- // Then we insert
- newArray[i] = NewSpan(span.Start, span.End)
-
- // Move the values prior to the new one across
- for j := 0; j < i; j++ {
- newArray[j] = uniqueSpans[j]
- }
- uniqueSpans = newArray
- uniqueSpanFound = true
- // Exit the unique-span loop, this span has been handled
- i = len(uniqueSpans)
- case StartBeforeEndEqualToStart, StartBeforeEndWithin, StartBeforeEndEqual:
- uniqueSpans[i] = NewSpan(span.Start, uniqueSpan.End)
- uniqueSpanFound = true
- i++
- case StartBeforeEndAfter:
- uniqueSpans = removeBefore(uniqueSpans, i, span.End.ToString())
- uniqueSpans[i] = NewSpan(span.Start, span.End)
- uniqueSpanFound = true
- // Exit the unique-span loop, this span has been handled
- i = len(uniqueSpans)
- case StartEqualEndWithin, Equal, StartWithinEndWithin, StartWithinEndEqual:
- uniqueSpanFound = true
- // Do nothing, span is contained within an existing unique-span
- i = len(uniqueSpans)
- case StartEqualEndAfter, StartWithinEndAfter, StartEqualToEndEndAfter:
- uniqueSpans = removeBefore(uniqueSpans, i, span.End.ToString())
- uniqueSpans[i] = NewSpan(uniqueSpan.Start, span.End)
- uniqueSpanFound = true
- // Exit the unique-span loop, this span has been handled
- i = len(uniqueSpans)
- case After:
- i++
- }
- }
-
- if !uniqueSpanFound {
- uniqueSpans = append(uniqueSpans, span)
- }
- }
-
- return uniqueSpans
-}
-
-// Removes any items from the collection (given index onwards) who's end key is smaller
-// than the given value. The returned collection will be a different instance.
-func removeBefore(spans []Span, startIndex int, end string) []Span {
- indexOfLastMatchingItem := -1
- for i := startIndex; i < len(spans); i++ {
- if spans[i].End.ToString() <= end {
- indexOfLastMatchingItem = i
- }
- }
-
- if indexOfLastMatchingItem == -1 {
- return spans
- }
-
- numberOfItemsToRemove := indexOfLastMatchingItem - startIndex
- result := make([]Span, len(spans)-numberOfItemsToRemove)
- // Add the items preceding the removed items
- for i := 0; i < startIndex; i++ {
- result[i] = spans[i]
- }
-
- j := startIndex + numberOfItemsToRemove
- // Add the items following the removed items
- for i := indexOfLastMatchingItem + 1; i < len(spans); i++ {
- result[j] = spans[i]
- }
-
- return result
-}
diff --git a/internal/core/data_test.go b/internal/core/data_test.go
deleted file mode 100644
index 154441819e..0000000000
--- a/internal/core/data_test.go
+++ /dev/null
@@ -1,704 +0,0 @@
-// Copyright 2022 Democratized Data Foundation
-//
-// Use of this software is governed by the Business Source License
-// included in the file licenses/BSL.txt.
-//
-// As of the Change Date specified in that file, in accordance with
-// the Business Source License, use of this software will be governed
-// by the Apache License, Version 2.0, included in the file
-// licenses/APL.txt.
-
-package core
-
-import (
- "testing"
-
- "github.com/stretchr/testify/assert"
-
- "github.com/sourcenetwork/defradb/internal/keys"
-)
-
-func TestMergeAscending_ReturnsEmpty_GivenEmpty(t *testing.T) {
- input := []Span{}
-
- result := MergeAscending(input)
-
- assert.Empty(t, result)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenSingle(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
- input := []Span{NewSpan(start1, end1)}
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end1, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSecondBeforeFirst_GivenKeysInReverseOrder(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k4")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k5")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k2")
-
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 2)
- assert.Equal(t, start2, result[0].Start)
- assert.Equal(t, end2, result[0].End)
- assert.Equal(t, start1, result[1].Start)
- assert.Equal(t, end1, result[1].End)
-}
-
-func TestMergeAscending_ReturnsItemsInOrder_GivenKeysInMixedOrder(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k7")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k8")
- start3 := keys.MustNewDataStoreKey("/1/p/0/k4")
- end3 := keys.MustNewDataStoreKey("/1/p/0/k5")
-
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- NewSpan(start3, end3),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 3)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end1, result[0].End)
- // Span 3 should be returned between one and two
- assert.Equal(t, start3, result[1].Start)
- assert.Equal(t, end3, result[1].End)
- assert.Equal(t, start2, result[2].Start)
- assert.Equal(t, end2, result[2].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndEqualToStart(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k4")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k3")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start)
- assert.Equal(t, end1, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentToStart(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k4")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k2")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start)
- assert.Equal(t, end1, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndWithin(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k4")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k3.5")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start)
- assert.Equal(t, end1, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndWithin(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1.1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k3")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k2.5")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start)
- assert.Equal(t, end1, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndWithinEndPrefix(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k4")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k4.5")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start)
- assert.Equal(t, end1, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndWithinEndPrefix(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1.1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k3")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k3.5")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start)
- assert.Equal(t, end1, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndEqual(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k4")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k4")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start)
- assert.Equal(t, end1, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentAndBefore(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k5")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k4")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start)
- assert.Equal(t, end1, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentAndGreater(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k4")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k5")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start)
- assert.Equal(t, end2, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndEqual(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1.1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k3")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k3")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start)
- assert.Equal(t, end1, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndAdjacentAndBefore(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1.1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k3")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k2")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start)
- assert.Equal(t, end1, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndAdjacentAndAfter(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1.1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k3")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k4")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start2, result[0].Start)
- assert.Equal(t, end2, result[0].End)
-}
-
-func TestMergeAscending_ReturnsMiddleSpansMerged_GivenSpanCoveringMiddleSpans(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k6")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k7")
- start3 := keys.MustNewDataStoreKey("/1/p/0/k9")
- end3 := keys.MustNewDataStoreKey("/1/p/0/ka")
- start4 := keys.MustNewDataStoreKey("/1/p/0/kc")
- end4 := keys.MustNewDataStoreKey("/1/p/0/kd")
- start5 := keys.MustNewDataStoreKey("/1/p/0/k4")
- end5 := keys.MustNewDataStoreKey("/1/p/0/ka")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- NewSpan(start3, end3),
- NewSpan(start4, end4),
- NewSpan(start5, end5),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 3)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end1, result[0].End)
- // Spans 2 and 3 are within span 5
- assert.Equal(t, start5, result[1].Start)
- assert.Equal(t, end5, result[1].End)
- assert.Equal(t, start4, result[2].Start)
- assert.Equal(t, end4, result[2].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartEqualEndWithin(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k1.5")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end1, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartEqualEndWithinEndPrefix(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k2.5")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end1, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenDuplicates(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start1, end1),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end1, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartWithinEndWithin(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k1.2")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k1.5")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end1, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartWithinEndWithinEndPrefix(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k1.2")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k2.5")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end1, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartWithinEndEqual(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k1.2")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k2")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end1, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartWithinEndAdjacentAndBefore(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k3")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k1.2")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k2")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end1, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartWithinEndAdjacentAndAfter(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k3")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k1.2")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k4")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end2, result[0].End)
-}
-
-func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartEqualEndAfterSpanCoveringMiddleSpans(
- t *testing.T,
-) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k4")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k5")
- start3 := keys.MustNewDataStoreKey("/1/p/0/k7")
- end3 := keys.MustNewDataStoreKey("/1/p/0/k8")
- start4 := keys.MustNewDataStoreKey("/1/p/0/kc")
- end4 := keys.MustNewDataStoreKey("/1/p/0/kd")
- start5 := keys.MustNewDataStoreKey("/1/p/0/k4") // equal to start2
- end5 := keys.MustNewDataStoreKey("/1/p/0/ka")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- NewSpan(start3, end3),
- NewSpan(start4, end4),
- NewSpan(start5, end5),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 3)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end1, result[0].End)
- // Spans 2 and 3 are within span 5
- assert.Equal(t, start5, result[1].Start)
- assert.Equal(t, end5, result[1].End)
- assert.Equal(t, start4, result[2].Start)
- assert.Equal(t, end4, result[2].End)
-}
-
-func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartWithinEndAfterSpanCoveringMiddleSpans(
- t *testing.T,
-) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k4")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k5")
- start3 := keys.MustNewDataStoreKey("/1/p/0/k7")
- end3 := keys.MustNewDataStoreKey("/1/p/0/k8")
- start4 := keys.MustNewDataStoreKey("/1/p/0/kc")
- end4 := keys.MustNewDataStoreKey("/1/p/0/kd")
- start5 := keys.MustNewDataStoreKey("/1/p/0/k4.5") // within span2
- end5 := keys.MustNewDataStoreKey("/1/p/0/ka")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- NewSpan(start3, end3),
- NewSpan(start4, end4),
- NewSpan(start5, end5),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 3)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end1, result[0].End)
- assert.Equal(t, start2, result[1].Start)
- assert.Equal(t, end5, result[1].End)
- assert.Equal(t, start4, result[2].Start)
- assert.Equal(t, end4, result[2].End)
-}
-
-func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartEqualToEndEndAfterSpanCoveringMiddleSpans(
- t *testing.T,
-) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k4")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k5")
- start3 := keys.MustNewDataStoreKey("/1/p/0/k7")
- end3 := keys.MustNewDataStoreKey("/1/p/0/k8")
- start4 := keys.MustNewDataStoreKey("/1/p/0/kc")
- end4 := keys.MustNewDataStoreKey("/1/p/0/kd")
- start5 := keys.MustNewDataStoreKey("/1/p/0/k5") // span2's end
- end5 := keys.MustNewDataStoreKey("/1/p/0/ka")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- NewSpan(start3, end3),
- NewSpan(start4, end4),
- NewSpan(start5, end5),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 3)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end1, result[0].End)
- assert.Equal(t, start2, result[1].Start)
- assert.Equal(t, end5, result[1].End)
- assert.Equal(t, start4, result[2].Start)
- assert.Equal(t, end4, result[2].End)
-}
-
-func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartAdjacentAndBeforeEndEndAfterSpanCoveringMiddleSpans(
- t *testing.T,
-) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k4")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k6")
- start3 := keys.MustNewDataStoreKey("/1/p/0/k8")
- end3 := keys.MustNewDataStoreKey("/1/p/0/k9")
- start4 := keys.MustNewDataStoreKey("/1/p/0/kd")
- end4 := keys.MustNewDataStoreKey("/1/p/0/ke")
- start5 := keys.MustNewDataStoreKey("/1/p/0/k5") // adjacent but before span2's end
- end5 := keys.MustNewDataStoreKey("/1/p/0/kb")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- NewSpan(start3, end3),
- NewSpan(start4, end4),
- NewSpan(start5, end5),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 3)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end1, result[0].End)
- assert.Equal(t, start2, result[1].Start)
- assert.Equal(t, end5, result[1].End)
- assert.Equal(t, start4, result[2].Start)
- assert.Equal(t, end4, result[2].End)
-}
-
-func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartAdjacentAndAfterEndEndAfterSpanCoveringMiddleSpans(
- t *testing.T,
-) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k4")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k5")
- start3 := keys.MustNewDataStoreKey("/1/p/0/k8")
- end3 := keys.MustNewDataStoreKey("/1/p/0/k9")
- start4 := keys.MustNewDataStoreKey("/1/p/0/kd")
- end4 := keys.MustNewDataStoreKey("/1/p/0/ke")
- start5 := keys.MustNewDataStoreKey("/1/p/0/k6") // adjacent and after span2's end
- end5 := keys.MustNewDataStoreKey("/1/p/0/kb")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- NewSpan(start3, end3),
- NewSpan(start4, end4),
- NewSpan(start5, end5),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 3)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end1, result[0].End)
- assert.Equal(t, start2, result[1].Start)
- assert.Equal(t, end5, result[1].End)
- assert.Equal(t, start4, result[2].Start)
- assert.Equal(t, end4, result[2].End)
-}
-
-func TestMergeAscending_ReturnsTwoItems_GivenSecondItemAfterFirst(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k1")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k2")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k4")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k5")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 2)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end1, result[0].End)
- assert.Equal(t, start2, result[1].Start)
- assert.Equal(t, end2, result[1].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndEqual(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k6")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k5")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k6")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end2, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndAdjacentAndAfter(
- t *testing.T,
-) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k6")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k5")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k7")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end2, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndAfter(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k6")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k5")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k8")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end2, result[0].End)
-}
-
-func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndAfterEndEndAfter(t *testing.T) {
- start1 := keys.MustNewDataStoreKey("/1/p/0/k3")
- end1 := keys.MustNewDataStoreKey("/1/p/0/k6")
- start2 := keys.MustNewDataStoreKey("/1/p/0/k7")
- end2 := keys.MustNewDataStoreKey("/1/p/0/k8")
- input := []Span{
- NewSpan(start1, end1),
- NewSpan(start2, end2),
- }
-
- result := MergeAscending(input)
-
- assert.Len(t, result, 1)
- assert.Equal(t, start1, result[0].Start)
- assert.Equal(t, end2, result[0].End)
-}
diff --git a/internal/db/collection_get.go b/internal/db/collection_get.go
index e68df05df4..8360cc0915 100644
--- a/internal/db/collection_get.go
+++ b/internal/db/collection_get.go
@@ -15,7 +15,6 @@ import (
"github.com/sourcenetwork/defradb/acp/identity"
"github.com/sourcenetwork/defradb/client"
- "github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/base"
"github.com/sourcenetwork/defradb/internal/db/fetcher"
"github.com/sourcenetwork/defradb/internal/keys"
@@ -73,7 +72,7 @@ func (c *collection) get(
// construct target DS key from DocID.
targetKey := base.MakeDataStoreKeyWithCollectionAndDocID(c.Description(), primaryKey.DocID)
// run the doc fetcher
- err = df.Start(ctx, core.NewSpan(targetKey, targetKey.PrefixEnd()))
+ err = df.Start(ctx, targetKey)
if err != nil {
_ = df.Close()
return nil, err
diff --git a/internal/db/collection_index.go b/internal/db/collection_index.go
index f268e14f2e..3c5da18c58 100644
--- a/internal/db/collection_index.go
+++ b/internal/db/collection_index.go
@@ -24,7 +24,6 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/client/request"
"github.com/sourcenetwork/defradb/datastore"
- "github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/base"
"github.com/sourcenetwork/defradb/internal/db/description"
"github.com/sourcenetwork/defradb/internal/db/fetcher"
@@ -316,10 +315,8 @@ func (c *collection) iterateAllDocs(
if err != nil {
return errors.Join(err, df.Close())
}
- start := base.MakeDataStoreKeyWithCollectionDescription(c.Description())
- spans := core.NewSpan(start, start.PrefixEnd())
-
- err = df.Start(ctx, spans)
+ prefix := base.MakeDataStoreKeyWithCollectionDescription(c.Description())
+ err = df.Start(ctx, prefix)
if err != nil {
return errors.Join(err, df.Close())
}
diff --git a/internal/db/fetcher/errors.go b/internal/db/fetcher/errors.go
index 0a8a7d21b4..8836d3982d 100644
--- a/internal/db/fetcher/errors.go
+++ b/internal/db/fetcher/errors.go
@@ -45,7 +45,6 @@ var (
ErrVFetcherFailedToGetDagLink = errors.New(errVFetcherFailedToGetDagLink)
ErrFailedToGetDagNode = errors.New(errFailedToGetDagNode)
ErrMissingMapper = errors.New(errMissingMapper)
- ErrSingleSpanOnly = errors.New("spans must contain only a single entry")
ErrInvalidInOperatorValue = errors.New(errInvalidInOperatorValue)
ErrInvalidFilterOperator = errors.New(errInvalidFilterOperator)
ErrUnexpectedTypeValue = errors.New(errUnexpectedTypeValue)
diff --git a/internal/db/fetcher/fetcher.go b/internal/db/fetcher/fetcher.go
index 0ca828c4b2..422de57ad7 100644
--- a/internal/db/fetcher/fetcher.go
+++ b/internal/db/fetcher/fetcher.go
@@ -13,6 +13,7 @@ package fetcher
import (
"bytes"
"context"
+ "slices"
"strings"
"github.com/bits-and-blooms/bitset"
@@ -72,7 +73,7 @@ type Fetcher interface {
reverse bool,
showDeleted bool,
) error
- Start(ctx context.Context, spans ...core.Span) error
+ Start(ctx context.Context, prefixes ...keys.Walkable) error
FetchNext(ctx context.Context) (EncodedDocument, ExecInfo, error)
Close() error
}
@@ -97,10 +98,10 @@ type DocumentFetcher struct {
reverse bool
deletedDocs bool
- txn datastore.Txn
- spans []core.Span
- order []dsq.Order
- curSpanIndex int
+ txn datastore.Txn
+ prefixes []keys.DataStoreKey
+ order []dsq.Order
+ curPrefixIndex int
filter *mapper.Filter
ranFilter bool // did we run the filter
@@ -243,21 +244,21 @@ func (df *DocumentFetcher) init(
return nil
}
-func (df *DocumentFetcher) Start(ctx context.Context, spans ...core.Span) error {
- err := df.start(ctx, spans, false)
+func (df *DocumentFetcher) Start(ctx context.Context, prefixes ...keys.Walkable) error {
+ err := df.start(ctx, prefixes, false)
if err != nil {
return err
}
if df.deletedDocFetcher != nil {
- return df.deletedDocFetcher.start(ctx, spans, true)
+ return df.deletedDocFetcher.start(ctx, prefixes, true)
}
return nil
}
// Start implements DocumentFetcher.
-func (df *DocumentFetcher) start(ctx context.Context, spans []core.Span, withDeleted bool) error {
+func (df *DocumentFetcher) start(ctx context.Context, prefixes []keys.Walkable, withDeleted bool) error {
if df.col == nil {
return client.NewErrUninitializeProperty("DocumentFetcher", "CollectionDescription")
}
@@ -267,44 +268,46 @@ func (df *DocumentFetcher) start(ctx context.Context, spans []core.Span, withDel
df.deletedDocs = withDeleted
- if len(spans) == 0 { // no specified spans so create a prefix scan key for the entire collection
- start := base.MakeDataStoreKeyWithCollectionDescription(df.col.Description())
+ if len(prefixes) == 0 { // no specified prefixes so create a prefix scan key for the entire collection
+ prefix := base.MakeDataStoreKeyWithCollectionDescription(df.col.Description())
if withDeleted {
- start = start.WithDeletedFlag()
+ prefix = prefix.WithDeletedFlag()
} else {
- start = start.WithValueFlag()
+ prefix = prefix.WithValueFlag()
}
- df.spans = []core.Span{core.NewSpan(start, start.PrefixEnd())}
+ df.prefixes = []keys.DataStoreKey{prefix}
} else {
- valueSpans := make([]core.Span, len(spans))
- for i, span := range spans {
+ valuePrefixes := make([]keys.DataStoreKey, 0, len(prefixes))
+ prefixCache := make(map[string]struct{})
+ for _, prefix := range prefixes {
+ // if we have a duplicate prefix, skip it
+ if _, exists := prefixCache[prefix.ToString()]; exists {
+ continue
+ }
+ prefixCache[prefix.ToString()] = struct{}{}
 if withDeleted {
 // DocumentFetcher only ever recieves document keys
 //nolint:forcetypeassert
- valueSpans[i] = core.NewSpan(
- span.Start.(keys.DataStoreKey).WithDeletedFlag(),
- span.End.(keys.DataStoreKey).WithDeletedFlag(),
- )
+ valuePrefixes = append(valuePrefixes, prefix.(keys.DataStoreKey).WithDeletedFlag())
 } else {
 // DocumentFetcher only ever recieves document keys
 //nolint:forcetypeassert
- valueSpans[i] = core.NewSpan(
- span.Start.(keys.DataStoreKey).WithValueFlag(),
- span.End.(keys.DataStoreKey).WithValueFlag(),
- )
+ valuePrefixes = append(valuePrefixes, prefix.(keys.DataStoreKey).WithValueFlag())
 }
}
- spans := core.MergeAscending(valueSpans)
+ slices.SortFunc(valuePrefixes, func(a, b keys.DataStoreKey) int {
+ return strings.Compare(a.ToString(), b.ToString())
+ })
+
if df.reverse {
- for i, j := 0, len(spans)-1; i < j; i, j = i+1, j-1 {
- spans[i], spans[j] = spans[j], spans[i]
+ for i, j := 0, len(valuePrefixes)-1; i < j; i, j = i+1, j-1 {
+ valuePrefixes[i], valuePrefixes[j] = valuePrefixes[j], valuePrefixes[i]
}
}
- df.spans = spans
+ df.prefixes = valuePrefixes
}
-
- df.curSpanIndex = -1
+ df.curPrefixIndex = -1
if df.reverse {
df.order = []dsq.Order{dsq.OrderByKeyDescending{}}
@@ -312,13 +315,13 @@ func (df *DocumentFetcher) start(ctx context.Context, spans []core.Span, withDel
df.order = []dsq.Order{dsq.OrderByKey{}}
}
- _, err := df.startNextSpan(ctx)
+ _, err := df.startNextPrefix(ctx)
return err
}
-func (df *DocumentFetcher) startNextSpan(ctx context.Context) (bool, error) {
- nextSpanIndex := df.curSpanIndex + 1
- if nextSpanIndex >= len(df.spans) {
+func (df *DocumentFetcher) startNextPrefix(ctx context.Context) (bool, error) {
+ nextPrefixIndex := df.curPrefixIndex + 1
+ if nextPrefixIndex >= len(df.prefixes) {
return false, nil
}
@@ -339,12 +342,12 @@ func (df *DocumentFetcher) startNextSpan(ctx context.Context) (bool, error) {
}
}
- span := df.spans[nextSpanIndex]
- df.kvResultsIter, err = df.kvIter.IteratePrefix(ctx, span.Start.ToDS(), span.End.ToDS())
+ prefix := df.prefixes[nextPrefixIndex]
+ df.kvResultsIter, err = df.kvIter.IteratePrefix(ctx, prefix.ToDS(), prefix.PrefixEnd().ToDS())
if err != nil {
return false, err
}
- df.curSpanIndex = nextSpanIndex
+ df.curPrefixIndex = nextPrefixIndex
_, _, err = df.nextKey(ctx, false)
return err == nil, err
@@ -353,7 +356,7 @@ func (df *DocumentFetcher) startNextSpan(ctx context.Context) (bool, error) {
// nextKey gets the next kv. It sets both kv and kvEnd internally.
// It returns true if the current doc is completed.
// The first call to nextKey CANNOT have seekNext be true (ErrFailedToSeek)
-func (df *DocumentFetcher) nextKey(ctx context.Context, seekNext bool) (spanDone bool, docDone bool, err error) {
+func (df *DocumentFetcher) nextKey(ctx context.Context, seekNext bool) (prefixDone bool, docDone bool, err error) {
// safety against seekNext on first call
if seekNext && df.kv == nil {
return false, false, ErrFailedToSeek
@@ -363,13 +366,13 @@ func (df *DocumentFetcher) nextKey(ctx context.Context, seekNext bool) (spanDone
curKey := df.kv.Key
curKey.FieldID = "" // clear field so prefixEnd applies to docID
seekKey := curKey.PrefixEnd().ToString()
- spanDone, df.kv, err = df.seekKV(seekKey)
+ prefixDone, df.kv, err = df.seekKV(seekKey)
// handle any internal errors
if err != nil {
return false, false, err
}
} else {
- spanDone, df.kv, err = df.nextKV()
+ prefixDone, df.kv, err = df.nextKV()
// handle any internal errors
if err != nil {
return false, false, err
@@ -379,21 +382,21 @@ func (df *DocumentFetcher) nextKey(ctx context.Context, seekNext bool) (spanDone
if df.kv != nil && (df.kv.Key.InstanceType != keys.ValueKey && df.kv.Key.InstanceType != keys.DeletedKey) {
// We can only ready value values, if we escape the collection's value keys
// then we must be done and can stop reading
- spanDone = true
+ prefixDone = true
}
- df.kvEnd = spanDone
+ df.kvEnd = prefixDone
if df.kvEnd {
err = df.kvResultsIter.Close()
if err != nil {
return false, false, err
}
- moreSpans, err := df.startNextSpan(ctx)
+ morePrefixes, err := df.startNextPrefix(ctx)
if err != nil {
return false, false, err
}
df.isReadingDocument = false
- return !moreSpans, true, nil
+ return !morePrefixes, true, nil
}
// check if we've crossed document boundries
@@ -406,7 +409,7 @@ func (df *DocumentFetcher) nextKey(ctx context.Context, seekNext bool) (spanDone
// nextKV is a lower-level utility compared to nextKey. The differences are as follows:
// - It directly interacts with the KVIterator.
-// - Returns true if the entire iterator/span is exhausted
+// - Returns true if the entire iterator/prefix is exhausted
// - Returns a kv pair instead of internally updating
func (df *DocumentFetcher) nextKV() (iterDone bool, kv *keyValue, err error) {
done, dsKey, res, err := df.nextKVRaw()
@@ -458,7 +461,7 @@ func (df *DocumentFetcher) seekKV(key string) (bool, *keyValue, error) {
// nextKV is a lower-level utility compared to nextKey. The differences are as follows:
// - It directly interacts with the KVIterator.
-// - Returns true if the entire iterator/span is exhausted
+// - Returns true if the entire iterator/prefix is exhausted
// - Returns a kv pair instead of internally updating
func (df *DocumentFetcher) nextKVRaw() (bool, keys.DataStoreKey, dsq.Result, error) {
res, available := df.kvResultsIter.NextSync()
@@ -658,7 +661,7 @@ func (df *DocumentFetcher) fetchNext(ctx context.Context) (EncodedDocument, Exec
// if we don't pass the filter (ran and pass) or if we don't have access to document then
// there is no point in collecting other select fields, so we seek to the next doc.
- spansDone, docDone, err := df.nextKey(ctx, !df.passedPermissionCheck || !df.passedFilter && df.ranFilter)
+ prefixesDone, docDone, err := df.nextKey(ctx, !df.passedPermissionCheck || !df.passedFilter && df.ranFilter)
 if err != nil {
 return nil, ExecInfo{}, err
@@ -693,7 +696,7 @@ func (df *DocumentFetcher) fetchNext(ctx context.Context) (EncodedDocument, Exec
 }
 }
- if spansDone {
+ if prefixesDone {
return nil, df.execInfo, nil
}
}
diff --git a/internal/db/fetcher/indexer.go b/internal/db/fetcher/indexer.go
index 3f7b82b6e0..71881a31ab 100644
--- a/internal/db/fetcher/indexer.go
+++ b/internal/db/fetcher/indexer.go
@@ -21,6 +21,7 @@ import (
"github.com/sourcenetwork/defradb/datastore"
"github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/base"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -124,9 +125,9 @@ outer:
return err
}
-func (f *IndexFetcher) Start(ctx context.Context, spans ...core.Span) error {
+func (f *IndexFetcher) Start(ctx context.Context, prefixes ...keys.Walkable) error {
if f.indexIter == nil {
- return f.docFetcher.Start(ctx, spans...)
+ return f.docFetcher.Start(ctx, prefixes...)
}
return f.indexIter.Init(ctx, f.txn.Datastore())
}
@@ -192,8 +193,7 @@ func (f *IndexFetcher) FetchNext(ctx context.Context) (EncodedDocument, ExecInfo
if len(f.docFields) > 0 {
targetKey := base.MakeDataStoreKeyWithCollectionAndDocID(f.col.Description(), string(f.doc.id))
- span := core.NewSpan(targetKey, targetKey.PrefixEnd())
- err := f.docFetcher.Start(ctx, span)
+ err := f.docFetcher.Start(ctx, targetKey)
if err != nil {
return nil, ExecInfo{}, err
}
diff --git a/internal/db/fetcher/mocks/fetcher.go b/internal/db/fetcher/mocks/fetcher.go
index 396bf67345..a75d1c8ba9 100644
--- a/internal/db/fetcher/mocks/fetcher.go
+++ b/internal/db/fetcher/mocks/fetcher.go
@@ -18,6 +18,8 @@ import (
immutable "github.com/sourcenetwork/immutable"
+ keys "github.com/sourcenetwork/defradb/internal/keys"
+
mapper "github.com/sourcenetwork/defradb/internal/planner/mapper"
mock "github.com/stretchr/testify/mock"
@@ -201,11 +203,11 @@ func (_c *Fetcher_Init_Call) RunAndReturn(run func(context.Context, immutable.Op
return _c
}
-// Start provides a mock function with given fields: ctx, spans
-func (_m *Fetcher) Start(ctx context.Context, spans ...core.Span) error {
- _va := make([]interface{}, len(spans))
- for _i := range spans {
- _va[_i] = spans[_i]
+// Start provides a mock function with given fields: ctx, prefixes
+func (_m *Fetcher) Start(ctx context.Context, prefixes ...keys.Walkable) error {
+ _va := make([]interface{}, len(prefixes))
+ for _i := range prefixes {
+ _va[_i] = prefixes[_i]
}
var _ca []interface{}
_ca = append(_ca, ctx)
@@ -217,8 +219,8 @@ func (_m *Fetcher) Start(ctx context.Context, spans ...core.Span) error {
}
var r0 error
- if rf, ok := ret.Get(0).(func(context.Context, ...core.Span) error); ok {
- r0 = rf(ctx, spans...)
+ if rf, ok := ret.Get(0).(func(context.Context, ...keys.Walkable) error); ok {
+ r0 = rf(ctx, prefixes...)
} else {
r0 = ret.Error(0)
}
@@ -233,18 +235,18 @@ type Fetcher_Start_Call struct {
// Start is a helper method to define mock.On call
// - ctx context.Context
-// - spans ...core.Span
-func (_e *Fetcher_Expecter) Start(ctx interface{}, spans ...interface{}) *Fetcher_Start_Call {
+// - prefixes ...keys.Walkable
+func (_e *Fetcher_Expecter) Start(ctx interface{}, prefixes ...interface{}) *Fetcher_Start_Call {
return &Fetcher_Start_Call{Call: _e.mock.On("Start",
- append([]interface{}{ctx}, spans...)...)}
+ append([]interface{}{ctx}, prefixes...)...)}
}
-func (_c *Fetcher_Start_Call) Run(run func(ctx context.Context, spans ...core.Span)) *Fetcher_Start_Call {
+func (_c *Fetcher_Start_Call) Run(run func(ctx context.Context, prefixes ...keys.Walkable)) *Fetcher_Start_Call {
_c.Call.Run(func(args mock.Arguments) {
- variadicArgs := make([]core.Span, len(args)-1)
+ variadicArgs := make([]keys.Walkable, len(args)-1)
for i, a := range args[1:] {
if a != nil {
- variadicArgs[i] = a.(core.Span)
+ variadicArgs[i] = a.(keys.Walkable)
}
}
run(args[0].(context.Context), variadicArgs...)
@@ -257,7 +259,7 @@ func (_c *Fetcher_Start_Call) Return(_a0 error) *Fetcher_Start_Call {
return _c
}
-func (_c *Fetcher_Start_Call) RunAndReturn(run func(context.Context, ...core.Span) error) *Fetcher_Start_Call {
+func (_c *Fetcher_Start_Call) RunAndReturn(run func(context.Context, ...keys.Walkable) error) *Fetcher_Start_Call {
_c.Call.Return(run)
return _c
}
diff --git a/internal/db/fetcher/versioned.go b/internal/db/fetcher/versioned.go
index 01afe7e2e2..c362a9c9b3 100644
--- a/internal/db/fetcher/versioned.go
+++ b/internal/db/fetcher/versioned.go
@@ -147,10 +147,10 @@ func (vf *VersionedFetcher) Init(
}
// Start serializes the correct state according to the Key and CID.
-func (vf *VersionedFetcher) Start(ctx context.Context, spans ...core.Span) error {
+func (vf *VersionedFetcher) Start(ctx context.Context, prefixes ...keys.Walkable) error {
// VersionedFetcher only ever recieves a headstore key
//nolint:forcetypeassert
- prefix := spans[0].Start.(keys.HeadstoreDocKey)
+ prefix := prefixes[0].(keys.HeadstoreDocKey)
vf.ctx = ctx
@@ -161,16 +161,16 @@ func (vf *VersionedFetcher) Start(ctx context.Context, spans ...core.Span) error
return vf.DocumentFetcher.Start(ctx)
}
-// Start a fetcher with the needed info (cid embedded in a span)
+// Start a fetcher with the needed info (cid embedded in a prefix)
/*
1. Init with DocID (VersionedFetched is scoped to a single doc)
2. - Create transient stores (head, data, block)
-3. Start with a given Txn and CID span set (length 1 for now)
+3. Start with a given Txn and CID prefix set (length 1 for now)
4. call traverse with the target cid
5.
-err := VersionFetcher.Start(txn, spans) {
+err := VersionFetcher.Start(txn, prefixes) {
vf.traverse(cid)
}
*/
diff --git a/internal/lens/fetcher.go b/internal/lens/fetcher.go
index a441c357bd..1729aecdb4 100644
--- a/internal/lens/fetcher.go
+++ b/internal/lens/fetcher.go
@@ -127,8 +127,8 @@ historyLoop:
)
}
-func (f *lensedFetcher) Start(ctx context.Context, spans ...core.Span) error {
- return f.source.Start(ctx, spans...)
+func (f *lensedFetcher) Start(ctx context.Context, prefixes ...keys.Walkable) error {
+ return f.source.Start(ctx, prefixes...)
}
func (f *lensedFetcher) FetchNext(ctx context.Context) (fetcher.EncodedDocument, fetcher.ExecInfo, error) {
diff --git a/internal/planner/arbitrary_join.go b/internal/planner/arbitrary_join.go
index e668287028..2d510f1bdc 100644
--- a/internal/planner/arbitrary_join.go
+++ b/internal/planner/arbitrary_join.go
@@ -15,6 +15,7 @@ import (
"strings"
"github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -79,13 +80,13 @@ func (n *dataSource) Start() error {
return nil
}
-func (n *dataSource) Spans(spans []core.Span) {
+func (n *dataSource) Prefixes(prefixes []keys.Walkable) {
if n.parentSource != nil {
- n.parentSource.Spans(spans)
+ n.parentSource.Prefixes(prefixes)
}
if n.childSource != nil {
- n.childSource.Spans(spans)
+ n.childSource.Prefixes(prefixes)
}
}
diff --git a/internal/planner/average.go b/internal/planner/average.go
index 9fe8803bee..c5274b5b6f 100644
--- a/internal/planner/average.go
+++ b/internal/planner/average.go
@@ -13,7 +13,7 @@ package planner
import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/client/request"
- "github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -64,11 +64,11 @@ func (n *averageNode) Init() error {
return n.plan.Init()
}
-func (n *averageNode) Kind() string { return "averageNode" }
-func (n *averageNode) Start() error { return n.plan.Start() }
-func (n *averageNode) Spans(spans []core.Span) { n.plan.Spans(spans) }
-func (n *averageNode) Close() error { return n.plan.Close() }
-func (n *averageNode) Source() planNode { return n.plan }
+func (n *averageNode) Kind() string { return "averageNode" }
+func (n *averageNode) Start() error { return n.plan.Start() }
+func (n *averageNode) Prefixes(prefixes []keys.Walkable) { n.plan.Prefixes(prefixes) }
+func (n *averageNode) Close() error { return n.plan.Close() }
+func (n *averageNode) Source() planNode { return n.plan }
func (n *averageNode) Next() (bool, error) {
n.execInfo.iterations++
diff --git a/internal/planner/commit.go b/internal/planner/commit.go
index c73944b250..348a4b4e6a 100644
--- a/internal/planner/commit.go
+++ b/internal/planner/commit.go
@@ -91,13 +91,13 @@ func (n *dagScanNode) Start() error {
return nil
}
-// Spans needs to parse the given span set. dagScanNode only
-// cares about the first value in the span set. The value is
+// Prefixes needs to parse the given prefix set. dagScanNode only
+// cares about the first value in the prefix set. The value is
// either a CID or a DocID.
// If its a CID, set the node CID val
// if its a DocID, set the node Key val (headset)
-func (n *dagScanNode) Spans(spans []core.Span) {
- if len(spans) == 0 {
+func (n *dagScanNode) Prefixes(prefixes []keys.Walkable) {
+ if len(prefixes) == 0 {
return
}
@@ -108,9 +108,9 @@ func (n *dagScanNode) Spans(spans []core.Span) {
fieldID = core.COMPOSITE_NAMESPACE
}
- for _, span := range spans {
+ for _, prefix := range prefixes {
var start keys.HeadstoreDocKey
- switch s := span.Start.(type) {
+ switch s := prefix.(type) {
case keys.DataStoreKey:
start = s.ToHeadStoreKey()
case keys.HeadstoreDocKey:
@@ -145,20 +145,14 @@ func (n *dagScanNode) simpleExplain() (map[string]any, error) {
simpleExplainMap["cid"] = nil
}
- // Build the explanation of the spans attribute.
- spansExplainer := []map[string]any{}
+ // Build the explanation of the prefixes attribute.
+ prefixesExplainer := []string{}
// Note: n.headset is `nil` for single commit selection query, so must check for it.
if n.prefix.HasValue() {
- spansExplainer = append(
- spansExplainer,
- map[string]any{
- "start": n.prefix.Value().ToString(),
- "end": n.prefix.Value().PrefixEnd().ToString(),
- },
- )
+ prefixesExplainer = append(prefixesExplainer, keys.PrettyPrint(n.prefix.Value()))
}
- // Add the built spans attribute, if it was valid.
- simpleExplainMap[spansLabel] = spansExplainer
+ // Add the built prefixes attribute, if it was valid.
+ simpleExplainMap[prefixesLabel] = prefixesExplainer
return simpleExplainMap, nil
}
diff --git a/internal/planner/count.go b/internal/planner/count.go
index efc2a20c36..b71fcab1e5 100644
--- a/internal/planner/count.go
+++ b/internal/planner/count.go
@@ -22,6 +22,7 @@ import (
"github.com/sourcenetwork/defradb/client/request"
"github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -62,7 +63,7 @@ func (n *countNode) Init() error {
func (n *countNode) Start() error { return n.plan.Start() }
-func (n *countNode) Spans(spans []core.Span) { n.plan.Spans(spans) }
+func (n *countNode) Prefixes(prefixes []keys.Walkable) { n.plan.Prefixes(prefixes) }
func (n *countNode) Close() error { return n.plan.Close() }
diff --git a/internal/planner/create.go b/internal/planner/create.go
index 1b03857a13..58bd079e19 100644
--- a/internal/planner/create.go
+++ b/internal/planner/create.go
@@ -13,9 +13,9 @@ package planner
import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/client/request"
- "github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/base"
"github.com/sourcenetwork/defradb/internal/encryption"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -56,13 +56,12 @@ func (n *createNode) Kind() string { return "createNode" }
func (n *createNode) Init() error { return nil }
-func docIDsToSpans(ids []string, desc client.CollectionDescription) []core.Span {
- spans := make([]core.Span, len(ids))
+func docIDsToPrefixes(ids []string, desc client.CollectionDescription) []keys.Walkable {
+ prefixes := make([]keys.Walkable, len(ids))
for i, id := range ids {
- docID := base.MakeDataStoreKeyWithCollectionAndDocID(desc, id)
- spans[i] = core.NewSpan(docID, docID.PrefixEnd())
+ prefixes[i] = base.MakeDataStoreKeyWithCollectionAndDocID(desc, id)
}
- return spans
+ return prefixes
}
func documentsToDocIDs(docs ...*client.Document) []string {
@@ -96,7 +95,7 @@ func (n *createNode) Next() (bool, error) {
return false, err
}
- n.results.Spans(docIDsToSpans(documentsToDocIDs(n.docs...), n.collection.Description()))
+ n.results.Prefixes(docIDsToPrefixes(documentsToDocIDs(n.docs...), n.collection.Description()))
err = n.results.Init()
if err != nil {
@@ -115,7 +114,7 @@ func (n *createNode) Next() (bool, error) {
return next, err
}
-func (n *createNode) Spans(spans []core.Span) { /* no-op */ }
+func (n *createNode) Prefixes(prefixes []keys.Walkable) { /* no-op */ }
func (n *createNode) Close() error {
return n.results.Close()
diff --git a/internal/planner/delete.go b/internal/planner/delete.go
index 9142a76868..fa35b16ed7 100644
--- a/internal/planner/delete.go
+++ b/internal/planner/delete.go
@@ -13,7 +13,7 @@ package planner
import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/client/request"
- "github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -67,8 +67,8 @@ func (n *deleteNode) Next() (bool, error) {
return true, nil
}
-func (n *deleteNode) Spans(spans []core.Span) {
- n.source.Spans(spans)
+func (n *deleteNode) Prefixes(prefixes []keys.Walkable) {
+ n.source.Prefixes(prefixes)
}
func (n *deleteNode) Kind() string {
diff --git a/internal/planner/explain.go b/internal/planner/explain.go
index 860ea39df1..3a6be07f94 100644
--- a/internal/planner/explain.go
+++ b/internal/planner/explain.go
@@ -66,7 +66,7 @@ const (
limitLabel = "limit"
offsetLabel = "offset"
sourcesLabel = "sources"
- spansLabel = "spans"
+ prefixesLabel = "prefixes"
)
// buildDebugExplainGraph dumps the entire plan graph as is, with all the plan nodes.
diff --git a/internal/planner/group.go b/internal/planner/group.go
index 2491740e81..900f0ff412 100644
--- a/internal/planner/group.go
+++ b/internal/planner/group.go
@@ -14,6 +14,7 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/client/request"
"github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -127,9 +128,9 @@ func (n *groupNode) Start() error {
return nil
}
-func (n *groupNode) Spans(spans []core.Span) {
+func (n *groupNode) Prefixes(prefixes []keys.Walkable) {
for _, dataSource := range n.dataSources {
- dataSource.Spans(spans)
+ dataSource.Prefixes(prefixes)
}
}
diff --git a/internal/planner/lens.go b/internal/planner/lens.go
index 618642b5df..25f2254237 100644
--- a/internal/planner/lens.go
+++ b/internal/planner/lens.go
@@ -16,6 +16,7 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/client/request"
"github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
// viewNode applies a lens transform to data yielded from the source node.
@@ -61,8 +62,8 @@ func (n *lensNode) Start() error {
return n.source.Start()
}
-func (n *lensNode) Spans(spans []core.Span) {
- n.source.Spans(spans)
+func (n *lensNode) Prefixes(prefixes []keys.Walkable) {
+ n.source.Prefixes(prefixes)
}
func (n *lensNode) Next() (bool, error) {
diff --git a/internal/planner/limit.go b/internal/planner/limit.go
index 5281a7e215..3ccdfbeac7 100644
--- a/internal/planner/limit.go
+++ b/internal/planner/limit.go
@@ -13,6 +13,7 @@ package planner
import (
"github.com/sourcenetwork/defradb/client/request"
"github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -59,10 +60,10 @@ func (n *limitNode) Init() error {
return n.plan.Init()
}
-func (n *limitNode) Start() error { return n.plan.Start() }
-func (n *limitNode) Spans(spans []core.Span) { n.plan.Spans(spans) }
-func (n *limitNode) Close() error { return n.plan.Close() }
-func (n *limitNode) Value() core.Doc { return n.plan.Value() }
+func (n *limitNode) Start() error { return n.plan.Start() }
+func (n *limitNode) Prefixes(prefixes []keys.Walkable) { n.plan.Prefixes(prefixes) }
+func (n *limitNode) Close() error { return n.plan.Close() }
+func (n *limitNode) Value() core.Doc { return n.plan.Value() }
func (n *limitNode) Next() (bool, error) {
n.execInfo.iterations++
diff --git a/internal/planner/max.go b/internal/planner/max.go
index e4db8fa526..c3eb6b488e 100644
--- a/internal/planner/max.go
+++ b/internal/planner/max.go
@@ -17,6 +17,7 @@ import (
"github.com/sourcenetwork/defradb/client/request"
"github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -54,13 +55,13 @@ func (p *Planner) Max(
}, nil
}
-func (n *maxNode) Kind() string { return "maxNode" }
-func (n *maxNode) Init() error { return n.plan.Init() }
-func (n *maxNode) Start() error { return n.plan.Start() }
-func (n *maxNode) Spans(spans []core.Span) { n.plan.Spans(spans) }
-func (n *maxNode) Close() error { return n.plan.Close() }
-func (n *maxNode) Source() planNode { return n.plan }
-func (n *maxNode) SetPlan(p planNode) { n.plan = p }
+func (n *maxNode) Kind() string { return "maxNode" }
+func (n *maxNode) Init() error { return n.plan.Init() }
+func (n *maxNode) Start() error { return n.plan.Start() }
+func (n *maxNode) Prefixes(prefixes []keys.Walkable) { n.plan.Prefixes(prefixes) }
+func (n *maxNode) Close() error { return n.plan.Close() }
+func (n *maxNode) Source() planNode { return n.plan }
+func (n *maxNode) SetPlan(p planNode) { n.plan = p }
func (n *maxNode) simpleExplain() (map[string]any, error) {
sourceExplanations := make([]map[string]any, len(n.aggregateMapping))
diff --git a/internal/planner/min.go b/internal/planner/min.go
index 163ca2894d..99278785bc 100644
--- a/internal/planner/min.go
+++ b/internal/planner/min.go
@@ -17,6 +17,7 @@ import (
"github.com/sourcenetwork/defradb/client/request"
"github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -54,13 +55,13 @@ func (p *Planner) Min(
}, nil
}
-func (n *minNode) Kind() string { return "minNode" }
-func (n *minNode) Init() error { return n.plan.Init() }
-func (n *minNode) Start() error { return n.plan.Start() }
-func (n *minNode) Spans(spans []core.Span) { n.plan.Spans(spans) }
-func (n *minNode) Close() error { return n.plan.Close() }
-func (n *minNode) Source() planNode { return n.plan }
-func (n *minNode) SetPlan(p planNode) { n.plan = p }
+func (n *minNode) Kind() string { return "minNode" }
+func (n *minNode) Init() error { return n.plan.Init() }
+func (n *minNode) Start() error { return n.plan.Start() }
+func (n *minNode) Prefixes(prefixes []keys.Walkable) { n.plan.Prefixes(prefixes) }
+func (n *minNode) Close() error { return n.plan.Close() }
+func (n *minNode) Source() planNode { return n.plan }
+func (n *minNode) SetPlan(p planNode) { n.plan = p }
func (n *minNode) simpleExplain() (map[string]any, error) {
sourceExplanations := make([]map[string]any, len(n.aggregateMapping))
diff --git a/internal/planner/multi.go b/internal/planner/multi.go
index 579f169344..37d7aa435d 100644
--- a/internal/planner/multi.go
+++ b/internal/planner/multi.go
@@ -91,9 +91,9 @@ func (p *parallelNode) Start() error {
})
}
-func (p *parallelNode) Spans(spans []core.Span) {
+func (p *parallelNode) Prefixes(prefixes []keys.Walkable) {
_ = p.applyToPlans(func(n planNode) error {
- n.Spans(spans)
+ n.Prefixes(prefixes)
return nil
})
}
@@ -156,9 +156,9 @@ func (p *parallelNode) nextAppend(index int, plan planNode) (bool, error) {
return false, nil
}
- // pass the doc key as a reference through the spans interface
- spans := []core.Span{core.NewSpan(keys.DataStoreKey{DocID: key}, keys.DataStoreKey{})}
- plan.Spans(spans)
+ // pass the doc key as a reference through the prefixes interface
+ prefixes := []keys.Walkable{keys.DataStoreKey{DocID: key}}
+ plan.Prefixes(prefixes)
err := plan.Init()
if err != nil {
return false, err
diff --git a/internal/planner/operation.go b/internal/planner/operation.go
index 6f351f92a1..faa2f0618b 100644
--- a/internal/planner/operation.go
+++ b/internal/planner/operation.go
@@ -13,6 +13,7 @@ package planner
import (
"github.com/sourcenetwork/defradb/client/request"
"github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -28,9 +29,9 @@ type operationNode struct {
isDone bool
}
-func (n *operationNode) Spans(spans []core.Span) {
+func (n *operationNode) Prefixes(prefixes []keys.Walkable) {
for _, child := range n.children {
- child.Spans(spans)
+ child.Prefixes(prefixes)
}
}
diff --git a/internal/planner/order.go b/internal/planner/order.go
index 0a69ba5453..04c8fd52ea 100644
--- a/internal/planner/order.go
+++ b/internal/planner/order.go
@@ -14,6 +14,7 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/client/request"
"github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -98,7 +99,7 @@ func (n *orderNode) Init() error {
}
func (n *orderNode) Start() error { return n.plan.Start() }
-func (n *orderNode) Spans(spans []core.Span) { n.plan.Spans(spans) }
+func (n *orderNode) Prefixes(prefixes []keys.Walkable) { n.plan.Prefixes(prefixes) }
func (n *orderNode) Value() core.Doc {
return n.valueIter.Value()
diff --git a/internal/planner/pipe.go b/internal/planner/pipe.go
index b9331fce45..22026abc69 100644
--- a/internal/planner/pipe.go
+++ b/internal/planner/pipe.go
@@ -13,6 +13,7 @@ package planner
import (
"github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/container"
+ "github.com/sourcenetwork/defradb/internal/keys"
)
// A lazily loaded cache-node that allows retrieval of cached documents at arbitrary indexes.
@@ -51,10 +52,10 @@ func (n *pipeNode) Init() error {
return n.source.Init()
}
-func (n *pipeNode) Start() error { return n.source.Start() }
-func (n *pipeNode) Spans(spans []core.Span) { n.source.Spans(spans) }
-func (n *pipeNode) Close() error { return n.source.Close() }
-func (n *pipeNode) Source() planNode { return n.source }
+func (n *pipeNode) Start() error { return n.source.Start() }
+func (n *pipeNode) Prefixes(prefixes []keys.Walkable) { n.source.Prefixes(prefixes) }
+func (n *pipeNode) Close() error { return n.source.Close() }
+func (n *pipeNode) Source() planNode { return n.source }
func (n *pipeNode) Next() (bool, error) {
// we need to load all docs up until the requested point - this allows us to
diff --git a/internal/planner/planner.go b/internal/planner/planner.go
index 77dac1c7a2..8390c6d5a5 100644
--- a/internal/planner/planner.go
+++ b/internal/planner/planner.go
@@ -22,6 +22,7 @@ import (
"github.com/sourcenetwork/defradb/datastore"
"github.com/sourcenetwork/defradb/internal/connor"
"github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/filter"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -34,9 +35,9 @@ type planNode interface {
// Starts any internal logic or processes required by the planNode. Should be called *after* Init().
Start() error
- // Spans sets the planNodes target spans. This is primarily only used for a scanNode,
+ // Prefixes sets the planNodes target prefixes. This is primarily only used for a scanNode,
// but based on the tree structure, may need to be propagated Eg. From a selectNode -> scanNode.
- Spans([]core.Span)
+ Prefixes([]keys.Walkable)
// Next processes the next result doc from the request. Can only be called *after* Start().
// Can't be called again if any previous call returns false.
diff --git a/internal/planner/scan.go b/internal/planner/scan.go
index c00cda401c..eed370d0ae 100644
--- a/internal/planner/scan.go
+++ b/internal/planner/scan.go
@@ -46,8 +46,8 @@ type scanNode struct {
showDeleted bool
- spans []core.Span
- reverse bool
+ prefixes []keys.Walkable
+ reverse bool
filter *mapper.Filter
slct *mapper.Select
@@ -202,12 +202,12 @@ func (n *scanNode) Start() error {
}
func (n *scanNode) initScan() error {
- if len(n.spans) == 0 {
- start := base.MakeDataStoreKeyWithCollectionDescription(n.col.Description())
- n.spans = []core.Span{core.NewSpan(start, start.PrefixEnd())}
+ if len(n.prefixes) == 0 {
+ prefix := base.MakeDataStoreKeyWithCollectionDescription(n.col.Description())
+ n.prefixes = []keys.Walkable{prefix}
}
- err := n.fetcher.Start(n.p.ctx, n.spans...)
+ err := n.fetcher.Start(n.p.ctx, n.prefixes...)
if err != nil {
return err
}
@@ -221,7 +221,7 @@ func (n *scanNode) initScan() error {
func (n *scanNode) Next() (bool, error) {
n.execInfo.iterations++
- if len(n.spans) == 0 {
+ if len(n.prefixes) == 0 {
return false, nil
}
@@ -249,8 +249,8 @@ func (n *scanNode) Next() (bool, error) {
return true, nil
}
-func (n *scanNode) Spans(spans []core.Span) {
- n.spans = spans
+func (n *scanNode) Prefixes(prefixes []keys.Walkable) {
+ n.prefixes = prefixes
}
func (n *scanNode) Close() error {
@@ -259,19 +259,13 @@ func (n *scanNode) Close() error {
func (n *scanNode) Source() planNode { return nil }
-// explainSpans explains the spans attribute.
-func (n *scanNode) explainSpans() []map[string]any {
- spansExplainer := []map[string]any{}
- for _, span := range n.spans {
- spanExplainer := map[string]any{
- "start": keys.PrettyPrint(span.Start),
- "end": keys.PrettyPrint(span.End),
- }
-
- spansExplainer = append(spansExplainer, spanExplainer)
+// explainPrefixes explains the prefixes attribute.
+func (n *scanNode) explainPrefixes() []string {
+ prefixes := make([]string, len(n.prefixes))
+ for i, prefix := range n.prefixes {
+ prefixes[i] = keys.PrettyPrint(prefix)
}
-
- return spansExplainer
+ return prefixes
}
func (n *scanNode) simpleExplain() (map[string]any, error) {
@@ -288,8 +282,8 @@ func (n *scanNode) simpleExplain() (map[string]any, error) {
simpleExplainMap[collectionNameLabel] = n.col.Name().Value()
simpleExplainMap[collectionIDLabel] = n.col.Description().IDString()
- // Add the spans attribute.
- simpleExplainMap[spansLabel] = n.explainSpans()
+ // Add the prefixes attribute.
+ simpleExplainMap[prefixesLabel] = n.explainPrefixes()
return simpleExplainMap, nil
}
@@ -418,8 +412,8 @@ func (n *multiScanNode) Value() core.Doc {
return n.scanNode.documentIterator.Value()
}
-func (n *multiScanNode) Spans(spans []core.Span) {
- n.scanNode.Spans(spans)
+func (n *multiScanNode) Prefixes(prefixes []keys.Walkable) {
+ n.scanNode.Prefixes(prefixes)
}
func (n *multiScanNode) Source() planNode {
diff --git a/internal/planner/select.go b/internal/planner/select.go
index 6078d67650..d0e816cfb9 100644
--- a/internal/planner/select.go
+++ b/internal/planner/select.go
@@ -69,7 +69,7 @@ func (n *selectTopNode) Start() error { return n.planNode.Start() }
func (n *selectTopNode) Next() (bool, error) { return n.planNode.Next() }
-func (n *selectTopNode) Spans(spans []core.Span) { n.planNode.Spans(spans) }
+func (n *selectTopNode) Prefixes(prefixes []keys.Walkable) { n.planNode.Prefixes(prefixes) }
func (n *selectTopNode) Value() core.Doc { return n.planNode.Value() }
@@ -181,8 +181,8 @@ func (n *selectNode) Next() (bool, error) {
}
}
-func (n *selectNode) Spans(spans []core.Span) {
- n.source.Spans(spans)
+func (n *selectNode) Prefixes(prefixes []keys.Walkable) {
+ n.source.Prefixes(prefixes)
}
func (n *selectNode) Close() error {
@@ -263,33 +263,29 @@ func (n *selectNode) initSource() ([]aggregateNode, error) {
return nil, err
}
- // This exists because the fetcher interface demands a []Span, yet the versioned
- // fetcher type (that will be the only one consuming this []Span) does not use it
- // as either a span, or even a prefix. And with this design limitation this is
+ // This exists because the fetcher interface demands a []Prefixes, yet the versioned
+ // fetcher type (that will be the only one consuming this []Prefixes) does not use it
+ // as a prefix. And with this design limitation this is
// currently the least bad way of passing the cid in to the fetcher.
- origScan.Spans(
- []core.Span{
- core.NewSpan(
- keys.HeadstoreDocKey{
- Cid: c,
- },
- keys.HeadstoreDocKey{},
- ),
+ origScan.Prefixes(
+ []keys.Walkable{
+ keys.HeadstoreDocKey{
+ Cid: c,
+ },
},
)
} else if n.selectReq.DocIDs.HasValue() {
// If we *just* have a DocID(s), run a FindByDocID(s) optimization
- // if we have a FindByDocID filter, create a span for it
+ // if we have a FindByDocID filter, create a prefix for it
// and propagate it to the scanNode
// @todo: When running the optimizer, check if the filter object
// contains a _docID equality condition, and upgrade it to a point lookup
// instead of a prefix scan + filter via the Primary Index (0), like here:
- spans := make([]core.Span, len(n.selectReq.DocIDs.Value()))
+ prefixes := make([]keys.Walkable, len(n.selectReq.DocIDs.Value()))
for i, docID := range n.selectReq.DocIDs.Value() {
- docIDIndexKey := base.MakeDataStoreKeyWithCollectionAndDocID(sourcePlan.collection.Description(), docID)
- spans[i] = core.NewSpan(docIDIndexKey, docIDIndexKey.PrefixEnd())
+ prefixes[i] = base.MakeDataStoreKeyWithCollectionAndDocID(sourcePlan.collection.Description(), docID)
}
- origScan.Spans(spans)
+ origScan.Prefixes(prefixes)
}
}
diff --git a/internal/planner/sum.go b/internal/planner/sum.go
index da6010704d..c790cba60d 100644
--- a/internal/planner/sum.go
+++ b/internal/planner/sum.go
@@ -16,6 +16,7 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/client/request"
"github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -149,7 +150,7 @@ func (n *sumNode) Init() error {
func (n *sumNode) Start() error { return n.plan.Start() }
-func (n *sumNode) Spans(spans []core.Span) { n.plan.Spans(spans) }
+func (n *sumNode) Prefixes(prefixes []keys.Walkable) { n.plan.Prefixes(prefixes) }
func (n *sumNode) Close() error { return n.plan.Close() }
diff --git a/internal/planner/top.go b/internal/planner/top.go
index 518a96af50..6224b6d62d 100644
--- a/internal/planner/top.go
+++ b/internal/planner/top.go
@@ -13,6 +13,7 @@ package planner
import (
"github.com/sourcenetwork/defradb/client/request"
"github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -35,7 +36,7 @@ type topLevelNode struct {
isInRecurse bool
}
-func (n *topLevelNode) Spans(spans []core.Span) {
+func (n *topLevelNode) Prefixes(prefixes []keys.Walkable) {
if n.isInRecurse {
return
}
@@ -45,7 +46,7 @@ func (n *topLevelNode) Spans(spans []core.Span) {
}()
for _, child := range n.children {
- child.Spans(spans)
+ child.Prefixes(prefixes)
}
}
diff --git a/internal/planner/type_join.go b/internal/planner/type_join.go
index a9063d07d6..5e7b83d237 100644
--- a/internal/planner/type_join.go
+++ b/internal/planner/type_join.go
@@ -18,6 +18,7 @@ import (
"github.com/sourcenetwork/defradb/internal/connor"
"github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/base"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/filter"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -114,8 +115,8 @@ func (n *typeIndexJoin) Start() error {
return n.joinPlan.Start()
}
-func (n *typeIndexJoin) Spans(spans []core.Span) {
- n.joinPlan.Spans(spans)
+func (n *typeIndexJoin) Prefixes(prefixes []keys.Walkable) {
+ n.joinPlan.Prefixes(prefixes)
}
func (n *typeIndexJoin) Next() (bool, error) {
@@ -444,9 +445,9 @@ func fetchDocWithID(node planNode, docID string) (bool, error) {
}
dsKey := base.MakeDataStoreKeyWithCollectionAndDocID(scan.col.Description(), docID)
- spans := []core.Span{core.NewSpan(dsKey, dsKey.PrefixEnd())}
+ prefixes := []keys.Walkable{dsKey}
- node.Spans(spans)
+ node.Prefixes(prefixes)
if err := node.Init(); err != nil {
return false, NewErrSubTypeInit(err)
@@ -502,8 +503,8 @@ func (join *invertibleTypeJoin) Close() error {
return join.childSide.plan.Close()
}
-func (join *invertibleTypeJoin) Spans(spans []core.Span) {
- join.parentSide.plan.Spans(spans)
+func (join *invertibleTypeJoin) Prefixes(prefixes []keys.Walkable) {
+ join.parentSide.plan.Prefixes(prefixes)
}
func (join *invertibleTypeJoin) Source() planNode { return join.parentSide.plan }
diff --git a/internal/planner/update.go b/internal/planner/update.go
index 4340625bf8..e313e17e18 100644
--- a/internal/planner/update.go
+++ b/internal/planner/update.go
@@ -13,7 +13,7 @@ package planner
import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/client/request"
- "github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -107,7 +107,7 @@ func (n *updateNode) Next() (bool, error) {
func (n *updateNode) Kind() string { return "updateNode" }
-func (n *updateNode) Spans(spans []core.Span) { n.results.Spans(spans) }
+func (n *updateNode) Prefixes(prefixes []keys.Walkable) { n.results.Prefixes(prefixes) }
func (n *updateNode) Init() error { return n.results.Init() }
diff --git a/internal/planner/upsert.go b/internal/planner/upsert.go
index 331d1e4171..09855a81f4 100644
--- a/internal/planner/upsert.go
+++ b/internal/planner/upsert.go
@@ -13,7 +13,7 @@ package planner
import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/client/request"
- "github.com/sourcenetwork/defradb/internal/core"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -73,7 +73,7 @@ func (n *upsertNode) Next() (bool, error) {
if err != nil {
return false, err
}
- n.source.Spans(docIDsToSpans(documentsToDocIDs(doc), n.collection.Description()))
+ n.source.Prefixes(docIDsToPrefixes(documentsToDocIDs(doc), n.collection.Description()))
}
err = n.source.Init()
if err != nil {
@@ -96,8 +96,8 @@ func (n *upsertNode) Kind() string {
return "upsertNode"
}
-func (n *upsertNode) Spans(spans []core.Span) {
- n.source.Spans(spans)
+func (n *upsertNode) Prefixes(prefixes []keys.Walkable) {
+ n.source.Prefixes(prefixes)
}
func (n *upsertNode) Init() error {
diff --git a/internal/planner/values.go b/internal/planner/values.go
index 4028f52594..de630679d6 100644
--- a/internal/planner/values.go
+++ b/internal/planner/values.go
@@ -16,6 +16,7 @@ import (
"github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/base"
"github.com/sourcenetwork/defradb/internal/db/container"
+ "github.com/sourcenetwork/defradb/internal/keys"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -46,9 +47,9 @@ func (p *Planner) newContainerValuesNode(ordering []mapper.OrderCondition) *valu
}
}
-func (n *valuesNode) Init() error { return nil }
-func (n *valuesNode) Start() error { return nil }
-func (n *valuesNode) Spans(spans []core.Span) {}
+func (n *valuesNode) Init() error { return nil }
+func (n *valuesNode) Start() error { return nil }
+func (n *valuesNode) Prefixes(prefixes []keys.Walkable) {}
func (n *valuesNode) Kind() string {
return "valuesNode"
diff --git a/internal/planner/view.go b/internal/planner/view.go
index b834d74323..87b7faba11 100644
--- a/internal/planner/view.go
+++ b/internal/planner/view.go
@@ -74,8 +74,8 @@ func (n *viewNode) Start() error {
return n.source.Start()
}
-func (n *viewNode) Spans(spans []core.Span) {
- n.source.Spans(spans)
+func (n *viewNode) Prefixes(prefixes []keys.Walkable) {
+ n.source.Prefixes(prefixes)
}
func (n *viewNode) Next() (bool, error) {
@@ -217,7 +217,7 @@ func (n *cachedViewFetcher) Start() error {
return nil
}
-func (n *cachedViewFetcher) Spans(spans []core.Span) {
+func (n *cachedViewFetcher) Prefixes(prefixes []keys.Walkable) {
// no-op
}
diff --git a/tests/integration/explain/default/basic_test.go b/tests/integration/explain/default/basic_test.go
index 69089cd15d..d74d5e38e2 100644
--- a/tests/integration/explain/default/basic_test.go
+++ b/tests/integration/explain/default/basic_test.go
@@ -71,11 +71,8 @@ func TestDefaultExplainRequestWithFullBasicGraph(t *testing.T) {
"filter": nil,
"collectionID": "3",
"collectionName": "Author",
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
diff --git a/tests/integration/explain/default/dagscan_test.go b/tests/integration/explain/default/dagscan_test.go
index c19058c258..45b894af2d 100644
--- a/tests/integration/explain/default/dagscan_test.go
+++ b/tests/integration/explain/default/dagscan_test.go
@@ -58,11 +58,8 @@ func TestDefaultExplainCommitsDagScanQueryOp(t *testing.T) {
ExpectedAttributes: dataMap{
"cid": nil,
"fieldId": "1",
- "spans": []dataMap{
- {
- "start": "/d/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/1",
- "end": "/d/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/2",
- },
+ "prefixes": []string{
+ "/d/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/1",
},
},
},
@@ -101,11 +98,8 @@ func TestDefaultExplainCommitsDagScanQueryOpWithoutField(t *testing.T) {
ExpectedAttributes: dataMap{
"cid": nil,
"fieldId": nil,
- "spans": []dataMap{
- {
- "start": "/d/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84",
- "end": "/d/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e85",
- },
+ "prefixes": []string{
+ "/d/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84",
},
},
},
@@ -145,11 +139,8 @@ func TestDefaultExplainLatestCommitsDagScanQueryOp(t *testing.T) {
ExpectedAttributes: dataMap{
"cid": nil,
"fieldId": "1",
- "spans": []dataMap{
- {
- "start": "/d/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/1",
- "end": "/d/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/2",
- },
+ "prefixes": []string{
+ "/d/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/1",
},
},
},
@@ -189,11 +180,8 @@ func TestDefaultExplainLatestCommitsDagScanQueryOpWithoutField(t *testing.T) {
ExpectedAttributes: dataMap{
"cid": nil,
"fieldId": "C",
- "spans": []dataMap{
- {
- "start": "/d/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/C",
- "end": "/d/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/D",
- },
+ "prefixes": []string{
+ "/d/bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84/C",
},
},
},
diff --git a/tests/integration/explain/default/delete_test.go b/tests/integration/explain/default/delete_test.go
index 39b3b732d5..16005cea30 100644
--- a/tests/integration/explain/default/delete_test.go
+++ b/tests/integration/explain/default/delete_test.go
@@ -76,11 +76,8 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilter(t *testing.T) {
"_eq": "Shahzad",
},
},
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -127,11 +124,8 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilterToMatchEverything(t *
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "end": "/4",
- "start": "/3",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -180,11 +174,8 @@ func TestDefaultExplainMutationRequestWithDeleteUsingId(t *testing.T) {
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "end": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9e",
- "start": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
- },
+ "prefixes": []string{
+ "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
},
},
},
@@ -237,15 +228,9 @@ func TestDefaultExplainMutationRequestWithDeleteUsingIds(t *testing.T) {
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "end": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9e",
- "start": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
- },
- {
- "end": "/3/bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67g",
- "start": "/3/bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f",
- },
+ "prefixes": []string{
+ "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
+ "/3/bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f",
},
},
},
@@ -292,11 +277,8 @@ func TestDefaultExplainMutationRequestWithDeleteUsingNoIds(t *testing.T) {
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "end": "/4",
- "start": "/3",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -380,15 +362,9 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilterAndIds(t *testing.T)
},
},
},
- "spans": []dataMap{
- {
- "end": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9e",
- "start": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
- },
- {
- "end": "/3/tesu",
- "start": "/3/test",
- },
+ "prefixes": []string{
+ "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
+ "/3/test",
},
},
},
diff --git a/tests/integration/explain/default/group_with_doc_id_child_test.go b/tests/integration/explain/default/group_with_doc_id_child_test.go
index 38a8d27fe5..4e43f27465 100644
--- a/tests/integration/explain/default/group_with_doc_id_child_test.go
+++ b/tests/integration/explain/default/group_with_doc_id_child_test.go
@@ -65,11 +65,8 @@ func TestDefaultExplainRequestWithDocIDsOnInnerGroupSelection(t *testing.T) {
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
diff --git a/tests/integration/explain/default/group_with_doc_id_test.go b/tests/integration/explain/default/group_with_doc_id_test.go
index 8146a26566..9c1e4614e7 100644
--- a/tests/integration/explain/default/group_with_doc_id_test.go
+++ b/tests/integration/explain/default/group_with_doc_id_test.go
@@ -59,11 +59,8 @@ func TestDefaultExplainRequestWithDocIDOnParentGroupBy(t *testing.T) {
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3/bae-6a4c5bc5-b044-5a03-a868-8260af6f2254",
- "end": "/3/bae-6a4c5bc5-b044-5a03-a868-8260af6f2255",
- },
+ "prefixes": []string{
+ "/3/bae-6a4c5bc5-b044-5a03-a868-8260af6f2254",
},
},
},
@@ -125,15 +122,9 @@ func TestDefaultExplainRequestWithDocIDsAndFilterOnParentGroupBy(t *testing.T) {
"_eq": int32(20),
},
},
- "spans": []dataMap{
- {
- "start": "/3/bae-6a4c5bc5-b044-5a03-a868-8260af6f2254",
- "end": "/3/bae-6a4c5bc5-b044-5a03-a868-8260af6f2255",
- },
- {
- "start": "/3/bae-4ea9d148-13f3-5a48-a0ef-9ffd344caeed",
- "end": "/3/bae-4ea9d148-13f3-5a48-a0ef-9ffd344caeee",
- },
+ "prefixes": []string{
+ "/3/bae-6a4c5bc5-b044-5a03-a868-8260af6f2254",
+ "/3/bae-4ea9d148-13f3-5a48-a0ef-9ffd344caeed",
},
},
},
diff --git a/tests/integration/explain/default/group_with_filter_child_test.go b/tests/integration/explain/default/group_with_filter_child_test.go
index e6f4a42a0d..ff49db2655 100644
--- a/tests/integration/explain/default/group_with_filter_child_test.go
+++ b/tests/integration/explain/default/group_with_filter_child_test.go
@@ -67,11 +67,8 @@ func TestDefaultExplainRequestWithFilterOnInnerGroupSelection(t *testing.T) {
"filter": nil,
"collectionID": "3",
"collectionName": "Author",
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -140,11 +137,8 @@ func TestDefaultExplainRequestWithFilterOnParentGroupByAndInnerGroupSelection(t
},
"collectionID": "3",
"collectionName": "Author",
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
diff --git a/tests/integration/explain/default/group_with_filter_test.go b/tests/integration/explain/default/group_with_filter_test.go
index 23651934e3..bef0be6ccb 100644
--- a/tests/integration/explain/default/group_with_filter_test.go
+++ b/tests/integration/explain/default/group_with_filter_test.go
@@ -63,11 +63,8 @@ func TestDefaultExplainRequestWithFilterOnGroupByParent(t *testing.T) {
"_gt": int32(63),
},
},
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
diff --git a/tests/integration/explain/default/top_with_average_test.go b/tests/integration/explain/default/top_with_average_test.go
index 1921a142f7..e76730a49f 100644
--- a/tests/integration/explain/default/top_with_average_test.go
+++ b/tests/integration/explain/default/top_with_average_test.go
@@ -76,11 +76,8 @@ func TestDefaultExplainTopLevelAverageRequest(t *testing.T) {
"_ne": nil,
},
},
- "spans": []dataMap{
- {
- "end": "/4",
- "start": "/3",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -168,11 +165,8 @@ func TestDefaultExplainTopLevelAverageRequestWithFilter(t *testing.T) {
"_ne": nil,
},
},
- "spans": []dataMap{
- {
- "end": "/4",
- "start": "/3",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
diff --git a/tests/integration/explain/default/top_with_count_test.go b/tests/integration/explain/default/top_with_count_test.go
index 6ac039f764..09ffbf409a 100644
--- a/tests/integration/explain/default/top_with_count_test.go
+++ b/tests/integration/explain/default/top_with_count_test.go
@@ -62,11 +62,8 @@ func TestDefaultExplainTopLevelCountRequest(t *testing.T) {
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -126,11 +123,8 @@ func TestDefaultExplainTopLevelCountRequestWithFilter(t *testing.T) {
"_gt": int32(26),
},
},
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
diff --git a/tests/integration/explain/default/top_with_max_test.go b/tests/integration/explain/default/top_with_max_test.go
index 16d53b5007..0342941dcb 100644
--- a/tests/integration/explain/default/top_with_max_test.go
+++ b/tests/integration/explain/default/top_with_max_test.go
@@ -66,11 +66,8 @@ func TestDefaultExplain_WithTopLevelMaxRequest_Succeeds(t *testing.T) {
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -132,11 +129,8 @@ func TestDefaultExplain_WithTopLevelMaxRequestWithFilter_Succeeds(t *testing.T)
"_gt": int32(26),
},
},
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
diff --git a/tests/integration/explain/default/top_with_min_test.go b/tests/integration/explain/default/top_with_min_test.go
index b212953a60..41a5394763 100644
--- a/tests/integration/explain/default/top_with_min_test.go
+++ b/tests/integration/explain/default/top_with_min_test.go
@@ -66,11 +66,8 @@ func TestDefaultExplain_WithTopLevelMinRequest_Succeeds(t *testing.T) {
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -132,11 +129,8 @@ func TestDefaultExplain_WithTopLevelMinRequestWithFilter_Succeeds(t *testing.T)
"_gt": int32(26),
},
},
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
diff --git a/tests/integration/explain/default/top_with_sum_test.go b/tests/integration/explain/default/top_with_sum_test.go
index a2927beb25..0de7bd5466 100644
--- a/tests/integration/explain/default/top_with_sum_test.go
+++ b/tests/integration/explain/default/top_with_sum_test.go
@@ -66,11 +66,8 @@ func TestDefaultExplainTopLevelSumRequest(t *testing.T) {
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -132,11 +129,8 @@ func TestDefaultExplainTopLevelSumRequestWithFilter(t *testing.T) {
"_gt": int32(26),
},
},
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
diff --git a/tests/integration/explain/default/type_join_many_test.go b/tests/integration/explain/default/type_join_many_test.go
index 3790d43dfa..d21af2de24 100644
--- a/tests/integration/explain/default/type_join_many_test.go
+++ b/tests/integration/explain/default/type_join_many_test.go
@@ -71,11 +71,8 @@ func TestDefaultExplainRequestWithAOneToManyJoin(t *testing.T) {
"filter": nil,
"collectionID": "3",
"collectionName": "Author",
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -94,11 +91,8 @@ func TestDefaultExplainRequestWithAOneToManyJoin(t *testing.T) {
"filter": nil,
"collectionID": "1",
"collectionName": "Article",
- "spans": []dataMap{
- {
- "start": "/1",
- "end": "/2",
- },
+ "prefixes": []string{
+ "/1",
},
},
},
diff --git a/tests/integration/explain/default/type_join_one_test.go b/tests/integration/explain/default/type_join_one_test.go
index 97b17fbf1d..35c6de1001 100644
--- a/tests/integration/explain/default/type_join_one_test.go
+++ b/tests/integration/explain/default/type_join_one_test.go
@@ -72,11 +72,8 @@ func TestDefaultExplainRequestWithAOneToOneJoin(t *testing.T) {
"filter": nil,
"collectionID": "3",
"collectionName": "Author",
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -95,11 +92,8 @@ func TestDefaultExplainRequestWithAOneToOneJoin(t *testing.T) {
"filter": nil,
"collectionID": "4",
"collectionName": "AuthorContact",
- "spans": []dataMap{
- {
- "start": "/4",
- "end": "/5",
- },
+ "prefixes": []string{
+ "/4",
},
},
},
@@ -182,11 +176,8 @@ func TestDefaultExplainRequestWithTwoLevelDeepNestedJoins(t *testing.T) {
"filter": nil,
"collectionID": "3",
"collectionName": "Author",
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -215,11 +206,8 @@ func TestDefaultExplainRequestWithTwoLevelDeepNestedJoins(t *testing.T) {
"filter": nil,
"collectionID": "4",
"collectionName": "AuthorContact",
- "spans": []dataMap{
- {
- "start": "/4",
- "end": "/5",
- },
+ "prefixes": []string{
+ "/4",
},
},
},
@@ -237,11 +225,8 @@ func TestDefaultExplainRequestWithTwoLevelDeepNestedJoins(t *testing.T) {
"filter": nil,
"collectionID": "5",
"collectionName": "ContactAddress",
- "spans": []dataMap{
- {
- "start": "/5",
- "end": "/6",
- },
+ "prefixes": []string{
+ "/5",
},
},
},
diff --git a/tests/integration/explain/default/type_join_test.go b/tests/integration/explain/default/type_join_test.go
index c88c7980be..918fe04d30 100644
--- a/tests/integration/explain/default/type_join_test.go
+++ b/tests/integration/explain/default/type_join_test.go
@@ -105,11 +105,8 @@ func TestDefaultExplainRequestWith2SingleJoinsAnd1ManyJoin(t *testing.T) {
"filter": nil,
"collectionID": "3",
"collectionName": "Author",
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -129,11 +126,8 @@ func TestDefaultExplainRequestWith2SingleJoinsAnd1ManyJoin(t *testing.T) {
"filter": nil,
"collectionID": "4",
"collectionName": "AuthorContact",
- "spans": []dataMap{
- {
- "start": "/4",
- "end": "/5",
- },
+ "prefixes": []string{
+ "/4",
},
},
},
@@ -163,11 +157,8 @@ func TestDefaultExplainRequestWith2SingleJoinsAnd1ManyJoin(t *testing.T) {
"filter": nil,
"collectionID": "3",
"collectionName": "Author",
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -187,11 +178,8 @@ func TestDefaultExplainRequestWith2SingleJoinsAnd1ManyJoin(t *testing.T) {
"filter": nil,
"collectionID": "1",
"collectionName": "Article",
- "spans": []dataMap{
- {
- "start": "/1",
- "end": "/2",
- },
+ "prefixes": []string{
+ "/1",
},
},
},
@@ -222,11 +210,8 @@ func TestDefaultExplainRequestWith2SingleJoinsAnd1ManyJoin(t *testing.T) {
"filter": nil,
"collectionID": "3",
"collectionName": "Author",
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -240,11 +225,8 @@ func TestDefaultExplainRequestWith2SingleJoinsAnd1ManyJoin(t *testing.T) {
"filter": nil,
"collectionID": "4",
"collectionName": "AuthorContact",
- "spans": []dataMap{
- {
- "start": "/4",
- "end": "/5",
- },
+ "prefixes": []string{
+ "/4",
},
},
},
diff --git a/tests/integration/explain/default/type_join_with_filter_doc_id_test.go b/tests/integration/explain/default/type_join_with_filter_doc_id_test.go
index 8a29156009..5dc9cbabf2 100644
--- a/tests/integration/explain/default/type_join_with_filter_doc_id_test.go
+++ b/tests/integration/explain/default/type_join_with_filter_doc_id_test.go
@@ -85,15 +85,9 @@ func TestDefaultExplainRequestWithRelatedAndRegularFilterAndDocIDs(t *testing.T)
"_eq": "John Grisham",
},
},
- "spans": []dataMap{
- {
- "start": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
- "end": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9e",
- },
- {
- "start": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f8e",
- "end": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f8f",
- },
+ "prefixes": []string{
+ "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
+ "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f8e",
},
},
},
@@ -182,11 +176,8 @@ func TestDefaultExplainRequestWithManyRelatedFiltersAndDocID(t *testing.T) {
"_eq": "Cornelia Funke",
},
},
- "spans": []dataMap{
- {
- "start": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
- "end": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9e",
- },
+ "prefixes": []string{
+ "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
},
},
},
diff --git a/tests/integration/explain/default/type_join_with_filter_test.go b/tests/integration/explain/default/type_join_with_filter_test.go
index 1c7a35c1ba..275b7c2c8b 100644
--- a/tests/integration/explain/default/type_join_with_filter_test.go
+++ b/tests/integration/explain/default/type_join_with_filter_test.go
@@ -78,11 +78,8 @@ func TestDefaultExplainRequestWithRelatedAndRegularFilter(t *testing.T) {
"_eq": "John Grisham",
},
},
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -168,11 +165,8 @@ func TestDefaultExplainRequestWithManyRelatedFilters(t *testing.T) {
"_eq": "Cornelia Funke",
},
},
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
diff --git a/tests/integration/explain/default/update_test.go b/tests/integration/explain/default/update_test.go
index 8941a710ec..26f9c538b6 100644
--- a/tests/integration/explain/default/update_test.go
+++ b/tests/integration/explain/default/update_test.go
@@ -87,11 +87,8 @@ func TestDefaultExplainMutationRequestWithUpdateUsingBooleanFilter(t *testing.T)
"_eq": true,
},
},
- "spans": []dataMap{
- {
- "end": "/4",
- "start": "/3",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -151,15 +148,9 @@ func TestDefaultExplainMutationRequestWithUpdateUsingIds(t *testing.T) {
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "end": "/3/bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67g",
- "start": "/3/bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f",
- },
- {
- "end": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9e",
- "start": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
- },
+ "prefixes": []string{
+ "/3/bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f",
+ "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
},
},
},
@@ -215,11 +206,8 @@ func TestDefaultExplainMutationRequestWithUpdateUsingId(t *testing.T) {
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3/bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f",
- "end": "/3/bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67g",
- },
+ "prefixes": []string{
+ "/3/bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f",
},
},
},
@@ -292,15 +280,9 @@ func TestDefaultExplainMutationRequestWithUpdateUsingIdsAndFilter(t *testing.T)
"_eq": true,
},
},
- "spans": []dataMap{
- {
- "start": "/3/bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f",
- "end": "/3/bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67g",
- },
- {
- "start": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
- "end": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9e",
- },
+ "prefixes": []string{
+ "/3/bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f",
+ "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
},
},
},
diff --git a/tests/integration/explain/default/upsert_test.go b/tests/integration/explain/default/upsert_test.go
index 7cc38294e8..8ff2a964e6 100644
--- a/tests/integration/explain/default/upsert_test.go
+++ b/tests/integration/explain/default/upsert_test.go
@@ -87,11 +87,8 @@ func TestDefaultExplainMutationRequest_WithUpsert_Succeeds(t *testing.T) {
"_eq": "Bob",
},
},
- "spans": []dataMap{
- {
- "end": "/4",
- "start": "/3",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
diff --git a/tests/integration/explain/default/with_average_join_test.go b/tests/integration/explain/default/with_average_join_test.go
index 5d65408540..455f0023b3 100644
--- a/tests/integration/explain/default/with_average_join_test.go
+++ b/tests/integration/explain/default/with_average_join_test.go
@@ -114,11 +114,8 @@ func TestDefaultExplainRequestWithAverageOnJoinedField(t *testing.T) {
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -134,11 +131,8 @@ func TestDefaultExplainRequestWithAverageOnJoinedField(t *testing.T) {
"_ne": nil,
},
},
- "spans": []dataMap{
- {
- "start": "/2",
- "end": "/3",
- },
+ "prefixes": []string{
+ "/2",
},
},
},
@@ -273,11 +267,8 @@ func TestDefaultExplainRequestWithAverageOnMultipleJoinedFieldsWithFilter(t *tes
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -293,11 +284,8 @@ func TestDefaultExplainRequestWithAverageOnMultipleJoinedFieldsWithFilter(t *tes
"_ne": nil,
},
},
- "spans": []dataMap{
- {
- "start": "/2",
- "end": "/3",
- },
+ "prefixes": []string{
+ "/2",
},
},
},
@@ -319,11 +307,8 @@ func TestDefaultExplainRequestWithAverageOnMultipleJoinedFieldsWithFilter(t *tes
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -340,11 +325,8 @@ func TestDefaultExplainRequestWithAverageOnMultipleJoinedFieldsWithFilter(t *tes
"_ne": nil,
},
},
- "spans": []dataMap{
- {
- "start": "/1",
- "end": "/2",
- },
+ "prefixes": []string{
+ "/1",
},
},
},
diff --git a/tests/integration/explain/default/with_average_test.go b/tests/integration/explain/default/with_average_test.go
index 71a66aa6f8..132632cb4c 100644
--- a/tests/integration/explain/default/with_average_test.go
+++ b/tests/integration/explain/default/with_average_test.go
@@ -94,11 +94,8 @@ func TestDefaultExplainRequestWithAverageOnArrayField(t *testing.T) {
"collectionID": "2",
"collectionName": "Book",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/2",
- "end": "/3",
- },
+ "prefixes": []string{
+ "/2",
},
},
},
diff --git a/tests/integration/explain/default/with_count_join_test.go b/tests/integration/explain/default/with_count_join_test.go
index a406855b71..a4c7e16f28 100644
--- a/tests/integration/explain/default/with_count_join_test.go
+++ b/tests/integration/explain/default/with_count_join_test.go
@@ -84,11 +84,8 @@ func TestDefaultExplainRequestWithCountOnOneToManyJoinedField(t *testing.T) {
"filter": nil,
"collectionID": "3",
"collectionName": "Author",
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -100,11 +97,8 @@ func TestDefaultExplainRequestWithCountOnOneToManyJoinedField(t *testing.T) {
"filter": nil,
"collectionID": "2",
"collectionName": "Book",
- "spans": []dataMap{
- {
- "start": "/2",
- "end": "/3",
- },
+ "prefixes": []string{
+ "/2",
},
},
},
@@ -195,11 +189,8 @@ func TestDefaultExplainRequestWithCountOnOneToManyJoinedFieldWithManySources(t *
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -211,11 +202,8 @@ func TestDefaultExplainRequestWithCountOnOneToManyJoinedFieldWithManySources(t *
"collectionID": "2",
"collectionName": "Book",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/2",
- "end": "/3",
- },
+ "prefixes": []string{
+ "/2",
},
},
},
@@ -237,11 +225,8 @@ func TestDefaultExplainRequestWithCountOnOneToManyJoinedFieldWithManySources(t *
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -253,11 +238,8 @@ func TestDefaultExplainRequestWithCountOnOneToManyJoinedFieldWithManySources(t *
"collectionID": "1",
"collectionName": "Article",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/1",
- "end": "/2",
- },
+ "prefixes": []string{
+ "/1",
},
},
},
diff --git a/tests/integration/explain/default/with_count_test.go b/tests/integration/explain/default/with_count_test.go
index 311c52d599..8c5e0f5d76 100644
--- a/tests/integration/explain/default/with_count_test.go
+++ b/tests/integration/explain/default/with_count_test.go
@@ -72,11 +72,8 @@ func TestDefaultExplainRequestWithCountOnInlineArrayField(t *testing.T) {
"filter": nil,
"collectionID": "2",
"collectionName": "Book",
- "spans": []dataMap{
- {
- "start": "/2",
- "end": "/3",
- },
+ "prefixes": []string{
+ "/2",
},
},
},
diff --git a/tests/integration/explain/default/with_filter_doc_id_test.go b/tests/integration/explain/default/with_filter_doc_id_test.go
index 3e08aedb3b..a453716293 100644
--- a/tests/integration/explain/default/with_filter_doc_id_test.go
+++ b/tests/integration/explain/default/with_filter_doc_id_test.go
@@ -53,11 +53,8 @@ func TestDefaultExplainRequestWithDocIDFilter(t *testing.T) {
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
- "end": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9e",
- },
+ "prefixes": []string{
+ "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
},
},
},
@@ -105,11 +102,8 @@ func TestDefaultExplainRequestWithDocIDsFilterUsingOneID(t *testing.T) {
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
- "end": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9e",
- },
+ "prefixes": []string{
+ "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
},
},
},
@@ -163,15 +157,9 @@ func TestDefaultExplainRequestWithDocIDsFilterUsingMultipleButDuplicateIDs(t *te
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
- "end": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9e",
- },
- {
- "start": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
- "end": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9e",
- },
+ "prefixes": []string{
+ "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
+ "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
},
},
},
@@ -225,15 +213,9 @@ func TestDefaultExplainRequestWithDocIDsFilterUsingMultipleUniqueIDs(t *testing.
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
- "end": "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9e",
- },
- {
- "start": "/3/bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f",
- "end": "/3/bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67g",
- },
+ "prefixes": []string{
+ "/3/bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
+ "/3/bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f",
},
},
},
@@ -289,11 +271,8 @@ func TestDefaultExplainRequestWithMatchingIDFilter(t *testing.T) {
"_eq": "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d",
},
},
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
diff --git a/tests/integration/explain/default/with_filter_test.go b/tests/integration/explain/default/with_filter_test.go
index 96e99e19ac..b22afeb60d 100644
--- a/tests/integration/explain/default/with_filter_test.go
+++ b/tests/integration/explain/default/with_filter_test.go
@@ -48,11 +48,8 @@ func TestDefaultExplainRequestWithStringEqualFilter(t *testing.T) {
"_eq": "Lone",
},
},
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -95,11 +92,8 @@ func TestDefaultExplainRequestWithIntegerEqualFilter(t *testing.T) {
"_eq": int32(26),
},
},
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -142,11 +136,8 @@ func TestDefaultExplainRequestWithGreaterThanFilter(t *testing.T) {
"_gt": int32(20),
},
},
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -198,11 +189,8 @@ func TestDefaultExplainRequestWithLogicalCompoundAndFilter(t *testing.T) {
},
},
},
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -254,11 +242,8 @@ func TestDefaultExplainRequestWithLogicalCompoundOrFilter(t *testing.T) {
},
},
},
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -305,11 +290,8 @@ func TestDefaultExplainRequestWithMatchInsideList(t *testing.T) {
},
},
},
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -354,11 +336,8 @@ func TestDefaultExplainRequest_WithJSONEqualFilter_Succeeds(t *testing.T) {
},
},
},
- "spans": []dataMap{
- {
- "start": "/1",
- "end": "/2",
- },
+ "prefixes": []string{
+ "/1",
},
},
},
diff --git a/tests/integration/explain/default/with_max_join_test.go b/tests/integration/explain/default/with_max_join_test.go
index a282f9f134..f6c69f470f 100644
--- a/tests/integration/explain/default/with_max_join_test.go
+++ b/tests/integration/explain/default/with_max_join_test.go
@@ -88,11 +88,8 @@ func TestDefaultExplainRequest_WithMaxOnOneToManyJoinedField_Succeeds(t *testing
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -104,11 +101,8 @@ func TestDefaultExplainRequest_WithMaxOnOneToManyJoinedField_Succeeds(t *testing
"collectionID": "2",
"collectionName": "Book",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/2",
- "end": "/3",
- },
+ "prefixes": []string{
+ "/2",
},
},
},
@@ -183,11 +177,8 @@ func TestDefaultExplainRequest_WithMaxOnOneToManyJoinedFieldWithFilter_Succeeds(
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -203,11 +194,8 @@ func TestDefaultExplainRequest_WithMaxOnOneToManyJoinedFieldWithFilter_Succeeds(
"_eq": "To my dear readers",
},
},
- "spans": []dataMap{
- {
- "start": "/1",
- "end": "/2",
- },
+ "prefixes": []string{
+ "/1",
},
},
},
@@ -300,11 +288,8 @@ func TestDefaultExplainRequest_WithMaxOnOneToManyJoinedFieldWithManySources_Succ
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -316,11 +301,8 @@ func TestDefaultExplainRequest_WithMaxOnOneToManyJoinedFieldWithManySources_Succ
"collectionID": "2",
"collectionName": "Book",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/2",
- "end": "/3",
- },
+ "prefixes": []string{
+ "/2",
},
},
},
@@ -342,11 +324,8 @@ func TestDefaultExplainRequest_WithMaxOnOneToManyJoinedFieldWithManySources_Succ
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -358,11 +337,8 @@ func TestDefaultExplainRequest_WithMaxOnOneToManyJoinedFieldWithManySources_Succ
"collectionID": "1",
"collectionName": "Article",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/1",
- "end": "/2",
- },
+ "prefixes": []string{
+ "/1",
},
},
},
diff --git a/tests/integration/explain/default/with_max_test.go b/tests/integration/explain/default/with_max_test.go
index 823e3d5def..1c3f90e7c8 100644
--- a/tests/integration/explain/default/with_max_test.go
+++ b/tests/integration/explain/default/with_max_test.go
@@ -73,11 +73,8 @@ func TestDefaultExplainRequest_WithMaxOnInlineArrayField_ChildFieldWillBeEmpty(t
"collectionID": "2",
"collectionName": "Book",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/2",
- "end": "/3",
- },
+ "prefixes": []string{
+ "/2",
},
},
},
diff --git a/tests/integration/explain/default/with_min_join_test.go b/tests/integration/explain/default/with_min_join_test.go
index 2e12bf1788..5e1fa6c1bb 100644
--- a/tests/integration/explain/default/with_min_join_test.go
+++ b/tests/integration/explain/default/with_min_join_test.go
@@ -88,11 +88,8 @@ func TestDefaultExplainRequest_WithMinOnOneToManyJoinedField_Succeeds(t *testing
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -104,11 +101,8 @@ func TestDefaultExplainRequest_WithMinOnOneToManyJoinedField_Succeeds(t *testing
"collectionID": "2",
"collectionName": "Book",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/2",
- "end": "/3",
- },
+ "prefixes": []string{
+ "/2",
},
},
},
@@ -183,11 +177,8 @@ func TestDefaultExplainRequest_WithMinOnOneToManyJoinedFieldWithFilter_Succeeds(
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -203,11 +194,8 @@ func TestDefaultExplainRequest_WithMinOnOneToManyJoinedFieldWithFilter_Succeeds(
"_eq": "To my dear readers",
},
},
- "spans": []dataMap{
- {
- "start": "/1",
- "end": "/2",
- },
+ "prefixes": []string{
+ "/1",
},
},
},
@@ -300,11 +288,8 @@ func TestDefaultExplainRequest_WithMinOnOneToManyJoinedFieldWithManySources_Succ
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -316,11 +301,8 @@ func TestDefaultExplainRequest_WithMinOnOneToManyJoinedFieldWithManySources_Succ
"collectionID": "2",
"collectionName": "Book",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/2",
- "end": "/3",
- },
+ "prefixes": []string{
+ "/2",
},
},
},
@@ -342,11 +324,8 @@ func TestDefaultExplainRequest_WithMinOnOneToManyJoinedFieldWithManySources_Succ
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -358,11 +337,8 @@ func TestDefaultExplainRequest_WithMinOnOneToManyJoinedFieldWithManySources_Succ
"collectionID": "1",
"collectionName": "Article",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/1",
- "end": "/2",
- },
+ "prefixes": []string{
+ "/1",
},
},
},
diff --git a/tests/integration/explain/default/with_min_test.go b/tests/integration/explain/default/with_min_test.go
index 63da42909e..aabf3c6903 100644
--- a/tests/integration/explain/default/with_min_test.go
+++ b/tests/integration/explain/default/with_min_test.go
@@ -73,11 +73,8 @@ func TestDefaultExplainRequest_WithMinOnInlineArrayField_ChildFieldWillBeEmpty(t
"collectionID": "2",
"collectionName": "Book",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/2",
- "end": "/3",
- },
+ "prefixes": []string{
+ "/2",
},
},
},
diff --git a/tests/integration/explain/default/with_sum_join_test.go b/tests/integration/explain/default/with_sum_join_test.go
index d2b7b6dc64..47a4f1fb96 100644
--- a/tests/integration/explain/default/with_sum_join_test.go
+++ b/tests/integration/explain/default/with_sum_join_test.go
@@ -88,11 +88,8 @@ func TestDefaultExplainRequestWithSumOnOneToManyJoinedField(t *testing.T) {
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -104,11 +101,8 @@ func TestDefaultExplainRequestWithSumOnOneToManyJoinedField(t *testing.T) {
"collectionID": "2",
"collectionName": "Book",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/2",
- "end": "/3",
- },
+ "prefixes": []string{
+ "/2",
},
},
},
@@ -183,11 +177,8 @@ func TestDefaultExplainRequestWithSumOnOneToManyJoinedFieldWithFilter(t *testing
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -203,11 +194,8 @@ func TestDefaultExplainRequestWithSumOnOneToManyJoinedFieldWithFilter(t *testing
"_eq": "To my dear readers",
},
},
- "spans": []dataMap{
- {
- "start": "/1",
- "end": "/2",
- },
+ "prefixes": []string{
+ "/1",
},
},
},
@@ -300,11 +288,8 @@ func TestDefaultExplainRequestWithSumOnOneToManyJoinedFieldWithManySources(t *te
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -316,11 +301,8 @@ func TestDefaultExplainRequestWithSumOnOneToManyJoinedFieldWithManySources(t *te
"collectionID": "2",
"collectionName": "Book",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/2",
- "end": "/3",
- },
+ "prefixes": []string{
+ "/2",
},
},
},
@@ -342,11 +324,8 @@ func TestDefaultExplainRequestWithSumOnOneToManyJoinedFieldWithManySources(t *te
"collectionID": "3",
"collectionName": "Author",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
@@ -358,11 +337,8 @@ func TestDefaultExplainRequestWithSumOnOneToManyJoinedFieldWithManySources(t *te
"collectionID": "1",
"collectionName": "Article",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/1",
- "end": "/2",
- },
+ "prefixes": []string{
+ "/1",
},
},
},
diff --git a/tests/integration/explain/default/with_sum_test.go b/tests/integration/explain/default/with_sum_test.go
index 2f1673ce3a..aac2836c81 100644
--- a/tests/integration/explain/default/with_sum_test.go
+++ b/tests/integration/explain/default/with_sum_test.go
@@ -73,11 +73,8 @@ func TestDefaultExplainRequestWithSumOnInlineArrayField_ChildFieldWillBeEmpty(t
"collectionID": "2",
"collectionName": "Book",
"filter": nil,
- "spans": []dataMap{
- {
- "start": "/2",
- "end": "/3",
- },
+ "prefixes": []string{
+ "/2",
},
},
},
diff --git a/tests/integration/explain/simple/basic_test.go b/tests/integration/explain/simple/basic_test.go
index f061785fde..24969ac6ba 100644
--- a/tests/integration/explain/simple/basic_test.go
+++ b/tests/integration/explain/simple/basic_test.go
@@ -47,11 +47,8 @@ func TestSimpleExplainRequest(t *testing.T) {
"filter": nil,
"collectionID": "3",
"collectionName": "Author",
- "spans": []dataMap{
- {
- "start": "/3",
- "end": "/4",
- },
+ "prefixes": []string{
+ "/3",
},
},
},
diff --git a/tests/integration/query/inline_array/with_max_doc_id_test.go b/tests/integration/query/inline_array/with_max_doc_id_test.go
index 3a473db9f2..449d673073 100644
--- a/tests/integration/query/inline_array/with_max_doc_id_test.go
+++ b/tests/integration/query/inline_array/with_max_doc_id_test.go
@@ -16,7 +16,7 @@ import (
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
-// This test is meant to provide coverage of the planNode.Spans
+// This test is meant to provide coverage of the planNode.Prefixes
// func by targeting a specific docID in the parent select.
func TestQueryInlineNillableFloatArray_WithDocIDAndMax_Succeeds(t *testing.T) {
test := testUtils.TestCase{
diff --git a/tests/integration/query/inline_array/with_min_doc_id_test.go b/tests/integration/query/inline_array/with_min_doc_id_test.go
index b8ad3d2c98..0c34388a36 100644
--- a/tests/integration/query/inline_array/with_min_doc_id_test.go
+++ b/tests/integration/query/inline_array/with_min_doc_id_test.go
@@ -16,7 +16,7 @@ import (
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
-// This test is meant to provide coverage of the planNode.Spans
+// This test is meant to provide coverage of the planNode.Prefixes
// func by targeting a specific docID in the parent select.
func TestQueryInlineNillableFloatArray_WithDocIDAndMin_Succeeds(t *testing.T) {
test := testUtils.TestCase{
diff --git a/tests/integration/query/simple/simple_test.go b/tests/integration/query/simple/simple_test.go
index eff5b09d65..98e8bca42a 100644
--- a/tests/integration/query/simple/simple_test.go
+++ b/tests/integration/query/simple/simple_test.go
@@ -214,3 +214,81 @@ func TestQuerySimpleWithDefaultValue(t *testing.T) {
executeTestCase(t, test)
}
+
+// This test is to ensure that deleted docs from the next collection ID are not returned in the query results.
+// It documents the fixing of the bug described in #3242.
+func TestQuerySimple_WithDeletedDocsInCollection2_ShouldNotYieldDeletedDocsOnCollection1Query(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Deleted docs in collection 2 should not yield deleted docs on collection 1 query",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User {
+ name: String
+ }
+ type Friend {
+ name: String
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Shahzad",
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "John",
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ DocMap: map[string]any{
+ "name": "Andy",
+ },
+ },
+ testUtils.Request{
+ Request: `query {
+ User {
+ _docID
+ }
+ }`,
+ Results: map[string]any{
+ "User": []map[string]any{
+ {
+ "_docID": testUtils.NewDocIndex(0, 1),
+ },
+ {
+ "_docID": testUtils.NewDocIndex(0, 0),
+ },
+ },
+ },
+ },
+ testUtils.DeleteDoc{
+ CollectionID: 1,
+ DocID: 0,
+ },
+ testUtils.Request{
+ Request: `query {
+ User {
+ _docID
+ }
+ }`,
+ Results: map[string]any{
+ "User": []map[string]any{
+ {
+ "_docID": testUtils.NewDocIndex(0, 1),
+ },
+ {
+ "_docID": testUtils.NewDocIndex(0, 0),
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/schema/migrations/query/with_doc_id_test.go b/tests/integration/schema/migrations/query/with_doc_id_test.go
index bb28b90ac8..73258cf932 100644
--- a/tests/integration/schema/migrations/query/with_doc_id_test.go
+++ b/tests/integration/schema/migrations/query/with_doc_id_test.go
@@ -20,7 +20,7 @@ import (
"github.com/sourcenetwork/defradb/tests/lenses"
)
-// This test asserts that spans are being passed correctly through the new Lens fetcher.
+// This test asserts that prefixes are being passed correctly through the new Lens fetcher.
func TestSchemaMigrationQueryByDocID(t *testing.T) {
test := testUtils.TestCase{
Description: "Test schema migration, query by docID",
From cdc57382ff96fa92f7492fa1476769e9cf33ee1f Mon Sep 17 00:00:00 2001
From: Emmanuel Ferdman
Date: Thu, 21 Nov 2024 15:53:23 +0200
Subject: [PATCH 29/47] docs: Update discord link (#3231)
## Relevant issue(s)
Resolves #3236
## Description
Small PR - adds `https://` to the Discord link to prevent GitHub from
interpreting it as an internal repository link.
## Tasks
- [x] I made sure the code is well commented, particularly
hard-to-understand areas.
- [ ] I made sure the repository-held documentation is changed
accordingly.
- [ ] I made sure the pull request title adheres to the conventional
commit style (the subset used in the project can be found in
[tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)).
- [ ] I made sure to discuss its limitations such as threats to
validity, vulnerability to mistake and misuse, robustness to
invalidation of assumptions, resource requirements, ...
## How has this been tested?
(*replace*) Describe the tests performed to verify the changes. Provide
instructions to reproduce them.
Specify the platform(s) on which this was tested:
- (*modify the list accordingly*)
- Arch Linux
- Debian Linux
- MacOS
- Windows
Signed-off-by: Emmanuel Ferdman
---
CONTRIBUTING.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index e63fc6afe4..a495502a22 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -5,7 +5,7 @@ This document will guide you through the process of contributing to the project.
All contributions are appreciated, whether it's identifying problems, highlighting missing features, or contributing to the codebase in simple or complex ways.
-You are encouraged to join the [Source Network Discord](discord.gg/w7jYQVJ) to discuss ideas, ask questions, and find inspiration for future developments.
+You are encouraged to join the [Source Network Discord](https://discord.gg/w7jYQVJ) to discuss ideas, ask questions, and find inspiration for future developments.
## Getting started
To get started, clone the repository, build, and run it:
From 6cacff13cbd26020835e9e9d82859377fc474856 Mon Sep 17 00:00:00 2001
From: Islam Aliev
Date: Thu, 21 Nov 2024 22:39:36 +0100
Subject: [PATCH 30/47] refactor: Add unified JSON interface (#3265)
## Relevant issue(s)
Resolves #3264
## Description
Introduces a common interface for JSON values that would allow later to
do better processing of json objects.
---
client/document.go | 70 +--
client/document_test.go | 6 +-
client/json.go | 433 +++++++++++++++
client/json_test.go | 519 ++++++++++++++++++
client/normal_new.go | 2 +-
client/normal_scalar.go | 17 +-
client/normal_value.go | 2 +-
client/normal_value_test.go | 55 +-
client/normal_void.go | 2 +-
.../i3265-unified-json-types.md | 4 +
.../field_kinds/field_kind_json_test.go | 20 +-
tests/integration/query/json/with_ge_test.go | 4 +-
tests/integration/query/json/with_gt_test.go | 2 +-
tests/integration/query/json/with_lt_test.go | 2 +-
.../integration/query/json/with_nlike_test.go | 8 +-
tests/integration/utils.go | 7 +-
16 files changed, 987 insertions(+), 166 deletions(-)
create mode 100644 client/json.go
create mode 100644 client/json_test.go
create mode 100644 docs/data_format_changes/i3265-unified-json-types.md
diff --git a/client/document.go b/client/document.go
index 4abadcac52..b4ae927522 100644
--- a/client/document.go
+++ b/client/document.go
@@ -358,11 +358,11 @@ func validateFieldSchema(val any, field FieldDefinition) (NormalValue, error) {
return NewNormalNillableIntArray(v), nil
case FieldKind_NILLABLE_JSON:
- v, err := getJSON(val)
+ v, err := NewJSON(val)
if err != nil {
return nil, err
}
- return NewNormalJSON(&JSON{v}), nil
+ return NewNormalJSON(v), nil
}
return nil, NewErrUnhandledType("FieldKind", field.Kind)
@@ -438,72 +438,6 @@ func getDateTime(v any) (time.Time, error) {
return time.Parse(time.RFC3339, s)
}
-// getJSON converts the given value to a valid JSON value.
-//
-// If the value is of type *fastjson.Value it needs to be
-// manually parsed. All other values are valid JSON.
-func getJSON(v any) (any, error) {
- val, ok := v.(*fastjson.Value)
- if !ok {
- return v, nil
- }
- switch val.Type() {
- case fastjson.TypeArray:
- arr, err := val.Array()
- if err != nil {
- return nil, err
- }
- out := make([]any, len(arr))
- for i, v := range arr {
- c, err := getJSON(v)
- if err != nil {
- return nil, err
- }
- out[i] = c
- }
- return out, nil
-
- case fastjson.TypeObject:
- obj, err := val.Object()
- if err != nil {
- return nil, err
- }
- out := make(map[string]any)
- obj.Visit(func(key []byte, v *fastjson.Value) {
- c, e := getJSON(v)
- out[string(key)] = c
- err = errors.Join(err, e)
- })
- return out, err
-
- case fastjson.TypeFalse:
- return false, nil
-
- case fastjson.TypeTrue:
- return true, nil
-
- case fastjson.TypeNumber:
- out, err := val.Int64()
- if err == nil {
- return out, nil
- }
- return val.Float64()
-
- case fastjson.TypeString:
- out, err := val.StringBytes()
- if err != nil {
- return nil, err
- }
- return string(out), nil
-
- case fastjson.TypeNull:
- return nil, nil
-
- default:
- return nil, NewErrInvalidJSONPayload(v)
- }
-}
-
func getArray[T any](
v any,
typeGetter func(any) (T, error),
diff --git a/client/document_test.go b/client/document_test.go
index b74af54b27..4f4dc9aa48 100644
--- a/client/document_test.go
+++ b/client/document_test.go
@@ -194,13 +194,13 @@ func TestNewFromJSON_WithValidJSONFieldValue_NoError(t *testing.T) {
assert.Equal(t, doc.values[doc.fields["Age"]].IsDocument(), false)
assert.Equal(t, doc.values[doc.fields["Custom"]].Value(), map[string]any{
"string": "maple",
- "int": int64(260),
+ "int": float64(260),
"float": float64(3.14),
"false": false,
"true": true,
"null": nil,
- "array": []any{"one", int64(1)},
- "object": map[string]any{"one": int64(1)},
+ "array": []any{"one", float64(1)},
+ "object": map[string]any{"one": float64(1)},
})
assert.Equal(t, doc.values[doc.fields["Custom"]].IsDocument(), false)
}
diff --git a/client/json.go b/client/json.go
new file mode 100644
index 0000000000..23a23de2b1
--- /dev/null
+++ b/client/json.go
@@ -0,0 +1,433 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package client
+
+import (
+ "encoding/json"
+ "io"
+
+ "github.com/valyala/fastjson"
+ "golang.org/x/exp/constraints"
+)
+
+// JSON represents a JSON value that can be any valid JSON type: object, array, number, string, boolean, or null.
+// It provides type-safe access to the underlying value through various accessor methods.
+type JSON interface {
+ json.Marshaler
+ // Array returns the value as a JSON array along with a boolean indicating if the value is an array.
+ // Returns nil and false if the value is not an array.
+ Array() ([]JSON, bool)
+
+ // Object returns the value as a JSON object along with a boolean indicating if the value is an object.
+ // Returns nil and false if the value is not an object.
+ Object() (map[string]JSON, bool)
+
+ // Number returns the value as a number along with a boolean indicating if the value is a number.
+ // Returns 0 and false if the value is not a number.
+ Number() (float64, bool)
+
+ // String returns the value as a string along with a boolean indicating if the value is a string.
+ // Returns empty string and false if the value is not a string.
+ String() (string, bool)
+
+ // Bool returns the value as a boolean along with a boolean indicating if the value is a boolean.
+ // Returns false and false if the value is not a boolean.
+ Bool() (bool, bool)
+
+ // IsNull returns true if the value is null, false otherwise.
+ IsNull() bool
+
+ // Value returns the value that JSON represents.
+ // The type will be one of: map[string]JSON, []JSON, float64, string, bool, or nil.
+ Value() any
+
+ // Unwrap returns the underlying value with all nested JSON values unwrapped.
+ // For objects and arrays, this recursively unwraps all nested JSON values.
+ Unwrap() any
+
+ // Marshal writes the JSON value to the writer.
+ // Returns an error if marshaling fails.
+ Marshal(w io.Writer) error
+}
+
+type jsonVoid struct{}
+
+func (v jsonVoid) Object() (map[string]JSON, bool) {
+ return nil, false
+}
+
+func (v jsonVoid) Array() ([]JSON, bool) {
+ return nil, false
+}
+
+func (v jsonVoid) Number() (float64, bool) {
+ return 0, false
+}
+
+func (v jsonVoid) String() (string, bool) {
+ return "", false
+}
+
+func (v jsonVoid) Bool() (bool, bool) {
+ return false, false
+}
+
+func (v jsonVoid) IsNull() bool {
+ return false
+}
+
+type jsonBase[T any] struct {
+ jsonVoid
+ val T
+}
+
+func (v jsonBase[T]) Value() any {
+ return v.val
+}
+
+func (v jsonBase[T]) Unwrap() any {
+ return v.val
+}
+
+func (v jsonBase[T]) Marshal(w io.Writer) error {
+ return json.NewEncoder(w).Encode(v.val)
+}
+
+func (v jsonBase[T]) MarshalJSON() ([]byte, error) {
+ return json.Marshal(v.val)
+}
+
+type jsonObject struct {
+ jsonBase[map[string]JSON]
+}
+
+var _ JSON = jsonObject{}
+
+func (obj jsonObject) Object() (map[string]JSON, bool) {
+ return obj.val, true
+}
+
+func (obj jsonObject) MarshalJSON() ([]byte, error) {
+ return json.Marshal(obj.val)
+}
+
+func (obj jsonObject) Unwrap() any {
+ result := make(map[string]any, len(obj.jsonBase.val))
+ for k, v := range obj.val {
+ result[k] = v.Unwrap()
+ }
+ return result
+}
+
+type jsonArray struct {
+ jsonBase[[]JSON]
+}
+
+var _ JSON = jsonArray{}
+
+func (arr jsonArray) Array() ([]JSON, bool) {
+ return arr.val, true
+}
+
+func (arr jsonArray) MarshalJSON() ([]byte, error) {
+ return json.Marshal(arr.val)
+}
+
+func (arr jsonArray) Unwrap() any {
+ result := make([]any, len(arr.jsonBase.val))
+ for i := range arr.val {
+ result[i] = arr.val[i].Unwrap()
+ }
+ return result
+}
+
+type jsonNumber struct {
+ jsonBase[float64]
+}
+
+var _ JSON = jsonNumber{}
+
+func (n jsonNumber) Number() (float64, bool) {
+ return n.val, true
+}
+
+func (n jsonNumber) MarshalJSON() ([]byte, error) {
+ return json.Marshal(n.val)
+}
+
+type jsonString struct {
+ jsonBase[string]
+}
+
+var _ JSON = jsonString{}
+
+func (s jsonString) String() (string, bool) {
+ return s.val, true
+}
+
+func (s jsonString) MarshalJSON() ([]byte, error) {
+ return json.Marshal(s.val)
+}
+
+type jsonBool struct {
+ jsonBase[bool]
+}
+
+var _ JSON = jsonBool{}
+
+func (b jsonBool) Bool() (bool, bool) {
+ return b.val, true
+}
+
+func (b jsonBool) MarshalJSON() ([]byte, error) {
+ return json.Marshal(b.val)
+}
+
+type jsonNull struct {
+ jsonVoid
+}
+
+var _ JSON = jsonNull{}
+
+func (n jsonNull) IsNull() bool {
+ return true
+}
+
+func (n jsonNull) Value() any {
+ return nil
+}
+
+func (n jsonNull) Unwrap() any {
+ return nil
+}
+
+func (n jsonNull) Marshal(w io.Writer) error {
+ return json.NewEncoder(w).Encode(nil)
+}
+
+func (n jsonNull) MarshalJSON() ([]byte, error) {
+ return json.Marshal(nil)
+}
+
+func newJSONObject(val map[string]JSON) JSON {
+ return jsonObject{jsonBase[map[string]JSON]{val: val}}
+}
+
+func newJSONArray(val []JSON) JSON {
+ return jsonArray{jsonBase[[]JSON]{val: val}}
+}
+
+func newJSONNumber(val float64) JSON {
+ return jsonNumber{jsonBase[float64]{val: val}}
+}
+
+func newJSONString(val string) JSON {
+ return jsonString{jsonBase[string]{val: val}}
+}
+
+func newJSONBool(val bool) JSON {
+ return jsonBool{jsonBase[bool]{val: val}}
+}
+
+func newJSONNull() JSON {
+ return jsonNull{}
+}
+
+// ParseJSONBytes parses the given JSON bytes into a JSON value.
+// Returns error if the input is not valid JSON.
+func ParseJSONBytes(data []byte) (JSON, error) {
+ var p fastjson.Parser
+ v, err := p.ParseBytes(data)
+ if err != nil {
+ return nil, err
+ }
+ return NewJSONFromFastJSON(v), nil
+}
+
+// ParseJSONString parses the given JSON string into a JSON value.
+// Returns error if the input is not valid JSON.
+func ParseJSONString(data string) (JSON, error) {
+ // we could have called ParseJSONBytes([]byte(data)), but this would copy the string to a byte slice.
+ // fastjson.Parser.ParseBytes casts the bytes slice to a string internally, so we can avoid the extra copy.
+ var p fastjson.Parser
+ v, err := p.Parse(data)
+ if err != nil {
+ return nil, err
+ }
+ return NewJSONFromFastJSON(v), nil
+}
+
+// NewJSON creates a JSON value from a Go value.
+// The Go value must be one of:
+// - nil (becomes JSON null)
+// - *fastjson.Value
+// - string
+// - map[string]any
+// - bool
+// - numeric types (int8 through int64, uint8 through uint64, float32, float64)
+// - slice of any above type
+// - []any
+// Returns error if the input cannot be converted to JSON.
+func NewJSON(v any) (JSON, error) {
+ if v == nil {
+ return newJSONNull(), nil
+ }
+ switch val := v.(type) {
+ case *fastjson.Value:
+ return NewJSONFromFastJSON(val), nil
+ case string:
+ return newJSONString(val), nil
+ case map[string]any:
+ return NewJSONFromMap(val)
+ case bool:
+ return newJSONBool(val), nil
+ case int8:
+ return newJSONNumber(float64(val)), nil
+ case int16:
+ return newJSONNumber(float64(val)), nil
+ case int32:
+ return newJSONNumber(float64(val)), nil
+ case int64:
+ return newJSONNumber(float64(val)), nil
+ case int:
+ return newJSONNumber(float64(val)), nil
+ case uint8:
+ return newJSONNumber(float64(val)), nil
+ case uint16:
+ return newJSONNumber(float64(val)), nil
+ case uint32:
+ return newJSONNumber(float64(val)), nil
+ case uint64:
+ return newJSONNumber(float64(val)), nil
+ case uint:
+ return newJSONNumber(float64(val)), nil
+ case float32:
+ return newJSONNumber(float64(val)), nil
+ case float64:
+ return newJSONNumber(val), nil
+
+ case []bool:
+ return newJSONBoolArray(val), nil
+ case []int8:
+ return newJSONNumberArray(val), nil
+ case []int16:
+ return newJSONNumberArray(val), nil
+ case []int32:
+ return newJSONNumberArray(val), nil
+ case []int64:
+ return newJSONNumberArray(val), nil
+ case []int:
+ return newJSONNumberArray(val), nil
+ case []uint8:
+ return newJSONNumberArray(val), nil
+ case []uint16:
+ return newJSONNumberArray(val), nil
+ case []uint32:
+ return newJSONNumberArray(val), nil
+ case []uint64:
+ return newJSONNumberArray(val), nil
+ case []uint:
+ return newJSONNumberArray(val), nil
+ case []float32:
+ return newJSONNumberArray(val), nil
+ case []float64:
+ return newJSONNumberArray(val), nil
+ case []string:
+ return newJSONStringArray(val), nil
+
+ case []any:
+ return newJsonArrayFromAnyArray(val)
+ }
+
+ return nil, NewErrInvalidJSONPayload(v)
+}
+
+func newJsonArrayFromAnyArray(arr []any) (JSON, error) {
+ result := make([]JSON, len(arr))
+ for i := range arr {
+ jsonVal, err := NewJSON(arr[i])
+ if err != nil {
+ return nil, err
+ }
+ result[i] = jsonVal
+ }
+ return newJSONArray(result), nil
+}
+
+func newJSONBoolArray(v []bool) JSON {
+ arr := make([]JSON, len(v))
+ for i := range v {
+ arr[i] = newJSONBool(v[i])
+ }
+ return newJSONArray(arr)
+}
+
+func newJSONNumberArray[T constraints.Integer | constraints.Float](v []T) JSON {
+ arr := make([]JSON, len(v))
+ for i := range v {
+ arr[i] = newJSONNumber(float64(v[i]))
+ }
+ return newJSONArray(arr)
+}
+
+func newJSONStringArray(v []string) JSON {
+ arr := make([]JSON, len(v))
+ for i := range v {
+ arr[i] = newJSONString(v[i])
+ }
+ return newJSONArray(arr)
+}
+
+// NewJSONFromFastJSON creates a JSON value from a fastjson.Value.
+func NewJSONFromFastJSON(v *fastjson.Value) JSON {
+ switch v.Type() {
+ case fastjson.TypeObject:
+ fastObj := v.GetObject()
+ obj := make(map[string]JSON, fastObj.Len())
+ fastObj.Visit(func(k []byte, v *fastjson.Value) {
+ obj[string(k)] = NewJSONFromFastJSON(v)
+ })
+ return newJSONObject(obj)
+ case fastjson.TypeArray:
+ fastArr := v.GetArray()
+ arr := make([]JSON, len(fastArr))
+ for i := range fastArr {
+ arr[i] = NewJSONFromFastJSON(fastArr[i])
+ }
+ return newJSONArray(arr)
+ case fastjson.TypeNumber:
+ return newJSONNumber(v.GetFloat64())
+ case fastjson.TypeString:
+ return newJSONString(string(v.GetStringBytes()))
+ case fastjson.TypeTrue:
+ return newJSONBool(true)
+ case fastjson.TypeFalse:
+ return newJSONBool(false)
+ case fastjson.TypeNull:
+ return newJSONNull()
+ }
+ return nil
+}
+
+// NewJSONFromMap creates a JSON object from a map[string]any.
+// The map values must be valid Go values that can be converted to JSON.
+// Returns error if any map value cannot be converted to JSON.
+func NewJSONFromMap(data map[string]any) (JSON, error) {
+ obj := make(map[string]JSON, len(data))
+ for k, v := range data {
+ jsonVal, err := NewJSON(v)
+ if err != nil {
+ return nil, err
+ }
+ obj[k] = jsonVal
+ }
+ return newJSONObject(obj), nil
+}
diff --git a/client/json_test.go b/client/json_test.go
new file mode 100644
index 0000000000..9ac4d3b781
--- /dev/null
+++ b/client/json_test.go
@@ -0,0 +1,519 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package client
+
+import (
+ "bytes"
+ "encoding/json"
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+ "github.com/valyala/fastjson"
+)
+
+func TestParseJSONAndMarshal_WithValidInput_ShouldMarshal(t *testing.T) {
+ tests := []struct {
+ name string
+ fromFunc func(string) (JSON, error)
+ }{
+ {
+ name: "FromBytes",
+ fromFunc: func(data string) (JSON, error) { return ParseJSONBytes([]byte(data)) },
+ },
+ {
+ name: "FromString",
+ fromFunc: ParseJSONString,
+ },
+ {
+ name: "FromFastJSON",
+ fromFunc: func(data string) (JSON, error) {
+ var p fastjson.Parser
+ v, err := p.Parse(data)
+ if err != nil {
+ return nil, err
+ }
+ return NewJSONFromFastJSON(v), nil
+ },
+ },
+ {
+ name: "FromMap",
+ fromFunc: func(data string) (JSON, error) {
+ var result map[string]any
+ if err := json.Unmarshal([]byte(data), &result); err != nil {
+ return nil, err
+ }
+ return NewJSONFromMap(result)
+ },
+ },
+ }
+
+ data := `{"key1": "value1", "key2": 2, "key3": true, "key4": null, "key5": ["item1", 2, false]}`
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ jsonObj, err := tt.fromFunc(data)
+ require.NoError(t, err, "fromFunc failed with error %v", err)
+
+ var buf bytes.Buffer
+ err = jsonObj.Marshal(&buf)
+ require.NoError(t, err, "jsonObj.Marshal(&buf) failed with error %v", err)
+
+ actualStr := strings.ReplaceAll(buf.String(), "\n", "")
+ expectedStr := strings.ReplaceAll(data, " ", "")
+ require.Equal(t, actualStr, expectedStr, "Expected %s, got %s", expectedStr, actualStr)
+
+ rawJSON, err := jsonObj.MarshalJSON()
+ require.NoError(t, err, "jsonObj.MarshalJSON() failed with error %v", err)
+ actualStr = strings.ReplaceAll(string(rawJSON), "\n", "")
+ require.Equal(t, actualStr, expectedStr, "Expected %s, got %s", expectedStr, actualStr)
+ })
+ }
+}
+
+func TestNewJSONAndMarshal_WithInvalidInput_ShouldFail(t *testing.T) {
+ tests := []struct {
+ name string
+ fromFunc func(string) (JSON, error)
+ }{
+ {
+ name: "FromBytes",
+ fromFunc: func(data string) (JSON, error) { return ParseJSONBytes([]byte(data)) },
+ },
+ {
+ name: "FromString",
+ fromFunc: ParseJSONString,
+ },
+ {
+ name: "FromMap",
+ fromFunc: func(data string) (JSON, error) {
+ var result map[string]any
+ if err := json.Unmarshal([]byte(data), &result); err != nil {
+ return nil, err
+ }
+ return NewJSONFromMap(result)
+ },
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ _, err := tt.fromFunc(`{"key1": "value1}`)
+ require.Error(t, err, "Expected error, but got nil")
+ })
+ }
+}
+
+func TestNewJSONFomString_WithInvalidInput_Error(t *testing.T) {
+ _, err := ParseJSONString("str")
+ require.Error(t, err, "Expected error, but got nil")
+}
+
+func TestJSONObject_Methods_ShouldWorkAsExpected(t *testing.T) {
+ m := map[string]JSON{
+ "key": newJSONString("value"),
+ "nested": newJSONObject(map[string]JSON{
+ "inner": newJSONNumber(42),
+ "array": newJSONArray([]JSON{newJSONString("test"), newJSONBool(true)}),
+ }),
+ }
+ obj := newJSONObject(m)
+ expectedUnwrapped := map[string]any{
+ "key": "value",
+ "nested": map[string]any{
+ "inner": float64(42),
+ "array": []any{"test", true},
+ },
+ }
+
+ // Positive tests
+ val, ok := obj.Object()
+ require.True(t, ok)
+ require.Equal(t, m, val)
+ require.Equal(t, m, obj.Value())
+ require.Equal(t, expectedUnwrapped, obj.Unwrap())
+
+ // Negative tests
+ _, ok = obj.Array()
+ require.False(t, ok)
+ _, ok = obj.Number()
+ require.False(t, ok)
+ _, ok = obj.String()
+ require.False(t, ok)
+ _, ok = obj.Bool()
+ require.False(t, ok)
+ require.False(t, obj.IsNull())
+}
+
+func TestJSONArray_Methods_ShouldWorkAsExpected(t *testing.T) {
+ arr := []JSON{
+ newJSONString("item1"),
+ newJSONObject(map[string]JSON{
+ "key": newJSONString("value"),
+ "num": newJSONNumber(42),
+ }),
+ newJSONNumber(2),
+ }
+ jsonArr := newJSONArray(arr)
+ expectedUnwrapped := []any{
+ "item1",
+ map[string]any{
+ "key": "value",
+ "num": float64(42),
+ },
+ float64(2),
+ }
+
+ // Positive tests
+ val, ok := jsonArr.Array()
+ require.True(t, ok)
+ require.Equal(t, arr, val)
+ require.Equal(t, arr, jsonArr.Value())
+ require.Equal(t, expectedUnwrapped, jsonArr.Unwrap())
+
+ // Negative tests
+ _, ok = jsonArr.Object()
+ require.False(t, ok)
+ _, ok = jsonArr.Number()
+ require.False(t, ok)
+ _, ok = jsonArr.String()
+ require.False(t, ok)
+ _, ok = jsonArr.Bool()
+ require.False(t, ok)
+ require.False(t, jsonArr.IsNull())
+}
+
+func TestJSONNumber_Methods_ShouldWorkAsExpected(t *testing.T) {
+ num := newJSONNumber(2.5)
+ expected := 2.5
+
+ // Positive tests
+ val, ok := num.Number()
+ require.True(t, ok)
+ require.Equal(t, expected, val)
+ require.Equal(t, expected, num.Value())
+ require.Equal(t, expected, num.Unwrap())
+
+ // Negative tests
+ _, ok = num.Object()
+ require.False(t, ok)
+ _, ok = num.Array()
+ require.False(t, ok)
+ _, ok = num.String()
+ require.False(t, ok)
+ _, ok = num.Bool()
+ require.False(t, ok)
+ require.False(t, num.IsNull())
+}
+
+func TestJSONString_Methods_ShouldWorkAsExpected(t *testing.T) {
+ str := newJSONString("value")
+ expected := "value"
+
+ // Positive tests
+ val, ok := str.String()
+ require.True(t, ok)
+ require.Equal(t, expected, val)
+ require.Equal(t, expected, str.Value())
+ require.Equal(t, expected, str.Unwrap())
+
+ // Negative tests
+ _, ok = str.Object()
+ require.False(t, ok)
+ _, ok = str.Array()
+ require.False(t, ok)
+ _, ok = str.Number()
+ require.False(t, ok)
+ _, ok = str.Bool()
+ require.False(t, ok)
+ require.False(t, str.IsNull())
+}
+
+func TestJSONBool_Methods_ShouldWorkAsExpected(t *testing.T) {
+ b := newJSONBool(true)
+ expected := true
+
+ // Positive tests
+ val, ok := b.Bool()
+ require.True(t, ok)
+ require.Equal(t, expected, val)
+ require.Equal(t, expected, b.Value())
+ require.Equal(t, expected, b.Unwrap())
+
+ // Negative tests
+ _, ok = b.Object()
+ require.False(t, ok)
+ _, ok = b.Array()
+ require.False(t, ok)
+ _, ok = b.Number()
+ require.False(t, ok)
+ _, ok = b.String()
+ require.False(t, ok)
+ require.False(t, b.IsNull())
+}
+
+func TestJSONNull_Methods_ShouldWorkAsExpected(t *testing.T) {
+ null := newJSONNull()
+
+ // Positive tests
+ require.True(t, null.IsNull())
+ require.Nil(t, null.Value())
+ require.Nil(t, null.Unwrap())
+
+ // Negative tests
+ _, ok := null.Object()
+ require.False(t, ok)
+ _, ok = null.Array()
+ require.False(t, ok)
+ _, ok = null.Number()
+ require.False(t, ok)
+ _, ok = null.String()
+ require.False(t, ok)
+ _, ok = null.Bool()
+ require.False(t, ok)
+}
+
+func TestNewJSONAndMarshalJSON(t *testing.T) {
+ tests := []struct {
+ name string
+ input any
+ expected JSON
+ expectedJSON string
+ expectError bool
+ }{
+ {
+ name: "Nil",
+ input: nil,
+ expected: newJSONNull(),
+ expectedJSON: "null",
+ },
+ {
+ name: "FastJSON",
+ input: fastjson.MustParse(`{"key": "value"}`),
+ expected: newJSONObject(map[string]JSON{"key": newJSONString("value")}),
+ expectedJSON: `{"key":"value"}`,
+ },
+ {
+ name: "Map",
+ input: map[string]any{"key": "value"},
+ expected: newJSONObject(map[string]JSON{"key": newJSONString("value")}),
+ expectedJSON: `{"key":"value"}`,
+ },
+ {
+ name: "Bool",
+ input: true,
+ expected: newJSONBool(true),
+ expectedJSON: "true",
+ },
+ {
+ name: "String",
+ input: "str",
+ expected: newJSONString("str"),
+ expectedJSON: `"str"`,
+ },
+ {
+ name: "Int8",
+ input: int8(42),
+ expected: newJSONNumber(42),
+ expectedJSON: "42",
+ },
+ {
+ name: "Int16",
+ input: int16(42),
+ expected: newJSONNumber(42),
+ expectedJSON: "42",
+ },
+ {
+ name: "Int32",
+ input: int32(42),
+ expected: newJSONNumber(42),
+ expectedJSON: "42",
+ },
+ {
+ name: "Int64",
+ input: int64(42),
+ expected: newJSONNumber(42),
+ expectedJSON: "42",
+ },
+ {
+ name: "Int",
+ input: 42,
+ expected: newJSONNumber(42),
+ expectedJSON: "42",
+ },
+ {
+ name: "Uint8",
+ input: uint8(42),
+ expected: newJSONNumber(42),
+ expectedJSON: "42",
+ },
+ {
+ name: "Uint16",
+ input: uint16(42),
+ expected: newJSONNumber(42),
+ expectedJSON: "42",
+ },
+ {
+ name: "Uint32",
+ input: uint32(42),
+ expected: newJSONNumber(42),
+ expectedJSON: "42",
+ },
+ {
+ name: "Uint64",
+ input: uint64(42),
+ expected: newJSONNumber(42),
+ expectedJSON: "42",
+ },
+ {
+ name: "Uint",
+ input: uint(42),
+ expected: newJSONNumber(42),
+ expectedJSON: "42",
+ },
+ {
+ name: "Float32",
+ input: float32(2.5),
+ expected: newJSONNumber(2.5),
+ expectedJSON: "2.5",
+ },
+ {
+ name: "Float64",
+ input: float64(2.5),
+ expected: newJSONNumber(2.5),
+ expectedJSON: "2.5",
+ },
+ {
+ name: "BoolArray",
+ input: []bool{true, false},
+ expected: newJSONArray([]JSON{newJSONBool(true), newJSONBool(false)}),
+ expectedJSON: "[true,false]",
+ },
+ {
+ name: "StringArray",
+ input: []string{"a", "b", "c"},
+ expected: newJSONArray([]JSON{newJSONString("a"), newJSONString("b"), newJSONString("c")}),
+ expectedJSON: `["a","b","c"]`,
+ },
+ {
+ name: "AnyArray",
+ input: []any{"a", 1, true},
+ expected: newJSONArray([]JSON{newJSONString("a"), newJSONNumber(1), newJSONBool(true)}),
+ expectedJSON: `["a",1,true]`,
+ },
+ {
+ name: "Int8Array",
+ input: []int8{1, 2, 3},
+ expected: newJSONArray([]JSON{newJSONNumber(1), newJSONNumber(2), newJSONNumber(3)}),
+ expectedJSON: "[1,2,3]",
+ },
+ {
+ name: "Int16Array",
+ input: []int16{1, 2, 3},
+ expected: newJSONArray([]JSON{newJSONNumber(1), newJSONNumber(2), newJSONNumber(3)}),
+ expectedJSON: "[1,2,3]",
+ },
+ {
+ name: "Int32Array",
+ input: []int32{1, 2, 3},
+ expected: newJSONArray([]JSON{newJSONNumber(1), newJSONNumber(2), newJSONNumber(3)}),
+ expectedJSON: "[1,2,3]",
+ },
+ {
+ name: "Int64Array",
+ input: []int64{1, 2, 3},
+ expected: newJSONArray([]JSON{newJSONNumber(1), newJSONNumber(2), newJSONNumber(3)}),
+ expectedJSON: "[1,2,3]",
+ },
+ {
+ name: "IntArray",
+ input: []int{1, 2, 3},
+ expected: newJSONArray([]JSON{newJSONNumber(1), newJSONNumber(2), newJSONNumber(3)}),
+ expectedJSON: "[1,2,3]",
+ },
+ {
+ name: "Uint8Array",
+ input: []uint8{1, 2, 3},
+ expected: newJSONArray([]JSON{newJSONNumber(1), newJSONNumber(2), newJSONNumber(3)}),
+ expectedJSON: "[1,2,3]",
+ },
+ {
+ name: "Uint16Array",
+ input: []uint16{1, 2, 3},
+ expected: newJSONArray([]JSON{newJSONNumber(1), newJSONNumber(2), newJSONNumber(3)}),
+ expectedJSON: "[1,2,3]",
+ },
+ {
+ name: "Uint32Array",
+ input: []uint32{1, 2, 3},
+ expected: newJSONArray([]JSON{newJSONNumber(1), newJSONNumber(2), newJSONNumber(3)}),
+ expectedJSON: "[1,2,3]",
+ },
+ {
+ name: "Uint64Array",
+ input: []uint64{1, 2, 3},
+ expected: newJSONArray([]JSON{newJSONNumber(1), newJSONNumber(2), newJSONNumber(3)}),
+ expectedJSON: "[1,2,3]",
+ },
+ {
+ name: "UintArray",
+ input: []uint{1, 2, 3},
+ expected: newJSONArray([]JSON{newJSONNumber(1), newJSONNumber(2), newJSONNumber(3)}),
+ expectedJSON: "[1,2,3]",
+ },
+ {
+ name: "Float32Array",
+ input: []float32{1.0, 2.25, 3.5},
+ expected: newJSONArray([]JSON{newJSONNumber(1.0), newJSONNumber(2.25), newJSONNumber(3.5)}),
+ expectedJSON: "[1,2.25,3.5]",
+ },
+ {
+ name: "Float64Array",
+ input: []float64{1.0, 2.25, 3.5},
+ expected: newJSONArray([]JSON{newJSONNumber(1.0), newJSONNumber(2.25), newJSONNumber(3.5)}),
+ expectedJSON: "[1,2.25,3.5]",
+ },
+ {
+ name: "AnyArrayWithInvalidElement",
+ input: []any{"valid", make(chan int)}, // channels can't be converted to JSON
+ expectError: true,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ result, err := NewJSON(tt.input)
+ if tt.expectError {
+ require.Error(t, err, "Expected error, but got nil")
+ return
+ }
+ require.NoError(t, err, "NewJSON failed with error %v", err)
+ require.Equal(t, result, tt.expected)
+
+ if !tt.expectError {
+ jsonBytes, err := result.MarshalJSON()
+ require.NoError(t, err, "MarshalJSON failed with error %v", err)
+ require.Equal(t, tt.expectedJSON, string(jsonBytes))
+ }
+ })
+ }
+}
+
+func TestNewJSONFromMap_WithInvalidValue_ShouldFail(t *testing.T) {
+ // Map with an invalid value (channel cannot be converted to JSON)
+ input := map[string]any{
+ "valid": "value",
+ "invalid": make(chan int),
+ }
+
+ _, err := NewJSONFromMap(input)
+ require.Error(t, err)
+}
diff --git a/client/normal_new.go b/client/normal_new.go
index bcd0f00929..8eb1b9f24c 100644
--- a/client/normal_new.go
+++ b/client/normal_new.go
@@ -64,7 +64,7 @@ func NewNormalValue(val any) (NormalValue, error) {
return NewNormalTime(v), nil
case *Document:
return NewNormalDocument(v), nil
- case *JSON:
+ case JSON:
return NewNormalJSON(v), nil
case immutable.Option[bool]:
diff --git a/client/normal_scalar.go b/client/normal_scalar.go
index ae92fbe3a6..cc6d9054db 100644
--- a/client/normal_scalar.go
+++ b/client/normal_scalar.go
@@ -17,13 +17,6 @@ import (
"golang.org/x/exp/constraints"
)
-// JSON contains a valid JSON value.
-//
-// The inner type can be any valid normal value or normal value array.
-type JSON struct {
- inner any
-}
-
// NormalValue is dummy implementation of NormalValue to be embedded in other types.
type baseNormalValue[T any] struct {
NormalVoid
@@ -126,15 +119,15 @@ func (v normalDocument) Document() (*Document, bool) {
}
type normalJSON struct {
- baseNormalValue[*JSON]
+ baseNormalValue[JSON]
}
-func (v normalJSON) JSON() (*JSON, bool) {
+func (v normalJSON) JSON() (JSON, bool) {
return v.val, true
}
func (v normalJSON) Unwrap() any {
- return v.val.inner
+ return v.val.Unwrap()
}
func newNormalInt(val int64) NormalValue {
@@ -181,8 +174,8 @@ func NewNormalDocument(val *Document) NormalValue {
}
// NewNormalJSON creates a new NormalValue that represents a `JSON` value.
-func NewNormalJSON(val *JSON) NormalValue {
- return normalJSON{baseNormalValue[*JSON]{val: val}}
+func NewNormalJSON(val JSON) NormalValue {
+ return normalJSON{baseNormalValue[JSON]{val: val}}
}
func areNormalScalarsEqual[T comparable](val T, f func() (T, bool)) bool {
diff --git a/client/normal_value.go b/client/normal_value.go
index 081814ffe2..3dc66a83fd 100644
--- a/client/normal_value.go
+++ b/client/normal_value.go
@@ -64,7 +64,7 @@ type NormalValue interface {
Document() (*Document, bool)
// JSON returns the value as JSON. The second return flag is true if the value is JSON.
// Otherwise it will return nil and false.
- JSON() (*JSON, bool)
+ JSON() (JSON, bool)
// NillableBool returns the value as a nillable bool.
// The second return flag is true if the value is [immutable.Option[bool]].
diff --git a/client/normal_value_test.go b/client/normal_value_test.go
index bcea59e046..773727c72a 100644
--- a/client/normal_value_test.go
+++ b/client/normal_value_test.go
@@ -78,8 +78,8 @@ const (
// Otherwise, it returns the input itself.
func extractValue(input any) any {
// unwrap JSON inner values
- if v, ok := input.(*JSON); ok {
- return v.inner
+ if v, ok := input.(JSON); ok {
+ return v.Unwrap()
}
inputVal := reflect.ValueOf(input)
@@ -171,7 +171,7 @@ func TestNormalValue_NewValueAndTypeAssertion(t *testing.T) {
BytesType: func(v any) NormalValue { return NewNormalBytes(v.([]byte)) },
TimeType: func(v any) NormalValue { return NewNormalTime(v.(time.Time)) },
DocumentType: func(v any) NormalValue { return NewNormalDocument(v.(*Document)) },
- JSONType: func(v any) NormalValue { return NewNormalJSON(v.(*JSON)) },
+ JSONType: func(v any) NormalValue { return NewNormalJSON(v.(JSON)) },
NillableBoolType: func(v any) NormalValue { return NewNormalNillableBool(v.(immutable.Option[bool])) },
NillableIntType: func(v any) NormalValue { return NewNormalNillableInt(v.(immutable.Option[int64])) },
@@ -293,7 +293,7 @@ func TestNormalValue_NewValueAndTypeAssertion(t *testing.T) {
},
{
nType: JSONType,
- input: &JSON{nil},
+ input: newJSONNumber(2),
},
{
nType: NillableBoolType,
@@ -842,53 +842,6 @@ func TestNormalValue_NewNormalValueFromAnyArray(t *testing.T) {
}
}
-func TestNormalValue_NewNormalJSON(t *testing.T) {
- var expect *JSON
- var actual *JSON
-
- expect = &JSON{nil}
- normal := NewNormalJSON(expect)
-
- actual, _ = normal.JSON()
- assert.Equal(t, expect, actual)
-
- expect = &JSON{"hello"}
- normal = NewNormalJSON(expect)
-
- actual, _ = normal.JSON()
- assert.Equal(t, expect, actual)
-
- expect = &JSON{true}
- normal = NewNormalJSON(expect)
-
- actual, _ = normal.JSON()
- assert.Equal(t, expect, actual)
-
- expect = &JSON{int64(10)}
- normal = NewNormalJSON(expect)
-
- actual, _ = normal.JSON()
- assert.Equal(t, expect, actual)
-
- expect = &JSON{float64(3.14)}
- normal = NewNormalJSON(expect)
-
- actual, _ = normal.JSON()
- assert.Equal(t, expect, actual)
-
- expect = &JSON{map[string]any{"one": 1}}
- normal = NewNormalJSON(expect)
-
- actual, _ = normal.JSON()
- assert.Equal(t, expect, actual)
-
- expect = &JSON{[]any{1, "two"}}
- normal = NewNormalJSON(expect)
-
- actual, _ = normal.JSON()
- assert.Equal(t, expect, actual)
-}
-
func TestNormalValue_NewNormalInt(t *testing.T) {
i64 := int64(2)
v := NewNormalInt(i64)
diff --git a/client/normal_void.go b/client/normal_void.go
index 3238a25ad2..a9078e5328 100644
--- a/client/normal_void.go
+++ b/client/normal_void.go
@@ -65,7 +65,7 @@ func (NormalVoid) Document() (*Document, bool) {
return nil, false
}
-func (NormalVoid) JSON() (*JSON, bool) {
+func (NormalVoid) JSON() (JSON, bool) {
return nil, false
}
diff --git a/docs/data_format_changes/i3265-unified-json-types.md b/docs/data_format_changes/i3265-unified-json-types.md
new file mode 100644
index 0000000000..979f75d869
--- /dev/null
+++ b/docs/data_format_changes/i3265-unified-json-types.md
@@ -0,0 +1,4 @@
+# Unified JSON Types
+
+Applied a common interface to all JSON types which made it use float64 for all numbers.
+This in turned caused encoded data to change because CBOR encoding of float64 is different from int64.
diff --git a/tests/integration/mutation/create/field_kinds/field_kind_json_test.go b/tests/integration/mutation/create/field_kinds/field_kind_json_test.go
index b578bf3928..5cb6fdd966 100644
--- a/tests/integration/mutation/create/field_kinds/field_kind_json_test.go
+++ b/tests/integration/mutation/create/field_kinds/field_kind_json_test.go
@@ -39,7 +39,6 @@ func TestMutationCreate_WithJSONFieldGivenObjectValue_Succeeds(t *testing.T) {
testUtils.Request{
Request: `query {
Users {
- _docID
name
custom
}
@@ -47,10 +46,9 @@ func TestMutationCreate_WithJSONFieldGivenObjectValue_Succeeds(t *testing.T) {
Results: map[string]any{
"Users": []map[string]any{
{
- "_docID": "bae-a948a3b2-3e89-5654-b0f0-71685a66b4d7",
"custom": map[string]any{
"tree": "maple",
- "age": uint64(250),
+ "age": float64(250),
},
"name": "John",
},
@@ -84,7 +82,6 @@ func TestMutationCreate_WithJSONFieldGivenListOfScalarsValue_Succeeds(t *testing
testUtils.Request{
Request: `query {
Users {
- _docID
name
custom
}
@@ -92,8 +89,7 @@ func TestMutationCreate_WithJSONFieldGivenListOfScalarsValue_Succeeds(t *testing
Results: map[string]any{
"Users": []map[string]any{
{
- "_docID": "bae-90fd8b1b-bd11-56b5-a78c-2fb6f7b4dca0",
- "custom": []any{"maple", uint64(250)},
+ "custom": []any{"maple", float64(250)},
"name": "John",
},
},
@@ -129,7 +125,6 @@ func TestMutationCreate_WithJSONFieldGivenListOfObjectsValue_Succeeds(t *testing
testUtils.Request{
Request: `query {
Users {
- _docID
name
custom
}
@@ -137,7 +132,6 @@ func TestMutationCreate_WithJSONFieldGivenListOfObjectsValue_Succeeds(t *testing
Results: map[string]any{
"Users": []map[string]any{
{
- "_docID": "bae-dd7c12f5-a7c5-55c6-8b35-ece853ae7f9e",
"custom": []any{
map[string]any{"tree": "maple"},
map[string]any{"tree": "oak"},
@@ -174,7 +168,6 @@ func TestMutationCreate_WithJSONFieldGivenIntValue_Succeeds(t *testing.T) {
testUtils.Request{
Request: `query {
Users {
- _docID
name
custom
}
@@ -182,8 +175,7 @@ func TestMutationCreate_WithJSONFieldGivenIntValue_Succeeds(t *testing.T) {
Results: map[string]any{
"Users": []map[string]any{
{
- "_docID": "bae-59731737-8793-5794-a9a5-0ed0ad696d5c",
- "custom": uint64(250),
+ "custom": float64(250),
"name": "John",
},
},
@@ -216,7 +208,6 @@ func TestMutationCreate_WithJSONFieldGivenStringValue_Succeeds(t *testing.T) {
testUtils.Request{
Request: `query {
Users {
- _docID
name
custom
}
@@ -224,7 +215,6 @@ func TestMutationCreate_WithJSONFieldGivenStringValue_Succeeds(t *testing.T) {
Results: map[string]any{
"Users": []map[string]any{
{
- "_docID": "bae-608582c3-979e-5f34-80f8-a70fce875d05",
"custom": "hello",
"name": "John",
},
@@ -258,7 +248,6 @@ func TestMutationCreate_WithJSONFieldGivenBooleanValue_Succeeds(t *testing.T) {
testUtils.Request{
Request: `query {
Users {
- _docID
name
custom
}
@@ -266,7 +255,6 @@ func TestMutationCreate_WithJSONFieldGivenBooleanValue_Succeeds(t *testing.T) {
Results: map[string]any{
"Users": []map[string]any{
{
- "_docID": "bae-0c4b39cf-433c-5a9c-9bed-1e2796c35d14",
"custom": true,
"name": "John",
},
@@ -300,7 +288,6 @@ func TestMutationCreate_WithJSONFieldGivenNullValue_Succeeds(t *testing.T) {
testUtils.Request{
Request: `query {
Users {
- _docID
name
custom
}
@@ -308,7 +295,6 @@ func TestMutationCreate_WithJSONFieldGivenNullValue_Succeeds(t *testing.T) {
Results: map[string]any{
"Users": []map[string]any{
{
- "_docID": "bae-f405f600-56d9-5de4-8d02-75fdced35e3b",
"custom": nil,
"name": "John",
},
diff --git a/tests/integration/query/json/with_ge_test.go b/tests/integration/query/json/with_ge_test.go
index bfb574170e..4a9afc403e 100644
--- a/tests/integration/query/json/with_ge_test.go
+++ b/tests/integration/query/json/with_ge_test.go
@@ -270,10 +270,10 @@ func TestQueryJSON_WithGreaterEqualFilterWithNestedNullValue_ShouldFilter(t *tes
Results: map[string]any{
"Users": []map[string]any{
{
- "Name": "John",
+ "Name": "David",
},
{
- "Name": "David",
+ "Name": "John",
},
},
},
diff --git a/tests/integration/query/json/with_gt_test.go b/tests/integration/query/json/with_gt_test.go
index 3a2972320b..07d08ce7ca 100644
--- a/tests/integration/query/json/with_gt_test.go
+++ b/tests/integration/query/json/with_gt_test.go
@@ -182,7 +182,7 @@ func TestQueryJSON_WithGreaterThanFilterBlockWithNestedGreaterValue_ShouldFilter
{
"Name": "John",
"Custom": map[string]any{
- "age": uint64(21),
+ "age": float64(21),
},
},
},
diff --git a/tests/integration/query/json/with_lt_test.go b/tests/integration/query/json/with_lt_test.go
index 14a422d5ad..636139c05d 100644
--- a/tests/integration/query/json/with_lt_test.go
+++ b/tests/integration/query/json/with_lt_test.go
@@ -178,7 +178,7 @@ func TestQueryJSON_WithLesserThanFilterBlockWithNestedGreaterValue_ShouldFilter(
{
"Name": "Bob",
"Custom": map[string]any{
- "age": uint64(19),
+ "age": float64(19),
},
},
},
diff --git a/tests/integration/query/json/with_nlike_test.go b/tests/integration/query/json/with_nlike_test.go
index db0615b2ca..6de741f61b 100644
--- a/tests/integration/query/json/with_nlike_test.go
+++ b/tests/integration/query/json/with_nlike_test.go
@@ -65,16 +65,16 @@ func TestQueryJSON_WithNotLikeFilter_ShouldFilter(t *testing.T) {
Results: map[string]any{
"Users": []map[string]any{
{
- "custom": uint64(32),
+ "custom": map[string]any{"one": float64(1)},
},
{
- "custom": "Viserys I Targaryen, King of the Andals",
+ "custom": float64(32),
},
{
- "custom": map[string]any{"one": uint64(1)},
+ "custom": []any{float64(1), float64(2)},
},
{
- "custom": []any{uint64(1), uint64(2)},
+ "custom": "Viserys I Targaryen, King of the Andals",
},
{
"custom": false,
diff --git a/tests/integration/utils.go b/tests/integration/utils.go
index 3c0e9baffd..dfd3096bd3 100644
--- a/tests/integration/utils.go
+++ b/tests/integration/utils.go
@@ -114,7 +114,6 @@ func init() {
// mutation type.
mutationType = CollectionSaveMutationType
}
- mutationType = GQLRequestMutationType
if value, ok := os.LookupEnv(viewTypeEnvName); ok {
viewType = ViewType(value)
@@ -1987,7 +1986,7 @@ func assertRequestResultDocs(
) bool {
// compare results
require.Equal(s.t, len(expectedResults), len(actualResults),
- s.testCase.Description+" \n(number of results don't match)")
+ s.testCase.Description+" \n(number of results don't match for %s)", stack)
for actualDocIndex, actualDoc := range actualResults {
stack.pushArray(actualDocIndex)
@@ -1998,9 +1997,9 @@ func assertRequestResultDocs(
len(expectedDoc),
len(actualDoc),
fmt.Sprintf(
- "%s \n(number of properties for item at index %v don't match)",
+ "%s \n(number of properties don't match for %s)",
s.testCase.Description,
- actualDocIndex,
+ stack,
),
)
From a61dcd7a4e0d4f4e07f7b6ea8825a4c01efbf623 Mon Sep 17 00:00:00 2001
From: Chris Quigley
Date: Fri, 22 Nov 2024 12:19:34 -0500
Subject: [PATCH 31/47] fix: Resolve CORS errors in OpenAPI tab of Playground
(#3263)
## Relevant issue(s)
Resolves #2458
## Description
There was a bug in the Playground's OpenAPI tool. This tool would list
the URL of the endpoints as localhost instead of 127.0.0.1 (which is
what the startup Defradb message shows.) The result was that attempting
to execute one of the queries returned a CORS error.
This was a one-liner fix. I adjusted openapi.go to use
the relative URL `/api/v0` for the server entry, instead
of the absolute `http://localhost:9181/api/v0`, so the OpenAPI tool
resolves endpoints against whatever host the Playground is served from.
## Tasks
- [x] I made sure the code is well commented, particularly
hard-to-understand areas.
- [x] I made sure the pull request title adheres to the conventional
commit style (the subset used in the project can be found in the project docs),
## How has this been tested?
The platform(s) on which this was tested:
- Windows
---
docs/website/references/http/openapi.json | 2 +-
http/openapi.go | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/docs/website/references/http/openapi.json b/docs/website/references/http/openapi.json
index 03c6a6513d..1f28b84a92 100644
--- a/docs/website/references/http/openapi.json
+++ b/docs/website/references/http/openapi.json
@@ -2363,7 +2363,7 @@
"servers": [
{
"description": "Local DefraDB instance",
- "url": "http://localhost:9181/api/v0"
+ "url": "/api/v0"
}
],
"tags": [
diff --git a/http/openapi.go b/http/openapi.go
index 850ce081f4..b217036182 100644
--- a/http/openapi.go
+++ b/http/openapi.go
@@ -111,7 +111,7 @@ func NewOpenAPISpec() (*openapi3.T, error) {
Servers: openapi3.Servers{
&openapi3.Server{
Description: "Local DefraDB instance",
- URL: "http://localhost:9181/api/v0",
+ URL: "/api/v0",
},
},
ExternalDocs: &openapi3.ExternalDocs{
From ad2fcaeb97ef7a8eaddae48b3a7b9e0bf51b181c Mon Sep 17 00:00:00 2001
From: Shahzad Lone
Date: Mon, 25 Nov 2024 14:13:42 -0500
Subject: [PATCH 32/47] ci: Fix the gql mutation running in all tests (#3267)
## Relevant issue(s)
Resolves #3266
## Description
- Remove the overwriting
- Improve logging
---
tests/integration/utils.go | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/tests/integration/utils.go b/tests/integration/utils.go
index dfd3096bd3..bf3daffff3 100644
--- a/tests/integration/utils.go
+++ b/tests/integration/utils.go
@@ -2251,7 +2251,7 @@ func skipIfClientTypeUnsupported(
}
if len(filteredClients) == 0 {
- t.Skipf("test does not support any given client type. Type: %v", supportedClientTypes)
+ t.Skipf("test does not support any given client type. Supported Type: %v", supportedClientTypes.Value())
}
return filteredClients
@@ -2292,7 +2292,7 @@ func skipIfDatabaseTypeUnsupported(
}
if len(filteredDatabases) == 0 {
- t.Skipf("test does not support any given database type. Type: %v", filteredDatabases)
+ t.Skipf("test does not support any given database type. Supported Type: %v", supportedDatabaseTypes.Value())
}
return filteredDatabases
From e2c657e52932d2c3338fd08383f0528a51ac93a1 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
<41898282+github-actions[bot]@users.noreply.github.com>
Date: Mon, 25 Nov 2024 14:41:27 -0500
Subject: [PATCH 33/47] bot: Update dependencies (bulk dependabot PRs)
25-11-2024 (#3273)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
✅ This PR was created by combining the following PRs:
#3272 bot: Bump @typescript-eslint/eslint-plugin from 8.14.0 to 8.15.0
in /playground
#3271 bot: Bump typescript from 5.6.3 to 5.7.2 in /playground
#3261 bot: Bump cosmossdk.io/math from 1.3.0 to 1.4.0
#3251 bot: Bump github.com/ipfs/boxo from 0.24.2 to 0.24.3
#3250 bot: Bump github.com/libp2p/go-libp2p-kad-dht from 0.27.0 to
0.28.1
#3249 bot: Bump golang.org/x/crypto from 0.28.0 to 0.29.0
⚠️ The following PRs were resolved manually due to merge conflicts:
#3270 bot: Bump @typescript-eslint/parser from 8.14.0 to 8.15.0 in
/playground
#3248 bot: Bump eslint from 9.14.0 to 9.15.0 in /playground
---------
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Shahzad Lone
---
go.mod | 24 +-
go.sum | 60 +-
playground/package-lock.json | 1300 +++++++++++++---------------------
playground/package.json | 8 +-
4 files changed, 537 insertions(+), 855 deletions(-)
diff --git a/go.mod b/go.mod
index 8d2c2715f2..ac7d480c08 100644
--- a/go.mod
+++ b/go.mod
@@ -19,7 +19,7 @@ require (
github.com/go-errors/errors v1.5.1
github.com/gofrs/uuid/v5 v5.3.0
github.com/iancoleman/strcase v0.3.0
- github.com/ipfs/boxo v0.24.2
+ github.com/ipfs/boxo v0.24.3
github.com/ipfs/go-block-format v0.2.0
github.com/ipfs/go-cid v0.4.1
github.com/ipfs/go-datastore v0.6.0
@@ -34,7 +34,7 @@ require (
github.com/lestrrat-go/jwx/v2 v2.1.2
github.com/libp2p/go-libp2p v0.37.0
github.com/libp2p/go-libp2p-gostream v0.6.0
- github.com/libp2p/go-libp2p-kad-dht v0.27.0
+ github.com/libp2p/go-libp2p-kad-dht v0.28.1
github.com/libp2p/go-libp2p-pubsub v0.12.0
github.com/libp2p/go-libp2p-record v0.2.0
github.com/mr-tron/base58 v1.2.0
@@ -62,7 +62,7 @@ require (
go.opentelemetry.io/otel/metric v1.32.0
go.opentelemetry.io/otel/sdk/metric v1.32.0
go.uber.org/zap v1.27.0
- golang.org/x/crypto v0.28.0
+ golang.org/x/crypto v0.29.0
golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c
google.golang.org/grpc v1.67.1
)
@@ -80,7 +80,7 @@ require (
cosmossdk.io/depinject v1.0.0 // indirect
cosmossdk.io/errors v1.0.1 // indirect
cosmossdk.io/log v1.4.1 // indirect
- cosmossdk.io/math v1.3.0 // indirect
+ cosmossdk.io/math v1.4.0 // indirect
cosmossdk.io/store v1.1.1 // indirect
cosmossdk.io/x/circuit v0.1.0 // indirect
cosmossdk.io/x/evidence v0.1.0 // indirect
@@ -151,7 +151,7 @@ require (
github.com/flynn/noise v1.1.0 // indirect
github.com/francoispqt/gojay v1.2.13 // indirect
github.com/fsnotify/fsnotify v1.7.0 // indirect
- github.com/gabriel-vasile/mimetype v1.4.4 // indirect
+ github.com/gabriel-vasile/mimetype v1.4.6 // indirect
github.com/getsentry/sentry-go v0.27.0 // indirect
github.com/go-jose/go-jose/v3 v3.0.1-0.20221117193127-916db76e8214 // indirect
github.com/go-kit/kit v0.12.0 // indirect
@@ -190,7 +190,7 @@ require (
github.com/gorilla/websocket v1.5.3 // indirect
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect
github.com/grpc-ecosystem/grpc-gateway v1.16.0 // indirect
- github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0 // indirect
+ github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0 // indirect
github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
@@ -351,7 +351,7 @@ require (
go.etcd.io/bbolt v1.3.10 // indirect
go.opencensus.io v0.24.0 // indirect
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 // indirect
- go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.52.0 // indirect
+ go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.56.0 // indirect
go.opentelemetry.io/otel v1.32.0 // indirect
go.opentelemetry.io/otel/sdk v1.32.0 // indirect
go.opentelemetry.io/otel/trace v1.32.0 // indirect
@@ -362,17 +362,17 @@ require (
golang.org/x/mod v0.21.0 // indirect
golang.org/x/net v0.30.0 // indirect
golang.org/x/oauth2 v0.23.0 // indirect
- golang.org/x/sync v0.8.0 // indirect
+ golang.org/x/sync v0.9.0 // indirect
golang.org/x/sys v0.27.0 // indirect
- golang.org/x/term v0.25.0 // indirect
- golang.org/x/text v0.19.0 // indirect
+ golang.org/x/term v0.26.0 // indirect
+ golang.org/x/text v0.20.0 // indirect
golang.org/x/time v0.5.0 // indirect
golang.org/x/tools v0.26.0 // indirect
gonum.org/v1/gonum v0.15.0 // indirect
google.golang.org/api v0.171.0 // indirect
google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de // indirect
- google.golang.org/genproto/googleapis/api v0.0.0-20240814211410-ddb44dafa142 // indirect
- google.golang.org/genproto/googleapis/rpc v0.0.0-20240814211410-ddb44dafa142 // indirect
+ google.golang.org/genproto/googleapis/api v0.0.0-20241007155032-5fefd90f89a9 // indirect
+ google.golang.org/genproto/googleapis/rpc v0.0.0-20241007155032-5fefd90f89a9 // indirect
google.golang.org/protobuf v1.35.1 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
diff --git a/go.sum b/go.sum
index 4b820aa582..f4c2388f9e 100644
--- a/go.sum
+++ b/go.sum
@@ -202,8 +202,8 @@ cosmossdk.io/errors v1.0.1 h1:bzu+Kcr0kS/1DuPBtUFdWjzLqyUuCiyHjyJB6srBV/0=
cosmossdk.io/errors v1.0.1/go.mod h1:MeelVSZThMi4bEakzhhhE/CKqVv3nOJDA25bIqRDu/U=
cosmossdk.io/log v1.4.1 h1:wKdjfDRbDyZRuWa8M+9nuvpVYxrEOwbD/CA8hvhU8QM=
cosmossdk.io/log v1.4.1/go.mod h1:k08v0Pyq+gCP6phvdI6RCGhLf/r425UT6Rk/m+o74rU=
-cosmossdk.io/math v1.3.0 h1:RC+jryuKeytIiictDslBP9i1fhkVm6ZDmZEoNP316zE=
-cosmossdk.io/math v1.3.0/go.mod h1:vnRTxewy+M7BtXBNFybkuhSH4WfedVAAnERHgVFhp3k=
+cosmossdk.io/math v1.4.0 h1:XbgExXFnXmF/CccPPEto40gOO7FpWu9yWNAZPN3nkNQ=
+cosmossdk.io/math v1.4.0/go.mod h1:O5PkD4apz2jZs4zqFdTr16e1dcaQCc5z6lkEnrrppuk=
cosmossdk.io/store v1.1.1 h1:NA3PioJtWDVU7cHHeyvdva5J/ggyLDkyH0hGHl2804Y=
cosmossdk.io/store v1.1.1/go.mod h1:8DwVTz83/2PSI366FERGbWSH7hL6sB7HbYp8bqksNwM=
cosmossdk.io/x/circuit v0.1.0 h1:IAej8aRYeuOMritczqTlljbUVHq1E85CpBqaCTwYgXs=
@@ -262,8 +262,8 @@ github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuy
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
-github.com/alecthomas/units v0.0.0-20231202071711-9a357b53e9c9 h1:ez/4by2iGztzR4L0zgAOR8lTQK9VlyBVVd7G4omaOQs=
-github.com/alecthomas/units v0.0.0-20231202071711-9a357b53e9c9/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE=
+github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b h1:mimo19zliBX/vSQ6PWWSL9lK8qwHozUj03+zLoEB8O0=
+github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b/go.mod h1:fvzegU4vN3H1qMT+8wDmzjAcDONcgo2/SZ/TyfdUOFs=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
@@ -419,8 +419,8 @@ github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:ma
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.4 h1:wfIWP927BUkWJb2NmU/kNDYIBTh/ziUX91+lVfRxZq4=
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
-github.com/crackcomm/go-gitignore v0.0.0-20231225121904-e25f5bc08668 h1:ZFUue+PNxmHlu7pYv+IYMtqlaO/0VwaGEqKepZf9JpA=
-github.com/crackcomm/go-gitignore v0.0.0-20231225121904-e25f5bc08668/go.mod h1:p1d6YEZWvFzEh4KLyvBcVSnrfNDDvK2zfK/4x2v/4pE=
+github.com/crackcomm/go-gitignore v0.0.0-20241020182519-7843d2ba8fdf h1:dwGgBWn84wUS1pVikGiruW+x5XM4amhjaZO20vCjay4=
+github.com/crackcomm/go-gitignore v0.0.0-20241020182519-7843d2ba8fdf/go.mod h1:p1d6YEZWvFzEh4KLyvBcVSnrfNDDvK2zfK/4x2v/4pE=
github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/cskr/pubsub v1.0.2 h1:vlOzMhl6PFn60gRlTQQsIfVwaPB/B/8MziK8FhEPt/0=
@@ -510,8 +510,8 @@ github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nos
github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM=
github.com/fxamacker/cbor/v2 v2.7.0 h1:iM5WgngdRBanHcxugY4JySA0nk1wZorNOpTgCMedv5E=
github.com/fxamacker/cbor/v2 v2.7.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ=
-github.com/gabriel-vasile/mimetype v1.4.4 h1:QjV6pZ7/XZ7ryI2KuyeEDE8wnh7fHP9YnQy+R0LnH8I=
-github.com/gabriel-vasile/mimetype v1.4.4/go.mod h1:JwLei5XPtWdGiMFB5Pjle1oEeoSeEuJfJE+TtfvdB/s=
+github.com/gabriel-vasile/mimetype v1.4.6 h1:3+PzJTKLkvgjeTbts6msPJt4DixhT4YtFNf1gtGe3zc=
+github.com/gabriel-vasile/mimetype v1.4.6/go.mod h1:JX1qVKqZd40hUPpAfiNTe0Sne7hdfKSbOqqmkq8GCXc=
github.com/getkin/kin-openapi v0.128.0 h1:jqq3D9vC9pPq1dGcOCv7yOp1DaEe7c/T1vzcLbITSp4=
github.com/getkin/kin-openapi v0.128.0/go.mod h1:OZrfXzUfGrNbsKj+xmFBx6E5c6yH3At/tAKSc2UszXM=
github.com/getsentry/sentry-go v0.27.0 h1:Pv98CIbtB3LkMWmXi4Joa5OOcwbmnX88sF5qbK3r3Ps=
@@ -758,8 +758,8 @@ github.com/grpc-ecosystem/grpc-gateway v1.5.0/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpg
github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo=
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
-github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0 h1:bkypFPDjIYGfCYD5mRBvpqxfYX1YCS1PXdKYWi8FsN0=
-github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0/go.mod h1:P+Lt/0by1T8bfcF3z737NnSbmxQAppXMRziHUxPOC8k=
+github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0 h1:asbCHRVmodnJTuQ3qamDwqVOIjwqUPTYmYuemVOx+Ys=
+github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0/go.mod h1:ggCgvZ2r7uOoQjOyu2Y1NhHmEPPzzuhWgcza5M1Ji1I=
github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c h1:6rhixN/i8ZofjG1Y75iExal34USq5p+wiN1tpie8IrU=
github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0=
github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE=
@@ -856,8 +856,8 @@ github.com/invopop/yaml v0.3.1 h1:f0+ZpmhfBSS4MhG+4HYseMdJhoeeopbSKbq5Rpeelso=
github.com/invopop/yaml v0.3.1/go.mod h1:PMOp3nn4/12yEZUFfmOuNHJsZToEEOwoWsT+D81KkeA=
github.com/ipfs/bbloom v0.0.4 h1:Gi+8EGJ2y5qiD5FbsbpX/TMNcJw8gSqr7eyjHa4Fhvs=
github.com/ipfs/bbloom v0.0.4/go.mod h1:cS9YprKXpoZ9lT0n/Mw/a6/aFV6DTjTLYHeA+gyqMG0=
-github.com/ipfs/boxo v0.24.2 h1:feLM6DY6CNI0uSG3TvP/Hv4PdM/fsekjqSCqKtifF0E=
-github.com/ipfs/boxo v0.24.2/go.mod h1:Dt3TJjMZtF2QksMv2LC8pQlG9VQUiSV2DsHQzvDiroo=
+github.com/ipfs/boxo v0.24.3 h1:gldDPOWdM3Rz0v5LkVLtZu7A7gFNvAlWcmxhCqlHR3c=
+github.com/ipfs/boxo v0.24.3/go.mod h1:h0DRzOY1IBFDHp6KNvrJLMFdSXTYID0Zf+q7X05JsNg=
github.com/ipfs/go-bitfield v1.1.0 h1:fh7FIo8bSwaJEh6DdTWbCeZ1eqOaOkKFI74SCnsWbGA=
github.com/ipfs/go-bitfield v1.1.0/go.mod h1:paqf1wjq/D2BBmzfTVFlJQ9IlFOZpg422HL0HqsGWHU=
github.com/ipfs/go-block-format v0.2.0 h1:ZqrkxBA2ICbDRbK8KJs/u0O3dlp6gmAuuXUJNiW1Ycs=
@@ -1001,8 +1001,8 @@ github.com/libp2p/go-libp2p-asn-util v0.4.1 h1:xqL7++IKD9TBFMgnLPZR6/6iYhawHKHl9
github.com/libp2p/go-libp2p-asn-util v0.4.1/go.mod h1:d/NI6XZ9qxw67b4e+NgpQexCIiFYJjErASrYW4PFDN8=
github.com/libp2p/go-libp2p-gostream v0.6.0 h1:QfAiWeQRce6pqnYfmIVWJFXNdDyfiR/qkCnjyaZUPYU=
github.com/libp2p/go-libp2p-gostream v0.6.0/go.mod h1:Nywu0gYZwfj7Jc91PQvbGU8dIpqbQQkjWgDuOrFaRdA=
-github.com/libp2p/go-libp2p-kad-dht v0.27.0 h1:1Ea32tVTPiAfaLpPMbaBWFJgbsi/JpMqC2YBuFdf32o=
-github.com/libp2p/go-libp2p-kad-dht v0.27.0/go.mod h1:ixhjLuzaXSGtWsKsXTj7erySNuVC4UP7NO015cRrF14=
+github.com/libp2p/go-libp2p-kad-dht v0.28.1 h1:DVTfzG8Ybn88g9RycIq47evWCRss5f0Wm8iWtpwyHso=
+github.com/libp2p/go-libp2p-kad-dht v0.28.1/go.mod h1:0wHURlSFdAC42+wF7GEmpLoARw8JuS8do2guCtc/Y/w=
github.com/libp2p/go-libp2p-kbucket v0.6.4 h1:OjfiYxU42TKQSB8t8WYd8MKhYhMJeO2If+NiuKfb6iQ=
github.com/libp2p/go-libp2p-kbucket v0.6.4/go.mod h1:jp6w82sczYaBsAypt5ayACcRJi0lgsba7o4TzJKEfWA=
github.com/libp2p/go-libp2p-pubsub v0.12.0 h1:PENNZjSfk8KYxANRlpipdS7+BfLmOl3L2E/6vSNjbdI=
@@ -1339,8 +1339,8 @@ github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6ke
github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4=
github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE=
github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
-github.com/samber/lo v1.39.0 h1:4gTz1wUhNYLhFSKl6O+8peW0v2F4BCY034GRpU9WnuA=
-github.com/samber/lo v1.39.0/go.mod h1:+m/ZKRl6ClXCE2Lgf3MsQlWfh4bn1bz6CXEOxnEXnEA=
+github.com/samber/lo v1.47.0 h1:z7RynLwP5nbyRscyvcD043DWYoOcYRv3mV8lBeqOCLc=
+github.com/samber/lo v1.47.0/go.mod h1:RmDH9Ct32Qy3gduHQuKJ3gW1fMHAnE/fAzQuf6He5cU=
github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E=
github.com/sasha-s/go-deadlock v0.3.1 h1:sqv7fDNShgjcaxkO0JNcOAlr8B9+cV5Ey/OB71efZx0=
github.com/sasha-s/go-deadlock v0.3.1/go.mod h1:F73l+cr82YSh10GxyRI6qZiCgK64VaZjwesgfQ1/iLM=
@@ -1536,8 +1536,8 @@ go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0=
-go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.52.0 h1:9l89oX4ba9kHbBol3Xin3leYJ+252h0zszDtBwyKe2A=
-go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.52.0/go.mod h1:XLZfZboOJWHNKUv7eH0inh0E9VV6eWDFB/9yJyTLPp0=
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.56.0 h1:UP6IpuHFkUgOQL9FFQFrZ+5LiwhhYRbi7VZSIx6Nj5s=
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.56.0/go.mod h1:qxuZLtbq5QDtdeSHsS7bcf6EH6uO6jUAgk764zd3rhM=
go.opentelemetry.io/otel v1.32.0 h1:WnBN+Xjcteh0zdk01SVqV55d/m62NJLJdIyb4y/WO5U=
go.opentelemetry.io/otel v1.32.0/go.mod h1:00DCVSB0RQcnzlwyTfqtxSm+DRr9hpYrHjNGiBHVQIg=
go.opentelemetry.io/otel/metric v1.32.0 h1:xV2umtmNcThh2/a/aCP+h64Xx5wsj8qqnkYZktzNa0M=
@@ -1601,8 +1601,8 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y
golang.org/x/crypto v0.8.0/go.mod h1:mRqEX+O9/h5TFCrQhkgjo2yKi0yYA+9ecGkdQoHrywE=
golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw=
golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
-golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw=
-golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U=
+golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ=
+golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -1760,8 +1760,8 @@ golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
-golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ=
+golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180810173357-98c5dad5d1a0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -1880,8 +1880,8 @@ golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU=
golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY=
-golang.org/x/term v0.25.0 h1:WtHI/ltw4NvSUig5KARz9h521QvRC8RmF/cuYqifU24=
-golang.org/x/term v0.25.0/go.mod h1:RPyXicDX+6vLxogjjRxjgD2TKtmAO6NZBsBRfrOLu7M=
+golang.org/x/term v0.26.0 h1:WEQa6V3Gja/BhNxg540hBip/kkaYtRg3cxg4oXSw4AU=
+golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -1896,8 +1896,8 @@ golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
-golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM=
-golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
+golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug=
+golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -2157,10 +2157,10 @@ google.golang.org/genproto v0.0.0-20221014213838-99cd37c6964a/go.mod h1:1vXfmgAz
google.golang.org/genproto v0.0.0-20221025140454-527a21cfbd71/go.mod h1:9qHF0xnpdSfF6knlcsnpzUu5y+rpwgbvsyGAZPBMg4s=
google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de h1:F6qOa9AZTYJXOUEr4jDysRDLrm4PHePlge4v4TGAlxY=
google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de/go.mod h1:VUhTRKeHn9wwcdrk73nvdC9gF178Tzhmt/qyaFcPLSo=
-google.golang.org/genproto/googleapis/api v0.0.0-20240814211410-ddb44dafa142 h1:wKguEg1hsxI2/L3hUYrpo1RVi48K+uTyzKqprwLXsb8=
-google.golang.org/genproto/googleapis/api v0.0.0-20240814211410-ddb44dafa142/go.mod h1:d6be+8HhtEtucleCbxpPW9PA9XwISACu8nvpPqF0BVo=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20240814211410-ddb44dafa142 h1:e7S5W7MGGLaSu8j3YjdezkZ+m1/Nm0uRVRMEMGk26Xs=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20240814211410-ddb44dafa142/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU=
+google.golang.org/genproto/googleapis/api v0.0.0-20241007155032-5fefd90f89a9 h1:T6rh4haD3GVYsgEfWExoCZA2o2FmbNyKpTuAxbEFPTg=
+google.golang.org/genproto/googleapis/api v0.0.0-20241007155032-5fefd90f89a9/go.mod h1:wp2WsuBYj6j8wUdo3ToZsdxxixbvQNAHqVJrTgi5E5M=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20241007155032-5fefd90f89a9 h1:QCqS/PdaHTSWGvupk2F/ehwHtGc0/GYkT+3GAcR1CCc=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20241007155032-5fefd90f89a9/go.mod h1:GX3210XPVPUjJbTUbvwI8f2IpZDMZuPJWDzDuebbviI=
google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.16.0/go.mod h1:0JHn/cJsOMiMfNA9+DeHDlAU7KAAB5GDlYFpa9MZMio=
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
diff --git a/playground/package-lock.json b/playground/package-lock.json
index f14d580e2f..5125c66756 100644
--- a/playground/package-lock.json
+++ b/playground/package-lock.json
@@ -18,13 +18,13 @@
"@types/react": "^18.3.12",
"@types/react-dom": "^18.3.1",
"@types/swagger-ui-react": "^4.18.3",
- "@typescript-eslint/eslint-plugin": "^8.13.0",
- "@typescript-eslint/parser": "^8.13.0",
+ "@typescript-eslint/eslint-plugin": "^8.15.0",
+ "@typescript-eslint/parser": "^8.15.0",
"@vitejs/plugin-react-swc": "^3.7.1",
- "eslint": "^9.14.0",
+ "eslint": "^9.15.0",
"eslint-plugin-react-hooks": "^5.0.0",
"eslint-plugin-react-refresh": "^0.4.14",
- "typescript": "^5.6.3",
+ "typescript": "^5.7.2",
"vite": "^5.4.11"
}
},
@@ -497,9 +497,9 @@
}
},
"node_modules/@eslint/config-array": {
- "version": "0.18.0",
- "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.18.0.tgz",
- "integrity": "sha512-fTxvnS1sRMu3+JjXwJG0j/i4RT9u4qJ+lqS/yCGap4lH4zZGzQ7tu+xZqQmcMZq5OBZDL4QRxQzRjkWcGt8IVw==",
+ "version": "0.19.0",
+ "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.19.0.tgz",
+ "integrity": "sha512-zdHg2FPIFNKPdcHWtiNT+jEFCHYVplAXRDlQDyqy0zGx/q2parwh7brGJSiTxRk/TSMkbM//zt/f5CHgyTyaSQ==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
@@ -536,9 +536,9 @@
}
},
"node_modules/@eslint/core": {
- "version": "0.7.0",
- "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.7.0.tgz",
- "integrity": "sha512-xp5Jirz5DyPYlPiKat8jaq0EmYvDXKKpzTbxXMpT9eqlRJkRKIz9AGMdlvYjih+im+QlhWrpvVjl8IPC/lHlUw==",
+ "version": "0.9.0",
+ "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.9.0.tgz",
+ "integrity": "sha512-7ATR9F0e4W85D/0w7cU0SNj7qkAexMG+bAHEZOjo9akvGuhHE2m7umzWzfnpa0XAg5Kxc1BWmtPMV67jJ+9VUg==",
"dev": true,
"license": "Apache-2.0",
"engines": {
@@ -546,9 +546,9 @@
}
},
"node_modules/@eslint/eslintrc": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.1.0.tgz",
- "integrity": "sha512-4Bfj15dVJdoy3RfZmmo86RK1Fwzn6SstsvK9JS+BaVKqC6QQQQyXekNaC+g+LKNgkQ+2VhGAzm6hO40AhMR3zQ==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.2.0.tgz",
+ "integrity": "sha512-grOjVNN8P3hjJn/eIETF1wwd12DdnwFDoyceUJLYYdkpbwq3nLi+4fqrTAONx7XDALqlL220wC/RHSC/QTI/0w==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -594,9 +594,9 @@
}
},
"node_modules/@eslint/js": {
- "version": "9.14.0",
- "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.14.0.tgz",
- "integrity": "sha512-pFoEtFWCPyDOl+C6Ift+wC7Ro89otjigCf5vcuWqWgqNSQbRrpjSvdeE6ofLz4dHmyxD5f7gIdGT4+p36L6Twg==",
+ "version": "9.15.0",
+ "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.15.0.tgz",
+ "integrity": "sha512-tMTqrY+EzbXmKJR5ToI8lxu7jaN5EdmrBFJpQk5JmSlyLsx6o4t27r883K5xsLuCYCpfKBCGswMSWXsM+jB7lg==",
"dev": true,
"license": "MIT",
"engines": {
@@ -614,9 +614,9 @@
}
},
"node_modules/@eslint/plugin-kit": {
- "version": "0.2.2",
- "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.2.tgz",
- "integrity": "sha512-CXtq5nR4Su+2I47WPOlWud98Y5Lv8Kyxp2ukhgFx/eW6Blm18VXJO5WuQylPugRo8nbluoi6GvvxBLqHcvqUUw==",
+ "version": "0.2.3",
+ "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.3.tgz",
+ "integrity": "sha512-2b/g5hRmpbb1o4GnTZax9N9m0FXzz9OV42ZzI4rDDMDuHUqigAiQCEWChBWCY4ztAGVRjoWT19v0yMmc5/L5kA==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
@@ -694,16 +694,16 @@
}
},
"node_modules/@graphiql/toolkit": {
- "version": "0.11.0",
- "resolved": "https://registry.npmjs.org/@graphiql/toolkit/-/toolkit-0.11.0.tgz",
- "integrity": "sha512-VqqQrvkMwgbGhj7J5907yfuAy5B1OCgOTIPi7gtRneG1jYmnqvSxi8Yrmu0B8G8fZxkxKVsYi8dE8EtsOBrTGQ==",
+ "version": "0.11.1",
+ "resolved": "https://registry.npmjs.org/@graphiql/toolkit/-/toolkit-0.11.1.tgz",
+ "integrity": "sha512-G02te70/oYYna5UhbH6TXwNxeQyWa+ChlPonUrKwC5Ot9ItraGJ9yUU4sS+YRaA+EvkzNoHG79XcW2k1QaAMiw==",
"license": "MIT",
"dependencies": {
"@n1ru4l/push-pull-async-iterable-iterator": "^3.1.0",
"meros": "^1.1.4"
},
"peerDependencies": {
- "graphql": "^15.5.0 || ^16.0.0 || ^17.0.0-alpha.2",
+ "graphql": "^15.5.0 || ^16.0.0 || ^17.0.0",
"graphql-ws": ">= 4.5.0"
},
"peerDependenciesMeta": {
@@ -1394,9 +1394,9 @@
}
},
"node_modules/@radix-ui/react-tooltip": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.1.3.tgz",
- "integrity": "sha512-Z4w1FIS0BqVFI2c1jZvb/uDVJijJjJ2ZMuPV81oVgTZ7g3BZxobplnMVvXtFWgtozdvYJ+MFWtwkM5S2HnAong==",
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.1.4.tgz",
+ "integrity": "sha512-QpObUH/ZlpaO4YgHSaYzrLO2VuO+ZBFFgGzjMUPwtiYnAzzNNDPJeEGRrT7qNOrWm/Jr08M1vlp+vTHtnSQ0Uw==",
"license": "MIT",
"dependencies": {
"@radix-ui/primitive": "1.1.0",
@@ -1559,9 +1559,9 @@
"license": "MIT"
},
"node_modules/@rollup/rollup-android-arm-eabi": {
- "version": "4.25.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.25.0.tgz",
- "integrity": "sha512-CC/ZqFZwlAIbU1wUPisHyV/XRc5RydFrNLtgl3dGYskdwPZdt4HERtKm50a/+DtTlKeCq9IXFEWR+P6blwjqBA==",
+ "version": "4.27.4",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.27.4.tgz",
+ "integrity": "sha512-2Y3JT6f5MrQkICUyRVCw4oa0sutfAsgaSsb0Lmmy1Wi2y7X5vT9Euqw4gOsCyy0YfKURBg35nhUKZS4mDcfULw==",
"cpu": [
"arm"
],
@@ -1573,9 +1573,9 @@
]
},
"node_modules/@rollup/rollup-android-arm64": {
- "version": "4.25.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.25.0.tgz",
- "integrity": "sha512-/Y76tmLGUJqVBXXCfVS8Q8FJqYGhgH4wl4qTA24E9v/IJM0XvJCGQVSW1QZ4J+VURO9h8YCa28sTFacZXwK7Rg==",
+ "version": "4.27.4",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.27.4.tgz",
+ "integrity": "sha512-wzKRQXISyi9UdCVRqEd0H4cMpzvHYt1f/C3CoIjES6cG++RHKhrBj2+29nPF0IB5kpy9MS71vs07fvrNGAl/iA==",
"cpu": [
"arm64"
],
@@ -1587,9 +1587,9 @@
]
},
"node_modules/@rollup/rollup-darwin-arm64": {
- "version": "4.25.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.25.0.tgz",
- "integrity": "sha512-YVT6L3UrKTlC0FpCZd0MGA7NVdp7YNaEqkENbWQ7AOVOqd/7VzyHpgIpc1mIaxRAo1ZsJRH45fq8j4N63I/vvg==",
+ "version": "4.27.4",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.27.4.tgz",
+ "integrity": "sha512-PlNiRQapift4LNS8DPUHuDX/IdXiLjf8mc5vdEmUR0fF/pyy2qWwzdLjB+iZquGr8LuN4LnUoSEvKRwjSVYz3Q==",
"cpu": [
"arm64"
],
@@ -1601,9 +1601,9 @@
]
},
"node_modules/@rollup/rollup-darwin-x64": {
- "version": "4.25.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.25.0.tgz",
- "integrity": "sha512-ZRL+gexs3+ZmmWmGKEU43Bdn67kWnMeWXLFhcVv5Un8FQcx38yulHBA7XR2+KQdYIOtD0yZDWBCudmfj6lQJoA==",
+ "version": "4.27.4",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.27.4.tgz",
+ "integrity": "sha512-o9bH2dbdgBDJaXWJCDTNDYa171ACUdzpxSZt+u/AAeQ20Nk5x+IhA+zsGmrQtpkLiumRJEYef68gcpn2ooXhSQ==",
"cpu": [
"x64"
],
@@ -1615,9 +1615,9 @@
]
},
"node_modules/@rollup/rollup-freebsd-arm64": {
- "version": "4.25.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.25.0.tgz",
- "integrity": "sha512-xpEIXhiP27EAylEpreCozozsxWQ2TJbOLSivGfXhU4G1TBVEYtUPi2pOZBnvGXHyOdLAUUhPnJzH3ah5cqF01g==",
+ "version": "4.27.4",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.27.4.tgz",
+ "integrity": "sha512-NBI2/i2hT9Q+HySSHTBh52da7isru4aAAo6qC3I7QFVsuhxi2gM8t/EI9EVcILiHLj1vfi+VGGPaLOUENn7pmw==",
"cpu": [
"arm64"
],
@@ -1629,9 +1629,9 @@
]
},
"node_modules/@rollup/rollup-freebsd-x64": {
- "version": "4.25.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.25.0.tgz",
- "integrity": "sha512-sC5FsmZGlJv5dOcURrsnIK7ngc3Kirnx3as2XU9uER+zjfyqIjdcMVgzy4cOawhsssqzoAX19qmxgJ8a14Qrqw==",
+ "version": "4.27.4",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.27.4.tgz",
+ "integrity": "sha512-wYcC5ycW2zvqtDYrE7deary2P2UFmSh85PUpAx+dwTCO9uw3sgzD6Gv9n5X4vLaQKsrfTSZZ7Z7uynQozPVvWA==",
"cpu": [
"x64"
],
@@ -1643,9 +1643,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm-gnueabihf": {
- "version": "4.25.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.25.0.tgz",
- "integrity": "sha512-uD/dbLSs1BEPzg564TpRAQ/YvTnCds2XxyOndAO8nJhaQcqQGFgv/DAVko/ZHap3boCvxnzYMa3mTkV/B/3SWA==",
+ "version": "4.27.4",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.27.4.tgz",
+ "integrity": "sha512-9OwUnK/xKw6DyRlgx8UizeqRFOfi9mf5TYCw1uolDaJSbUmBxP85DE6T4ouCMoN6pXw8ZoTeZCSEfSaYo+/s1w==",
"cpu": [
"arm"
],
@@ -1657,9 +1657,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm-musleabihf": {
- "version": "4.25.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.25.0.tgz",
- "integrity": "sha512-ZVt/XkrDlQWegDWrwyC3l0OfAF7yeJUF4fq5RMS07YM72BlSfn2fQQ6lPyBNjt+YbczMguPiJoCfaQC2dnflpQ==",
+ "version": "4.27.4",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.27.4.tgz",
+ "integrity": "sha512-Vgdo4fpuphS9V24WOV+KwkCVJ72u7idTgQaBoLRD0UxBAWTF9GWurJO9YD9yh00BzbkhpeXtm6na+MvJU7Z73A==",
"cpu": [
"arm"
],
@@ -1671,9 +1671,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm64-gnu": {
- "version": "4.25.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.25.0.tgz",
- "integrity": "sha512-qboZ+T0gHAW2kkSDPHxu7quaFaaBlynODXpBVnPxUgvWYaE84xgCKAPEYE+fSMd3Zv5PyFZR+L0tCdYCMAtG0A==",
+ "version": "4.27.4",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.27.4.tgz",
+ "integrity": "sha512-pleyNgyd1kkBkw2kOqlBx+0atfIIkkExOTiifoODo6qKDSpnc6WzUY5RhHdmTdIJXBdSnh6JknnYTtmQyobrVg==",
"cpu": [
"arm64"
],
@@ -1685,9 +1685,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm64-musl": {
- "version": "4.25.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.25.0.tgz",
- "integrity": "sha512-ndWTSEmAaKr88dBuogGH2NZaxe7u2rDoArsejNslugHZ+r44NfWiwjzizVS1nUOHo+n1Z6qV3X60rqE/HlISgw==",
+ "version": "4.27.4",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.27.4.tgz",
+ "integrity": "sha512-caluiUXvUuVyCHr5DxL8ohaaFFzPGmgmMvwmqAITMpV/Q+tPoaHZ/PWa3t8B2WyoRcIIuu1hkaW5KkeTDNSnMA==",
"cpu": [
"arm64"
],
@@ -1699,9 +1699,9 @@
]
},
"node_modules/@rollup/rollup-linux-powerpc64le-gnu": {
- "version": "4.25.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.25.0.tgz",
- "integrity": "sha512-BVSQvVa2v5hKwJSy6X7W1fjDex6yZnNKy3Kx1JGimccHft6HV0THTwNtC2zawtNXKUu+S5CjXslilYdKBAadzA==",
+ "version": "4.27.4",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.27.4.tgz",
+ "integrity": "sha512-FScrpHrO60hARyHh7s1zHE97u0KlT/RECzCKAdmI+LEoC1eDh/RDji9JgFqyO+wPDb86Oa/sXkily1+oi4FzJQ==",
"cpu": [
"ppc64"
],
@@ -1713,9 +1713,9 @@
]
},
"node_modules/@rollup/rollup-linux-riscv64-gnu": {
- "version": "4.25.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.25.0.tgz",
- "integrity": "sha512-G4hTREQrIdeV0PE2JruzI+vXdRnaK1pg64hemHq2v5fhv8C7WjVaeXc9P5i4Q5UC06d/L+zA0mszYIKl+wY8oA==",
+ "version": "4.27.4",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.27.4.tgz",
+ "integrity": "sha512-qyyprhyGb7+RBfMPeww9FlHwKkCXdKHeGgSqmIXw9VSUtvyFZ6WZRtnxgbuz76FK7LyoN8t/eINRbPUcvXB5fw==",
"cpu": [
"riscv64"
],
@@ -1727,9 +1727,9 @@
]
},
"node_modules/@rollup/rollup-linux-s390x-gnu": {
- "version": "4.25.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.25.0.tgz",
- "integrity": "sha512-9T/w0kQ+upxdkFL9zPVB6zy9vWW1deA3g8IauJxojN4bnz5FwSsUAD034KpXIVX5j5p/rn6XqumBMxfRkcHapQ==",
+ "version": "4.27.4",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.27.4.tgz",
+ "integrity": "sha512-PFz+y2kb6tbh7m3A7nA9++eInGcDVZUACulf/KzDtovvdTizHpZaJty7Gp0lFwSQcrnebHOqxF1MaKZd7psVRg==",
"cpu": [
"s390x"
],
@@ -1741,9 +1741,9 @@
]
},
"node_modules/@rollup/rollup-linux-x64-gnu": {
- "version": "4.25.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.25.0.tgz",
- "integrity": "sha512-ThcnU0EcMDn+J4B9LD++OgBYxZusuA7iemIIiz5yzEcFg04VZFzdFjuwPdlURmYPZw+fgVrFzj4CA64jSTG4Ig==",
+ "version": "4.27.4",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.27.4.tgz",
+ "integrity": "sha512-Ni8mMtfo+o/G7DVtweXXV/Ol2TFf63KYjTtoZ5f078AUgJTmaIJnj4JFU7TK/9SVWTaSJGxPi5zMDgK4w+Ez7Q==",
"cpu": [
"x64"
],
@@ -1755,9 +1755,9 @@
]
},
"node_modules/@rollup/rollup-linux-x64-musl": {
- "version": "4.25.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.25.0.tgz",
- "integrity": "sha512-zx71aY2oQxGxAT1JShfhNG79PnjYhMC6voAjzpu/xmMjDnKNf6Nl/xv7YaB/9SIa9jDYf8RBPWEnjcdlhlv1rQ==",
+ "version": "4.27.4",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.27.4.tgz",
+ "integrity": "sha512-5AeeAF1PB9TUzD+3cROzFTnAJAcVUGLuR8ng0E0WXGkYhp6RD6L+6szYVX+64Rs0r72019KHZS1ka1q+zU/wUw==",
"cpu": [
"x64"
],
@@ -1769,9 +1769,9 @@
]
},
"node_modules/@rollup/rollup-win32-arm64-msvc": {
- "version": "4.25.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.25.0.tgz",
- "integrity": "sha512-JT8tcjNocMs4CylWY/CxVLnv8e1lE7ff1fi6kbGocWwxDq9pj30IJ28Peb+Y8yiPNSF28oad42ApJB8oUkwGww==",
+ "version": "4.27.4",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.27.4.tgz",
+ "integrity": "sha512-yOpVsA4K5qVwu2CaS3hHxluWIK5HQTjNV4tWjQXluMiiiu4pJj4BN98CvxohNCpcjMeTXk/ZMJBRbgRg8HBB6A==",
"cpu": [
"arm64"
],
@@ -1783,9 +1783,9 @@
]
},
"node_modules/@rollup/rollup-win32-ia32-msvc": {
- "version": "4.25.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.25.0.tgz",
- "integrity": "sha512-dRLjLsO3dNOfSN6tjyVlG+Msm4IiZnGkuZ7G5NmpzwF9oOc582FZG05+UdfTbz5Jd4buK/wMb6UeHFhG18+OEg==",
+ "version": "4.27.4",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.27.4.tgz",
+ "integrity": "sha512-KtwEJOaHAVJlxV92rNYiG9JQwQAdhBlrjNRp7P9L8Cb4Rer3in+0A+IPhJC9y68WAi9H0sX4AiG2NTsVlmqJeQ==",
"cpu": [
"ia32"
],
@@ -1797,9 +1797,9 @@
]
},
"node_modules/@rollup/rollup-win32-x64-msvc": {
- "version": "4.25.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.25.0.tgz",
- "integrity": "sha512-/RqrIFtLB926frMhZD0a5oDa4eFIbyNEwLLloMTEjmqfwZWXywwVVOVmwTsuyhC9HKkVEZcOOi+KV4U9wmOdlg==",
+ "version": "4.27.4",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.27.4.tgz",
+ "integrity": "sha512-3j4jx1TppORdTAoBJRd+/wJRGCPC0ETWkXOecJ6PPZLj6SptXkrXcNqdj0oclbKML6FkQltdz7bBA3rUSirZug==",
"cpu": [
"x64"
],
@@ -1818,13 +1818,13 @@
"license": "Apache-2.0"
},
"node_modules/@swagger-api/apidom-ast": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ast/-/apidom-ast-1.0.0-alpha.10.tgz",
- "integrity": "sha512-f4Y9t1oBlnsvMoLPCykzn5LRrmARiaPzorocQkMFTkYUPb7RKA4zCuWi67hH4iDVsVvkPutgew19XyJiI3OF9Q==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ast/-/apidom-ast-1.0.0-beta.3.tgz",
+ "integrity": "sha512-JOXGfadL3ucJH+MY9BDT7dJOwFy0jX3XaAY/CWR92EnliEYfaEzZvH08FGnyqyYHcfT8T0DLKna5CWUHaskZuw==",
"license": "Apache-2.0",
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-error": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-error": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -1832,14 +1832,14 @@
}
},
"node_modules/@swagger-api/apidom-core": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-core/-/apidom-core-1.0.0-alpha.10.tgz",
- "integrity": "sha512-4uXIN8cLigD1SZUDhmrEwW+1zbrB6bbD9Hlpo/BF74t/Nh4ZoEOUXv1oR/8QXB9AsIkdO65FdDHyaPzyGbjMiQ==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-core/-/apidom-core-1.0.0-beta.3.tgz",
+ "integrity": "sha512-oRcv3PgwSAvfxvai0afGt/rC2Kk9Zs2ArLPZ6FnVCv/GSnMsuvIQJc5UH29P9eGFcLJIZpQtEHnU6W+u8u0zAA==",
"license": "Apache-2.0",
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-ast": "^1.0.0-alpha.10",
- "@swagger-api/apidom-error": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ast": "^1.0.0-beta.3",
+ "@swagger-api/apidom-error": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"minim": "~0.23.8",
"ramda": "~0.30.0",
@@ -1849,39 +1849,39 @@
}
},
"node_modules/@swagger-api/apidom-error": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-error/-/apidom-error-1.0.0-alpha.10.tgz",
- "integrity": "sha512-ydHNOKTdp9jaeW2yBvdZazXNCVFPbzC2Dy3dtDWU3MwUtSryoefT9OUQFWL7NxzChFRneNhBEcVl4NRocitXeA==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-error/-/apidom-error-1.0.0-beta.3.tgz",
+ "integrity": "sha512-cW1tzehphuxA0uM+1m4/0G1d/WjDQyF+RL9D9t1mfhuVxr8AorgYUgY+bjg0pkLfiSTwjrDiuTbYM+jZwrHx8w==",
"license": "Apache-2.0",
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7"
}
},
"node_modules/@swagger-api/apidom-json-pointer": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-json-pointer/-/apidom-json-pointer-1.0.0-alpha.10.tgz",
- "integrity": "sha512-Xo0v4Jxp0ZiAm+OOL2PSLyjiw5OAkCMxI0nN9+vOw1/mfXcC+tdb30QQ9WNtF7O9LExjznfFID/NnDEYqBRDwA==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-json-pointer/-/apidom-json-pointer-1.0.0-beta.3.tgz",
+ "integrity": "sha512-r6Gvbj2XDcK1wIULoclHcGYPAVXeUkj5ECRslB/Zle/fOU0Jb8s4mmFARyQE/DT+fQggXn8nUJBda3NWPK4GcA==",
"license": "Apache-2.0",
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-error": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-error": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-ns-api-design-systems": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-api-design-systems/-/apidom-ns-api-design-systems-1.0.0-alpha.10.tgz",
- "integrity": "sha512-0i4KKNboHi7F8Nra2WNHDl9aOndyTcfKiBfdzSw3j+H5wYAHldeKg7zppqj5rVfwZL9pB5r7eFYZlowwGtmlLg==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-api-design-systems/-/apidom-ns-api-design-systems-1.0.0-beta.3.tgz",
+ "integrity": "sha512-x+NiLR0xZ0VB8AMJr7ii+6A27AP2CGjLyPQr6JutnifXG+vpkjbgXCPyz2qlmrvuLIkBJIE2lBuyX3+qQXmgCw==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-error": "^1.0.0-alpha.10",
- "@swagger-api/apidom-ns-openapi-3-1": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-error": "^1.0.0-beta.3",
+ "@swagger-api/apidom-ns-openapi-3-1": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -1889,15 +1889,15 @@
}
},
"node_modules/@swagger-api/apidom-ns-asyncapi-2": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-asyncapi-2/-/apidom-ns-asyncapi-2-1.0.0-alpha.10.tgz",
- "integrity": "sha512-d1LLJ/9LQaT/4jJudFhy3xhpjdTA3pVwBBUqXGPgW2Fp21auTYJMBM9J91wvVUXMUQiVg95DohkCb6TNUYzqLw==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-asyncapi-2/-/apidom-ns-asyncapi-2-1.0.0-beta.3.tgz",
+ "integrity": "sha512-9E4/kTf/OzV3vgRjZOB+6TRqQX2ljirD+UBQ8QPSJKBUTtq8+F7U9a8Z9AGYrKCQUMgbge5JMYCqHmOmrJKVUA==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-ns-json-schema-draft-7": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-ns-json-schema-draft-7": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -1905,14 +1905,14 @@
}
},
"node_modules/@swagger-api/apidom-ns-json-schema-draft-4": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-4/-/apidom-ns-json-schema-draft-4-1.0.0-alpha.10.tgz",
- "integrity": "sha512-sNj4pAmxEfFYIqRcP9A7/gjNMaa7nu1pWT6gTMXtYROyo4XrChc3wit8F76WJEFIiEPLrPs2SrnnA5GIHM7EnA==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-4/-/apidom-ns-json-schema-draft-4-1.0.0-beta.3.tgz",
+ "integrity": "sha512-Sc/ywYCHFIMwhZX0Yo+OTmHUvszv3JE3xsvpd18nu7rH+jNyA10oUdTMgnRsTNMnL7siVO+32OKQkdLOSKsEHA==",
"license": "Apache-2.0",
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-ast": "^1.0.0-alpha.10",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ast": "^1.0.0-beta.3",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -1920,16 +1920,16 @@
}
},
"node_modules/@swagger-api/apidom-ns-json-schema-draft-6": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-6/-/apidom-ns-json-schema-draft-6-1.0.0-alpha.10.tgz",
- "integrity": "sha512-Okwi0ikBSIBhQwMvsoe1+8Ff55cwwp9hu88N/sTDBxI7lyX0xCGAlSrJ9tx4Z/uOn5X+IL9HCRuNlbFt4Bvi2w==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-6/-/apidom-ns-json-schema-draft-6-1.0.0-beta.3.tgz",
+ "integrity": "sha512-UuGfaJfWzsTCTEyxyKtM86SNdS4EsWB/+j8JWw88h7nFK59YNDmnuXk9PpFyuccpIAHnDq7UJypD3lRvNkJdhQ==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-error": "^1.0.0-alpha.10",
- "@swagger-api/apidom-ns-json-schema-draft-4": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-error": "^1.0.0-beta.3",
+ "@swagger-api/apidom-ns-json-schema-draft-4": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -1937,16 +1937,16 @@
}
},
"node_modules/@swagger-api/apidom-ns-json-schema-draft-7": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-7/-/apidom-ns-json-schema-draft-7-1.0.0-alpha.10.tgz",
- "integrity": "sha512-Y5p+iA1k8HR5d5cS1jtoADPKJLVg5czaHrs39UcMoMPhINqgqKGd2sYKtX7DnglcLARXe06pv0Qs9ERwCd5ayQ==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-7/-/apidom-ns-json-schema-draft-7-1.0.0-beta.3.tgz",
+ "integrity": "sha512-7Snaf8/qZ3Q9xnjEXo2cJ8L4pvDbHA+k/j7rqbY4o3h5EeMy93ClVUwoeJ2y/JWax/V1DWTyYMhq+9dXlcIUYQ==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-error": "^1.0.0-alpha.10",
- "@swagger-api/apidom-ns-json-schema-draft-6": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-error": "^1.0.0-beta.3",
+ "@swagger-api/apidom-ns-json-schema-draft-6": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -1954,16 +1954,16 @@
}
},
"node_modules/@swagger-api/apidom-ns-openapi-2": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-2/-/apidom-ns-openapi-2-1.0.0-alpha.10.tgz",
- "integrity": "sha512-hVhpXIG5CXSqeLo7+d5VwN8b9X0BM8yMZCEIxVAu5050GlcHC3CeJVpy+2DEBkbvR9tzc2HfPGMpWyQpgnimhQ==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-2/-/apidom-ns-openapi-2-1.0.0-beta.3.tgz",
+ "integrity": "sha512-eBNUkQdIDE2fWUXdIeRpN9OMxwfxU2WJFMRHst204Doanh8iJVp3Mz/+z9agHJ6Pkqth2XTXA0EDd1QiI37t+g==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-error": "^1.0.0-alpha.10",
- "@swagger-api/apidom-ns-json-schema-draft-4": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-error": "^1.0.0-beta.3",
+ "@swagger-api/apidom-ns-json-schema-draft-4": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -1971,15 +1971,15 @@
}
},
"node_modules/@swagger-api/apidom-ns-openapi-3-0": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-0/-/apidom-ns-openapi-3-0-1.0.0-alpha.10.tgz",
- "integrity": "sha512-zF2tPojJBGmQ/GuX+QJ0BhBWmnC+ET8Zah9utKpYWFFjqG/Wl6YzWpyrEflXpfGFzDFgoo+R+/3QvzScbPssqg==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-0/-/apidom-ns-openapi-3-0-1.0.0-beta.3.tgz",
+ "integrity": "sha512-wKMdk5nplkT2PA1sRFZ2WOLmb7xi9++T6UnCeivmV+sy5NtUPpwkJLUWWIlZdZLyiGKmhZQ1gVvhsbyWRoAVPw==",
"license": "Apache-2.0",
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-error": "^1.0.0-alpha.10",
- "@swagger-api/apidom-ns-json-schema-draft-4": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-error": "^1.0.0-beta.3",
+ "@swagger-api/apidom-ns-json-schema-draft-4": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -1987,16 +1987,16 @@
}
},
"node_modules/@swagger-api/apidom-ns-openapi-3-1": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-1/-/apidom-ns-openapi-3-1-1.0.0-alpha.10.tgz",
- "integrity": "sha512-/7o+/Z2LelLcOdDSeY8O467Tjmr4yp0c8T4l13+zoQlaJFCzoeJqUUzP/dyqLPxqSeSMOez7uXnYpii6F8uYcA==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-1/-/apidom-ns-openapi-3-1-1.0.0-beta.3.tgz",
+ "integrity": "sha512-XltfOZNTjrBvrWx1hPU6pHn7lHKKY9jXmiQzojX/jhMjZ6Kp6TLGjMMU3SmEUPU6sTaXKUeO5UUTxe2v6VmqMA==",
"license": "Apache-2.0",
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-ast": "^1.0.0-alpha.10",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-json-pointer": "^1.0.0-alpha.10",
- "@swagger-api/apidom-ns-openapi-3-0": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ast": "^1.0.0-beta.3",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-json-pointer": "^1.0.0-beta.3",
+ "@swagger-api/apidom-ns-openapi-3-0": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -2004,15 +2004,15 @@
}
},
"node_modules/@swagger-api/apidom-ns-workflows-1": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-workflows-1/-/apidom-ns-workflows-1-1.0.0-alpha.10.tgz",
- "integrity": "sha512-tem8H3DHvQNxUqbiLmepccjAyFffS41Z90ibugsw17xzCNIIr6kDwlhiSSGkl52C+IBqoUlE6kdV0afPr2WuUA==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-workflows-1/-/apidom-ns-workflows-1-1.0.0-beta.3.tgz",
+ "integrity": "sha512-+7i8CZAC+TypSYuxTtwXH2qIyQC1ATn8r+1pW4NWCs4F2Yr4K2gGG4ZmOE6ckNa+Q53yyx+Spt7xhLfZDJZp/w==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-ns-openapi-3-1": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-ns-openapi-3-1": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
@@ -2020,243 +2020,243 @@
}
},
"node_modules/@swagger-api/apidom-parser-adapter-api-design-systems-json": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-json/-/apidom-parser-adapter-api-design-systems-json-1.0.0-alpha.10.tgz",
- "integrity": "sha512-8yuL2w3G4zdBxyITLHKSFRwpgl8Rp4/bCR2GTznYKr5wYuN9RVSKAp2sGtuWHnynnpspodswu3AI1BVCLKBj1A==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-json/-/apidom-parser-adapter-api-design-systems-json-1.0.0-beta.3.tgz",
+ "integrity": "sha512-IpnxjLDVdRaY+ewNW8zbiMzYu5eKifpioFPGDlHc2MoTW6zqo5UKViZKL4MbsncySWBj7+URvTIFYjip3TvkKg==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-ns-api-design-systems": "^1.0.0-alpha.10",
- "@swagger-api/apidom-parser-adapter-json": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-ns-api-design-systems": "^1.0.0-beta.3",
+ "@swagger-api/apidom-parser-adapter-json": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-api-design-systems-yaml": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-yaml/-/apidom-parser-adapter-api-design-systems-yaml-1.0.0-alpha.10.tgz",
- "integrity": "sha512-I+/tRdC6CK0GfjZgOaTfpjtehkFW7i1A1ixFOPtrwKA8v3oZ2eUW7dIjDMMC0yTt67j7enHlGTw6o2rZZGnjpA==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-yaml/-/apidom-parser-adapter-api-design-systems-yaml-1.0.0-beta.3.tgz",
+ "integrity": "sha512-Pvj+4OMIzKMx77Ulbp/CdWGAQhor88q5BJlY3cuSNd2Oth+mfe6r7NUXWVSpG6H9+9Y6YJdnGOzQ1PHWJPOlqA==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-ns-api-design-systems": "^1.0.0-alpha.10",
- "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-ns-api-design-systems": "^1.0.0-beta.3",
+ "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-asyncapi-json-2": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-json-2/-/apidom-parser-adapter-asyncapi-json-2-1.0.0-alpha.10.tgz",
- "integrity": "sha512-FX4buMibcnz0rsQKMBUrZM8cS1/s0pi3TV9HAsKPQY1mKssyeUEE/nlp6DBbYM6kNCEdq2ALvnPtZVwEJpxS3A==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-json-2/-/apidom-parser-adapter-asyncapi-json-2-1.0.0-beta.3.tgz",
+ "integrity": "sha512-Z8xIy3pirwAapLgZ18BqRVua5rh0NsvQNpx+5Bi5yJD+SD6Syk5OqsgFkqN7T/LmyqpivQiYRgItUBaHXuDnxg==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-ns-asyncapi-2": "^1.0.0-alpha.10",
- "@swagger-api/apidom-parser-adapter-json": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-ns-asyncapi-2": "^1.0.0-beta.3",
+ "@swagger-api/apidom-parser-adapter-json": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-asyncapi-yaml-2": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-yaml-2/-/apidom-parser-adapter-asyncapi-yaml-2-1.0.0-alpha.10.tgz",
- "integrity": "sha512-JsPYRsaKCecY8UN2AHuHm6X0WgWfys6ypH8UPYic1n3XUfNPkTSOaUY87Vi04wJmy8pQ1F0wHeESY//Zb37aIA==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-yaml-2/-/apidom-parser-adapter-asyncapi-yaml-2-1.0.0-beta.3.tgz",
+ "integrity": "sha512-Xl9MU1+24ZTDuGzy/mKVPlnMSvgA/lS+AoqwMzxLMuiIsTmnQX3gEdiM+pXmK7rg1KV/k0aLwDLKt3e00CPiXQ==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-ns-asyncapi-2": "^1.0.0-alpha.10",
- "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-ns-asyncapi-2": "^1.0.0-beta.3",
+ "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-json": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-json/-/apidom-parser-adapter-json-1.0.0-alpha.10.tgz",
- "integrity": "sha512-CTSgLG33GgC3POxLBCzlXyBBUz+EFGe62VICH012RIYDXHDmcr4dPmfHyj85LVJxLh7regQ+SGL4NwqQSxTY3A==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-json/-/apidom-parser-adapter-json-1.0.0-beta.3.tgz",
+ "integrity": "sha512-28zQdF8oeaUmNxZNU0De4JUY9jvxiaN+QCJ1GZN9aQ6NQ/eOAuGg+HRuL8+RrSe4STacdi1FCX46jHcMGQeqfg==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-ast": "^1.0.0-alpha.10",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-error": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ast": "^1.0.0-beta.3",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-error": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
- "tree-sitter": "=0.20.4",
- "tree-sitter-json": "=0.20.2",
- "web-tree-sitter": "=0.20.3"
+ "tree-sitter": "=0.21.1",
+ "tree-sitter-json": "=0.24.8",
+ "web-tree-sitter": "=0.24.3"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-openapi-json-2": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-2/-/apidom-parser-adapter-openapi-json-2-1.0.0-alpha.10.tgz",
- "integrity": "sha512-YtPu2BansaTpW6MrIRJgZpa9V+MLl/DFqC2tHbGSO+u73PdWndONRgqzAAc5pBWR+u1RNgULrCK6sX7uPiFLVg==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-2/-/apidom-parser-adapter-openapi-json-2-1.0.0-beta.3.tgz",
+ "integrity": "sha512-ufiQMl89sTGf09qlh/QvFLEUs9FH9ZZV4mjz1xIB127rnNbWg/sSGr0WIcJGKoLrioI9orb+7aqIhmSDw/plmw==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-ns-openapi-2": "^1.0.0-alpha.10",
- "@swagger-api/apidom-parser-adapter-json": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-ns-openapi-2": "^1.0.0-beta.3",
+ "@swagger-api/apidom-parser-adapter-json": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-openapi-json-3-0": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-0/-/apidom-parser-adapter-openapi-json-3-0-1.0.0-alpha.10.tgz",
- "integrity": "sha512-zzZdK+xhj+sVh4z3vZrxdBrDitraD1szJPc3sUC0pukuCz3P7R/u+//6+GLE9UVjUakdbQI2cyKyUOIZX51+/g==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-0/-/apidom-parser-adapter-openapi-json-3-0-1.0.0-beta.3.tgz",
+ "integrity": "sha512-yINlDTIZCywuKRsBeJJDmQLV4+r9FaWDezb4omw6xFQnQZQV1tHgIb549OsV6lT70TabLj+HoMYNLQ9/Bm59Yw==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-ns-openapi-3-0": "^1.0.0-alpha.10",
- "@swagger-api/apidom-parser-adapter-json": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-ns-openapi-3-0": "^1.0.0-beta.3",
+ "@swagger-api/apidom-parser-adapter-json": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-openapi-json-3-1": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-1/-/apidom-parser-adapter-openapi-json-3-1-1.0.0-alpha.10.tgz",
- "integrity": "sha512-i7HaRnU2kDtvDqM5Yv1sbYZghCeRhiVQEyaIIp59Zhc5SwLS3dSoD/kh0TeuKpaY5Lg0ISIM3SLRDcdaYUsGww==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-1/-/apidom-parser-adapter-openapi-json-3-1-1.0.0-beta.3.tgz",
+ "integrity": "sha512-kBZsyNHtp6w41g9N5c+PF4FqoE8vosxgYJEfhQeQs4qXK7T7d8sfjXwcnWRjqlOM4X8dt5R359h58AfwyEF20w==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-ns-openapi-3-1": "^1.0.0-alpha.10",
- "@swagger-api/apidom-parser-adapter-json": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-ns-openapi-3-1": "^1.0.0-beta.3",
+ "@swagger-api/apidom-parser-adapter-json": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-openapi-yaml-2": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-2/-/apidom-parser-adapter-openapi-yaml-2-1.0.0-alpha.10.tgz",
- "integrity": "sha512-QbqCTAvthqhZmFZKf9HBYnVt4kV7konYnauylVFIaE5KAzmZkcb30FtkAwmZfnyW3AURMzZcLfOgJRGHOjYSqA==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-2/-/apidom-parser-adapter-openapi-yaml-2-1.0.0-beta.3.tgz",
+ "integrity": "sha512-K/FRLCuB0UD9Nq/CNqfjkSVfQfzcpA7lJCg6QueZKd0dQJ54dyHFU9AroshutXHTmEjBleoL7V1K3PNh10HiYQ==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-ns-openapi-2": "^1.0.0-alpha.10",
- "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-ns-openapi-2": "^1.0.0-beta.3",
+ "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-openapi-yaml-3-0": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-0/-/apidom-parser-adapter-openapi-yaml-3-0-1.0.0-alpha.10.tgz",
- "integrity": "sha512-ajVOqs8lNta7uXkFtU5k1zDJTjwV6Ki3uS+JwBvjuMHsF/i/WIZOmgI4g1Z3yQ1c0QI4dHJskq4WDyp2qW64aw==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-0/-/apidom-parser-adapter-openapi-yaml-3-0-1.0.0-beta.3.tgz",
+ "integrity": "sha512-EUdpzJnqZqCu2keEyOxlCED/u0oaA05c6dO48XzbdyENONY/etoN5wrEoqxqxOz+1cC+FZWj/cnmsXdFfbJlEg==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-ns-openapi-3-0": "^1.0.0-alpha.10",
- "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-ns-openapi-3-0": "^1.0.0-beta.3",
+ "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-openapi-yaml-3-1": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-1/-/apidom-parser-adapter-openapi-yaml-3-1-1.0.0-alpha.10.tgz",
- "integrity": "sha512-ljYmbBFWjIcfN+MJr7JFh6NA/fgyu5gXDI6KUrg/sbWTKdUYP4iNLJPw8VLPBXHnExevjZCt1Ni74mmL4ZfyBg==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-1/-/apidom-parser-adapter-openapi-yaml-3-1-1.0.0-beta.3.tgz",
+ "integrity": "sha512-2Q9vmrgTQ4cA5WALGyTLp8tF984R9C7QmDOjGf/ngrTIQLyyrQZ0ZDaXL7RHTmT6K9Lg6axMpKquBNiO+Aff6g==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-ns-openapi-3-1": "^1.0.0-alpha.10",
- "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-ns-openapi-3-1": "^1.0.0-beta.3",
+ "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-workflows-json-1": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-workflows-json-1/-/apidom-parser-adapter-workflows-json-1-1.0.0-alpha.10.tgz",
- "integrity": "sha512-vd0H5IYX96AIhOLcU9SJnXDD6OV61i00JDDfJcFnf1K2NCB0D0Otk2V2z9LXqe51s3pZ7d/Dz0biDjYMsMKVww==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-workflows-json-1/-/apidom-parser-adapter-workflows-json-1-1.0.0-beta.3.tgz",
+ "integrity": "sha512-OsKz09YcfQfTbiNZueTLHBrn7umnMjtuN0ZzuNiBs5txaLS196grpzyTiG+4UJ1zIWvjvZmLZEbQqbKZ9qTw8A==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-ns-workflows-1": "^1.0.0-alpha.10",
- "@swagger-api/apidom-parser-adapter-json": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-ns-workflows-1": "^1.0.0-beta.3",
+ "@swagger-api/apidom-parser-adapter-json": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-workflows-yaml-1": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-workflows-yaml-1/-/apidom-parser-adapter-workflows-yaml-1-1.0.0-alpha.10.tgz",
- "integrity": "sha512-lH0AiPetMLRDy38gcB6TmQnaKv6p1ePimnT4xqcVSHEnc/FsjMbyOE3x6DUENau2eeWFduAhofE9zvliW6iJaQ==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-workflows-yaml-1/-/apidom-parser-adapter-workflows-yaml-1-1.0.0-beta.3.tgz",
+ "integrity": "sha512-IifK3T6UtqBkIoHOQe6QRGpFU9LFqmJ5T1JzbWnVX+gazoVE+N9ZkFWQfb9pKCaCfAwPVp+vai6bQ2eUsGh4CA==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-ns-workflows-1": "^1.0.0-alpha.10",
- "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-ns-workflows-1": "^1.0.0-beta.3",
+ "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0"
}
},
"node_modules/@swagger-api/apidom-parser-adapter-yaml-1-2": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-yaml-1-2/-/apidom-parser-adapter-yaml-1-2-1.0.0-alpha.10.tgz",
- "integrity": "sha512-mW/W/Q8w4RCw41Y9vggPbsFg+gj0FxKdecVYzZ8TmgyM9oVN6/KZFegUYKlg1HDRAfjceKehE06aLLS5GXEJCA==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-yaml-1-2/-/apidom-parser-adapter-yaml-1-2-1.0.0-beta.3.tgz",
+ "integrity": "sha512-sSGxnMTNNTqhJBeUOge4Q/5l/7170maoxyrK6J57kRxqkchSAqam73VIBpKa8c/sJ7zhdZI7CZ9aTJe/q7vc7w==",
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-ast": "^1.0.0-alpha.10",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
- "@swagger-api/apidom-error": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-ast": "^1.0.0-beta.3",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
+ "@swagger-api/apidom-error": "^1.0.0-beta.3",
+ "@tree-sitter-grammars/tree-sitter-yaml": "=0.6.1",
"@types/ramda": "~0.30.0",
"ramda": "~0.30.0",
"ramda-adjunct": "^5.0.0",
- "tree-sitter": "=0.20.4",
- "tree-sitter-yaml": "=0.5.0",
- "web-tree-sitter": "=0.20.3"
+ "tree-sitter": "=0.21.1",
+ "web-tree-sitter": "=0.24.3"
}
},
"node_modules/@swagger-api/apidom-reference": {
- "version": "1.0.0-alpha.10",
- "resolved": "https://registry.npmjs.org/@swagger-api/apidom-reference/-/apidom-reference-1.0.0-alpha.10.tgz",
- "integrity": "sha512-aFG6EHC1NOa0IhawTiE8A8TffzmW0PSO5d+lpzvcJ0w7KbrYG6SFQF2L6lZppqGaIGWbmV0Mq3LDU9mgSVEqqQ==",
+ "version": "1.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/@swagger-api/apidom-reference/-/apidom-reference-1.0.0-beta.3.tgz",
+ "integrity": "sha512-MkSW/uKA+iCUeQ5HqICGxXPZI1y5vbXnOZLT+22+ZvaO3+5j7tD2aS9mAF+140VaaE5AkpZE28XC9TaYyjEwDg==",
"license": "Apache-2.0",
"dependencies": {
"@babel/runtime-corejs3": "^7.20.7",
- "@swagger-api/apidom-core": "^1.0.0-alpha.10",
+ "@swagger-api/apidom-core": "^1.0.0-beta.3",
"@types/ramda": "~0.30.0",
"axios": "^1.7.4",
"minimatch": "^7.4.3",
@@ -2265,27 +2265,27 @@
"ramda-adjunct": "^5.0.0"
},
"optionalDependencies": {
- "@swagger-api/apidom-error": "^1.0.0-alpha.1",
- "@swagger-api/apidom-json-pointer": "^1.0.0-alpha.1",
- "@swagger-api/apidom-ns-asyncapi-2": "^1.0.0-alpha.1",
- "@swagger-api/apidom-ns-openapi-2": "^1.0.0-alpha.1",
- "@swagger-api/apidom-ns-openapi-3-0": "^1.0.0-alpha.1",
- "@swagger-api/apidom-ns-openapi-3-1": "^1.0.0-alpha.1",
- "@swagger-api/apidom-ns-workflows-1": "^1.0.0-alpha.1",
- "@swagger-api/apidom-parser-adapter-api-design-systems-json": "^1.0.0-alpha.1",
- "@swagger-api/apidom-parser-adapter-api-design-systems-yaml": "^1.0.0-alpha.1",
- "@swagger-api/apidom-parser-adapter-asyncapi-json-2": "^1.0.0-alpha.1",
- "@swagger-api/apidom-parser-adapter-asyncapi-yaml-2": "^1.0.0-alpha.1",
- "@swagger-api/apidom-parser-adapter-json": "^1.0.0-alpha.1",
- "@swagger-api/apidom-parser-adapter-openapi-json-2": "^1.0.0-alpha.1",
- "@swagger-api/apidom-parser-adapter-openapi-json-3-0": "^1.0.0-alpha.1",
- "@swagger-api/apidom-parser-adapter-openapi-json-3-1": "^1.0.0-alpha.1",
- "@swagger-api/apidom-parser-adapter-openapi-yaml-2": "^1.0.0-alpha.1",
- "@swagger-api/apidom-parser-adapter-openapi-yaml-3-0": "^1.0.0-alpha.1",
- "@swagger-api/apidom-parser-adapter-openapi-yaml-3-1": "^1.0.0-alpha.1",
- "@swagger-api/apidom-parser-adapter-workflows-json-1": "^1.0.0-alpha.1",
- "@swagger-api/apidom-parser-adapter-workflows-yaml-1": "^1.0.0-alpha.1",
- "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-alpha.1"
+ "@swagger-api/apidom-error": "^1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-json-pointer": "^1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-ns-asyncapi-2": "^1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-ns-openapi-2": "^1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-ns-openapi-3-0": "^1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-ns-openapi-3-1": "^1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-ns-workflows-1": "^1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-parser-adapter-api-design-systems-json": "^1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-parser-adapter-api-design-systems-yaml": "^1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-parser-adapter-asyncapi-json-2": "^1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-parser-adapter-asyncapi-yaml-2": "^1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-parser-adapter-json": "^1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-parser-adapter-openapi-json-2": "^1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-parser-adapter-openapi-json-3-0": "^1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-parser-adapter-openapi-json-3-1": "^1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-parser-adapter-openapi-yaml-2": "^1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-parser-adapter-openapi-yaml-3-0": "^1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-parser-adapter-openapi-yaml-3-1": "^1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-parser-adapter-workflows-json-1": "^1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-parser-adapter-workflows-yaml-1": "^1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-parser-adapter-yaml-1-2": "^1.0.0-beta.3 <1.0.0-rc.0"
}
},
"node_modules/@swagger-api/apidom-reference/node_modules/minimatch": {
@@ -2304,15 +2304,15 @@
}
},
"node_modules/@swc/core": {
- "version": "1.9.2",
- "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.9.2.tgz",
- "integrity": "sha512-dYyEkO6mRYtZFpnOsnYzv9rY69fHAHoawYOjGOEcxk9WYtaJhowMdP/w6NcOKnz2G7GlZaenjkzkMa6ZeQeMsg==",
+ "version": "1.9.3",
+ "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.9.3.tgz",
+ "integrity": "sha512-oRj0AFePUhtatX+BscVhnzaAmWjpfAeySpM1TCbxA1rtBDeH/JDhi5yYzAKneDYtVtBvA7ApfeuzhMC9ye4xSg==",
"dev": true,
"hasInstallScript": true,
"license": "Apache-2.0",
"dependencies": {
"@swc/counter": "^0.1.3",
- "@swc/types": "^0.1.15"
+ "@swc/types": "^0.1.17"
},
"engines": {
"node": ">=10"
@@ -2322,16 +2322,16 @@
"url": "https://opencollective.com/swc"
},
"optionalDependencies": {
- "@swc/core-darwin-arm64": "1.9.2",
- "@swc/core-darwin-x64": "1.9.2",
- "@swc/core-linux-arm-gnueabihf": "1.9.2",
- "@swc/core-linux-arm64-gnu": "1.9.2",
- "@swc/core-linux-arm64-musl": "1.9.2",
- "@swc/core-linux-x64-gnu": "1.9.2",
- "@swc/core-linux-x64-musl": "1.9.2",
- "@swc/core-win32-arm64-msvc": "1.9.2",
- "@swc/core-win32-ia32-msvc": "1.9.2",
- "@swc/core-win32-x64-msvc": "1.9.2"
+ "@swc/core-darwin-arm64": "1.9.3",
+ "@swc/core-darwin-x64": "1.9.3",
+ "@swc/core-linux-arm-gnueabihf": "1.9.3",
+ "@swc/core-linux-arm64-gnu": "1.9.3",
+ "@swc/core-linux-arm64-musl": "1.9.3",
+ "@swc/core-linux-x64-gnu": "1.9.3",
+ "@swc/core-linux-x64-musl": "1.9.3",
+ "@swc/core-win32-arm64-msvc": "1.9.3",
+ "@swc/core-win32-ia32-msvc": "1.9.3",
+ "@swc/core-win32-x64-msvc": "1.9.3"
},
"peerDependencies": {
"@swc/helpers": "*"
@@ -2343,9 +2343,9 @@
}
},
"node_modules/@swc/core-darwin-arm64": {
- "version": "1.9.2",
- "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.9.2.tgz",
- "integrity": "sha512-nETmsCoY29krTF2PtspEgicb3tqw7Ci5sInTI03EU5zpqYbPjoPH99BVTjj0OsF53jP5MxwnLI5Hm21lUn1d6A==",
+ "version": "1.9.3",
+ "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.9.3.tgz",
+ "integrity": "sha512-hGfl/KTic/QY4tB9DkTbNuxy5cV4IeejpPD4zo+Lzt4iLlDWIeANL4Fkg67FiVceNJboqg48CUX+APhDHO5G1w==",
"cpu": [
"arm64"
],
@@ -2360,9 +2360,9 @@
}
},
"node_modules/@swc/core-darwin-x64": {
- "version": "1.9.2",
- "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.9.2.tgz",
- "integrity": "sha512-9gD+bwBz8ZByjP6nZTXe/hzd0tySIAjpDHgkFiUrc+5zGF+rdTwhcNrzxNHJmy6mw+PW38jqII4uspFHUqqxuQ==",
+ "version": "1.9.3",
+ "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.9.3.tgz",
+ "integrity": "sha512-IaRq05ZLdtgF5h9CzlcgaNHyg4VXuiStnOFpfNEMuI5fm5afP2S0FHq8WdakUz5WppsbddTdplL+vpeApt/WCQ==",
"cpu": [
"x64"
],
@@ -2377,9 +2377,9 @@
}
},
"node_modules/@swc/core-linux-arm-gnueabihf": {
- "version": "1.9.2",
- "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.9.2.tgz",
- "integrity": "sha512-kYq8ief1Qrn+WmsTWAYo4r+Coul4dXN6cLFjiPZ29Cv5pyU+GFvSPAB4bEdMzwy99rCR0u2P10UExaeCjurjvg==",
+ "version": "1.9.3",
+ "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.9.3.tgz",
+ "integrity": "sha512-Pbwe7xYprj/nEnZrNBvZfjnTxlBIcfApAGdz2EROhjpPj+FBqBa3wOogqbsuGGBdCphf8S+KPprL1z+oDWkmSQ==",
"cpu": [
"arm"
],
@@ -2394,9 +2394,9 @@
}
},
"node_modules/@swc/core-linux-arm64-gnu": {
- "version": "1.9.2",
- "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.9.2.tgz",
- "integrity": "sha512-n0W4XiXlmEIVqxt+rD3ZpkogsEWUk1jJ+i5bQNgB+1JuWh0fBE8c/blDgTQXa0GB5lTPVDZQussgdNOCnAZwiA==",
+ "version": "1.9.3",
+ "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.9.3.tgz",
+ "integrity": "sha512-AQ5JZiwNGVV/2K2TVulg0mw/3LYfqpjZO6jDPtR2evNbk9Yt57YsVzS+3vHSlUBQDRV9/jqMuZYVU3P13xrk+g==",
"cpu": [
"arm64"
],
@@ -2411,9 +2411,9 @@
}
},
"node_modules/@swc/core-linux-arm64-musl": {
- "version": "1.9.2",
- "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.9.2.tgz",
- "integrity": "sha512-8xzrOmsyCC1zrx2Wzx/h8dVsdewO1oMCwBTLc1gSJ/YllZYTb04pNm6NsVbzUX2tKddJVRgSJXV10j/NECLwpA==",
+ "version": "1.9.3",
+ "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.9.3.tgz",
+ "integrity": "sha512-tzVH480RY6RbMl/QRgh5HK3zn1ZTFsThuxDGo6Iuk1MdwIbdFYUY034heWUTI4u3Db97ArKh0hNL0xhO3+PZdg==",
"cpu": [
"arm64"
],
@@ -2428,9 +2428,9 @@
}
},
"node_modules/@swc/core-linux-x64-gnu": {
- "version": "1.9.2",
- "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.9.2.tgz",
- "integrity": "sha512-kZrNz/PjRQKcchWF6W292jk3K44EoVu1ad5w+zbS4jekIAxsM8WwQ1kd+yjUlN9jFcF8XBat5NKIs9WphJCVXg==",
+ "version": "1.9.3",
+ "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.9.3.tgz",
+ "integrity": "sha512-ivXXBRDXDc9k4cdv10R21ccBmGebVOwKXT/UdH1PhxUn9m/h8erAWjz5pcELwjiMf27WokqPgaWVfaclDbgE+w==",
"cpu": [
"x64"
],
@@ -2445,9 +2445,9 @@
}
},
"node_modules/@swc/core-linux-x64-musl": {
- "version": "1.9.2",
- "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.9.2.tgz",
- "integrity": "sha512-TTIpR4rjMkhX1lnFR+PSXpaL83TrQzp9znRdp2TzYrODlUd/R20zOwSo9vFLCyH6ZoD47bccY7QeGZDYT3nlRg==",
+ "version": "1.9.3",
+ "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.9.3.tgz",
+ "integrity": "sha512-ILsGMgfnOz1HwdDz+ZgEuomIwkP1PHT6maigZxaCIuC6OPEhKE8uYna22uU63XvYcLQvZYDzpR3ms47WQPuNEg==",
"cpu": [
"x64"
],
@@ -2462,9 +2462,9 @@
}
},
"node_modules/@swc/core-win32-arm64-msvc": {
- "version": "1.9.2",
- "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.9.2.tgz",
- "integrity": "sha512-+Eg2d4icItKC0PMjZxH7cSYFLWk0aIp94LNmOw6tPq0e69ax6oh10upeq0D1fjWsKLmOJAWEvnXlayZcijEXDw==",
+ "version": "1.9.3",
+ "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.9.3.tgz",
+ "integrity": "sha512-e+XmltDVIHieUnNJHtspn6B+PCcFOMYXNJB1GqoCcyinkEIQNwC8KtWgMqUucUbEWJkPc35NHy9k8aCXRmw9Kg==",
"cpu": [
"arm64"
],
@@ -2479,9 +2479,9 @@
}
},
"node_modules/@swc/core-win32-ia32-msvc": {
- "version": "1.9.2",
- "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.9.2.tgz",
- "integrity": "sha512-nLWBi4vZDdM/LkiQmPCakof8Dh1/t5EM7eudue04V1lIcqx9YHVRS3KMwEaCoHLGg0c312Wm4YgrWQd9vwZ5zQ==",
+ "version": "1.9.3",
+ "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.9.3.tgz",
+ "integrity": "sha512-rqpzNfpAooSL4UfQnHhkW8aL+oyjqJniDP0qwZfGnjDoJSbtPysHg2LpcOBEdSnEH+uIZq6J96qf0ZFD8AGfXA==",
"cpu": [
"ia32"
],
@@ -2496,9 +2496,9 @@
}
},
"node_modules/@swc/core-win32-x64-msvc": {
- "version": "1.9.2",
- "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.9.2.tgz",
- "integrity": "sha512-ik/k+JjRJBFkXARukdU82tSVx0CbExFQoQ78qTO682esbYXzjdB5eLVkoUbwen299pnfr88Kn4kyIqFPTje8Xw==",
+ "version": "1.9.3",
+ "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.9.3.tgz",
+ "integrity": "sha512-3YJJLQ5suIEHEKc1GHtqVq475guiyqisKSoUnoaRtxkDaW5g1yvPt9IoSLOe2mRs7+FFhGGU693RsBUSwOXSdQ==",
"cpu": [
"x64"
],
@@ -2520,9 +2520,9 @@
"license": "Apache-2.0"
},
"node_modules/@swc/types": {
- "version": "0.1.15",
- "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.15.tgz",
- "integrity": "sha512-XKaZ+dzDIQ9Ot9o89oJQ/aluI17+VvUnIpYJTcZtvv1iYX6MzHh3Ik2CSR7MdPKpPwfZXHBeCingb2b4PoDVdw==",
+ "version": "0.1.17",
+ "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.17.tgz",
+ "integrity": "sha512-V5gRru+aD8YVyCOMAjMpWR1Ui577DD5KSJsHP8RAxopAH22jFz6GZd/qxqjO6MJHQhcsjvjOFXyDhyLQUnMveQ==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
@@ -2556,6 +2556,26 @@
"url": "https://github.com/sponsors/tannerlinsley"
}
},
+ "node_modules/@tree-sitter-grammars/tree-sitter-yaml": {
+ "version": "0.6.1",
+ "resolved": "https://registry.npmjs.org/@tree-sitter-grammars/tree-sitter-yaml/-/tree-sitter-yaml-0.6.1.tgz",
+ "integrity": "sha512-FqgUNdtMuPpk5D/9YQvCxTK4tzlUEVq/yNewdcxJbMv0KVt/yDfuuUn5ZvxphftKyOco+1e/6/oNHCKVQ5A83Q==",
+ "hasInstallScript": true,
+ "license": "MIT",
+ "optional": true,
+ "dependencies": {
+ "node-addon-api": "^8.0.0",
+ "node-gyp-build": "^4.8.0"
+ },
+ "peerDependencies": {
+ "tree-sitter": "^0.21.1"
+ },
+ "peerDependenciesMeta": {
+ "tree_sitter": {
+ "optional": true
+ }
+ }
+ },
"node_modules/@types/codemirror": {
"version": "5.60.15",
"resolved": "https://registry.npmjs.org/@types/codemirror/-/codemirror-5.60.15.tgz",
@@ -2656,17 +2676,17 @@
"license": "MIT"
},
"node_modules/@typescript-eslint/eslint-plugin": {
- "version": "8.14.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.14.0.tgz",
- "integrity": "sha512-tqp8H7UWFaZj0yNO6bycd5YjMwxa6wIHOLZvWPkidwbgLCsBMetQoGj7DPuAlWa2yGO3H48xmPwjhsSPPCGU5w==",
+ "version": "8.16.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.16.0.tgz",
+ "integrity": "sha512-5YTHKV8MYlyMI6BaEG7crQ9BhSc8RxzshOReKwZwRWN0+XvvTOm+L/UYLCYxFpfwYuAAqhxiq4yae0CMFwbL7Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/regexpp": "^4.10.0",
- "@typescript-eslint/scope-manager": "8.14.0",
- "@typescript-eslint/type-utils": "8.14.0",
- "@typescript-eslint/utils": "8.14.0",
- "@typescript-eslint/visitor-keys": "8.14.0",
+ "@typescript-eslint/scope-manager": "8.16.0",
+ "@typescript-eslint/type-utils": "8.16.0",
+ "@typescript-eslint/utils": "8.16.0",
+ "@typescript-eslint/visitor-keys": "8.16.0",
"graphemer": "^1.4.0",
"ignore": "^5.3.1",
"natural-compare": "^1.4.0",
@@ -2690,16 +2710,16 @@
}
},
"node_modules/@typescript-eslint/parser": {
- "version": "8.14.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.14.0.tgz",
- "integrity": "sha512-2p82Yn9juUJq0XynBXtFCyrBDb6/dJombnz6vbo6mgQEtWHfvHbQuEa9kAOVIt1c9YFwi7H6WxtPj1kg+80+RA==",
+ "version": "8.16.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.16.0.tgz",
+ "integrity": "sha512-D7DbgGFtsqIPIFMPJwCad9Gfi/hC0PWErRRHFnaCWoEDYi5tQUDiJCTmGUbBiLzjqAck4KcXt9Ayj0CNlIrF+w==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
- "@typescript-eslint/scope-manager": "8.14.0",
- "@typescript-eslint/types": "8.14.0",
- "@typescript-eslint/typescript-estree": "8.14.0",
- "@typescript-eslint/visitor-keys": "8.14.0",
+ "@typescript-eslint/scope-manager": "8.16.0",
+ "@typescript-eslint/types": "8.16.0",
+ "@typescript-eslint/typescript-estree": "8.16.0",
+ "@typescript-eslint/visitor-keys": "8.16.0",
"debug": "^4.3.4"
},
"engines": {
@@ -2719,14 +2739,14 @@
}
},
"node_modules/@typescript-eslint/scope-manager": {
- "version": "8.14.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.14.0.tgz",
- "integrity": "sha512-aBbBrnW9ARIDn92Zbo7rguLnqQ/pOrUguVpbUwzOhkFg2npFDwTgPGqFqE0H5feXcOoJOfX3SxlJaKEVtq54dw==",
+ "version": "8.16.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.16.0.tgz",
+ "integrity": "sha512-mwsZWubQvBki2t5565uxF0EYvG+FwdFb8bMtDuGQLdCCnGPrDEDvm1gtfynuKlnpzeBRqdFCkMf9jg1fnAK8sg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/types": "8.14.0",
- "@typescript-eslint/visitor-keys": "8.14.0"
+ "@typescript-eslint/types": "8.16.0",
+ "@typescript-eslint/visitor-keys": "8.16.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2737,14 +2757,14 @@
}
},
"node_modules/@typescript-eslint/type-utils": {
- "version": "8.14.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.14.0.tgz",
- "integrity": "sha512-Xcz9qOtZuGusVOH5Uk07NGs39wrKkf3AxlkK79RBK6aJC1l03CobXjJbwBPSidetAOV+5rEVuiT1VSBUOAsanQ==",
+ "version": "8.16.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.16.0.tgz",
+ "integrity": "sha512-IqZHGG+g1XCWX9NyqnI/0CX5LL8/18awQqmkZSl2ynn8F76j579dByc0jhfVSnSnhf7zv76mKBQv9HQFKvDCgg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/typescript-estree": "8.14.0",
- "@typescript-eslint/utils": "8.14.0",
+ "@typescript-eslint/typescript-estree": "8.16.0",
+ "@typescript-eslint/utils": "8.16.0",
"debug": "^4.3.4",
"ts-api-utils": "^1.3.0"
},
@@ -2755,6 +2775,9 @@
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
},
+ "peerDependencies": {
+ "eslint": "^8.57.0 || ^9.0.0"
+ },
"peerDependenciesMeta": {
"typescript": {
"optional": true
@@ -2762,9 +2785,9 @@
}
},
"node_modules/@typescript-eslint/types": {
- "version": "8.14.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.14.0.tgz",
- "integrity": "sha512-yjeB9fnO/opvLJFAsPNYlKPnEM8+z4og09Pk504dkqonT02AyL5Z9SSqlE0XqezS93v6CXn49VHvB2G7XSsl0g==",
+ "version": "8.16.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.16.0.tgz",
+ "integrity": "sha512-NzrHj6thBAOSE4d9bsuRNMvk+BvaQvmY4dDglgkgGC0EW/tB3Kelnp3tAKH87GEwzoxgeQn9fNGRyFJM/xd+GQ==",
"dev": true,
"license": "MIT",
"engines": {
@@ -2776,14 +2799,14 @@
}
},
"node_modules/@typescript-eslint/typescript-estree": {
- "version": "8.14.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.14.0.tgz",
- "integrity": "sha512-OPXPLYKGZi9XS/49rdaCbR5j/S14HazviBlUQFvSKz3npr3NikF+mrgK7CFVur6XEt95DZp/cmke9d5i3vtVnQ==",
+ "version": "8.16.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.16.0.tgz",
+ "integrity": "sha512-E2+9IzzXMc1iaBy9zmo+UYvluE3TW7bCGWSF41hVWUE01o8nzr1rvOQYSxelxr6StUvRcTMe633eY8mXASMaNw==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
- "@typescript-eslint/types": "8.14.0",
- "@typescript-eslint/visitor-keys": "8.14.0",
+ "@typescript-eslint/types": "8.16.0",
+ "@typescript-eslint/visitor-keys": "8.16.0",
"debug": "^4.3.4",
"fast-glob": "^3.3.2",
"is-glob": "^4.0.3",
@@ -2805,16 +2828,16 @@
}
},
"node_modules/@typescript-eslint/utils": {
- "version": "8.14.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.14.0.tgz",
- "integrity": "sha512-OGqj6uB8THhrHj0Fk27DcHPojW7zKwKkPmHXHvQ58pLYp4hy8CSUdTKykKeh+5vFqTTVmjz0zCOOPKRovdsgHA==",
+ "version": "8.16.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.16.0.tgz",
+ "integrity": "sha512-C1zRy/mOL8Pj157GiX4kaw7iyRLKfJXBR3L82hk5kS/GyHcOFmy4YUq/zfZti72I9wnuQtA/+xzft4wCC8PJdA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.4.0",
- "@typescript-eslint/scope-manager": "8.14.0",
- "@typescript-eslint/types": "8.14.0",
- "@typescript-eslint/typescript-estree": "8.14.0"
+ "@typescript-eslint/scope-manager": "8.16.0",
+ "@typescript-eslint/types": "8.16.0",
+ "@typescript-eslint/typescript-estree": "8.16.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2825,17 +2848,22 @@
},
"peerDependencies": {
"eslint": "^8.57.0 || ^9.0.0"
+ },
+ "peerDependenciesMeta": {
+ "typescript": {
+ "optional": true
+ }
}
},
"node_modules/@typescript-eslint/visitor-keys": {
- "version": "8.14.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.14.0.tgz",
- "integrity": "sha512-vG0XZo8AdTH9OE6VFRwAZldNc7qtJ/6NLGWak+BtENuEUXGZgFpihILPiBvKXvJ2nFu27XNGC6rKiwuaoMbYzQ==",
+ "version": "8.16.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.16.0.tgz",
+ "integrity": "sha512-pq19gbaMOmFE3CbL0ZB8J8BFCo2ckfHBfaIsaOZgBIF4EoISJIdLX5xRhd0FGB0LlHReNRuzoJoMGpTjq8F2CQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/types": "8.14.0",
- "eslint-visitor-keys": "^3.4.3"
+ "@typescript-eslint/types": "8.16.0",
+ "eslint-visitor-keys": "^4.2.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2845,6 +2873,19 @@
"url": "https://opencollective.com/typescript-eslint"
}
},
+ "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz",
+ "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint"
+ }
+ },
"node_modules/@vitejs/plugin-react-swc": {
"version": "3.7.1",
"resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-3.7.1.tgz",
@@ -2990,18 +3031,6 @@
],
"license": "MIT"
},
- "node_modules/bl": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz",
- "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==",
- "license": "MIT",
- "optional": true,
- "dependencies": {
- "buffer": "^5.5.0",
- "inherits": "^2.0.4",
- "readable-stream": "^3.4.0"
- }
- },
"node_modules/brace-expansion": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
@@ -3024,31 +3053,6 @@
"node": ">=8"
}
},
- "node_modules/buffer": {
- "version": "5.7.1",
- "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz",
- "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ],
- "license": "MIT",
- "optional": true,
- "dependencies": {
- "base64-js": "^1.3.1",
- "ieee754": "^1.1.13"
- }
- },
"node_modules/callsites": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
@@ -3106,13 +3110,6 @@
"url": "https://github.com/sponsors/wooorm"
}
},
- "node_modules/chownr": {
- "version": "1.1.4",
- "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz",
- "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==",
- "license": "ISC",
- "optional": true
- },
"node_modules/classnames": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/classnames/-/classnames-2.5.1.tgz",
@@ -3243,9 +3240,9 @@
}
},
"node_modules/cross-spawn": {
- "version": "7.0.5",
- "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.5.tgz",
- "integrity": "sha512-ZVJrKKYunU38/76t0RMOulHOnUcbU9GbpWKAOZ0mhjr7CX6FVrH+4FrAapSOekrgFQ3f/8gwMEuIft0aKq6Hug==",
+ "version": "7.0.6",
+ "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
+ "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -3294,22 +3291,6 @@
}
}
},
- "node_modules/decompress-response": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz",
- "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==",
- "license": "MIT",
- "optional": true,
- "dependencies": {
- "mimic-response": "^3.1.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
"node_modules/deep-extend": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz",
@@ -3344,16 +3325,6 @@
"node": ">=0.4.0"
}
},
- "node_modules/detect-libc": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz",
- "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==",
- "license": "Apache-2.0",
- "optional": true,
- "engines": {
- "node": ">=8"
- }
- },
"node_modules/detect-node-es": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz",
@@ -3375,16 +3346,6 @@
"node": ">=4"
}
},
- "node_modules/end-of-stream": {
- "version": "1.4.4",
- "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz",
- "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==",
- "license": "MIT",
- "optional": true,
- "dependencies": {
- "once": "^1.4.0"
- }
- },
"node_modules/entities": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
@@ -3450,27 +3411,27 @@
}
},
"node_modules/eslint": {
- "version": "9.14.0",
- "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.14.0.tgz",
- "integrity": "sha512-c2FHsVBr87lnUtjP4Yhvk4yEhKrQavGafRA/Se1ouse8PfbfC/Qh9Mxa00yWsZRlqeUB9raXip0aiiUZkgnr9g==",
+ "version": "9.15.0",
+ "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.15.0.tgz",
+ "integrity": "sha512-7CrWySmIibCgT1Os28lUU6upBshZ+GxybLOrmRzi08kS8MBuO8QA7pXEgYgY5W8vK3e74xv0lpjo9DbaGU9Rkw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.2.0",
"@eslint-community/regexpp": "^4.12.1",
- "@eslint/config-array": "^0.18.0",
- "@eslint/core": "^0.7.0",
- "@eslint/eslintrc": "^3.1.0",
- "@eslint/js": "9.14.0",
- "@eslint/plugin-kit": "^0.2.0",
+ "@eslint/config-array": "^0.19.0",
+ "@eslint/core": "^0.9.0",
+ "@eslint/eslintrc": "^3.2.0",
+ "@eslint/js": "9.15.0",
+ "@eslint/plugin-kit": "^0.2.3",
"@humanfs/node": "^0.16.6",
"@humanwhocodes/module-importer": "^1.0.1",
- "@humanwhocodes/retry": "^0.4.0",
+ "@humanwhocodes/retry": "^0.4.1",
"@types/estree": "^1.0.6",
"@types/json-schema": "^7.0.15",
"ajv": "^6.12.4",
"chalk": "^4.0.0",
- "cross-spawn": "^7.0.2",
+ "cross-spawn": "^7.0.5",
"debug": "^4.3.2",
"escape-string-regexp": "^4.0.0",
"eslint-scope": "^8.2.0",
@@ -3489,8 +3450,7 @@
"lodash.merge": "^4.6.2",
"minimatch": "^3.1.2",
"natural-compare": "^1.4.0",
- "optionator": "^0.9.3",
- "text-table": "^0.2.0"
+ "optionator": "^0.9.3"
},
"bin": {
"eslint": "bin/eslint.js"
@@ -3677,16 +3637,6 @@
"node": ">=0.10.0"
}
},
- "node_modules/expand-template": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz",
- "integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==",
- "license": "(MIT OR WTFPL)",
- "optional": true,
- "engines": {
- "node": ">=6"
- }
- },
"node_modules/fast-deep-equal": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
@@ -3825,9 +3775,9 @@
}
},
"node_modules/flatted": {
- "version": "3.3.1",
- "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz",
- "integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==",
+ "version": "3.3.2",
+ "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.2.tgz",
+ "integrity": "sha512-AiwGJM8YcNOaobumgtng+6NHuOqC3A7MixFeDafM3X9cIUM+xUXoS5Vfgf+OihAYe20fxqNM9yPBXJzRtZ/4eA==",
"dev": true,
"license": "ISC"
},
@@ -3903,13 +3853,6 @@
"tslib": "^2.1.0"
}
},
- "node_modules/fs-constants": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz",
- "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==",
- "license": "MIT",
- "optional": true
- },
"node_modules/fsevents": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
@@ -3946,13 +3889,6 @@
"node": ">=6.0"
}
},
- "node_modules/github-from-package": {
- "version": "0.0.0",
- "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz",
- "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==",
- "license": "MIT",
- "optional": true
- },
"node_modules/glob-parent": {
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz",
@@ -4156,13 +4092,6 @@
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
"license": "ISC"
},
- "node_modules/ini": {
- "version": "1.3.8",
- "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
- "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==",
- "license": "ISC",
- "optional": true
- },
"node_modules/invariant": {
"version": "2.2.4",
"resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz",
@@ -4510,19 +4439,6 @@
"node": ">= 0.6"
}
},
- "node_modules/mimic-response": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz",
- "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==",
- "license": "MIT",
- "optional": true,
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
"node_modules/minim": {
"version": "0.23.8",
"resolved": "https://registry.npmjs.org/minim/-/minim-0.23.8.tgz",
@@ -4551,23 +4467,6 @@
"url": "https://github.com/sponsors/isaacs"
}
},
- "node_modules/minimist": {
- "version": "1.2.8",
- "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
- "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==",
- "license": "MIT",
- "optional": true,
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/mkdirp-classic": {
- "version": "0.5.3",
- "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz",
- "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==",
- "license": "MIT",
- "optional": true
- },
"node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
@@ -4575,13 +4474,6 @@
"dev": true,
"license": "MIT"
},
- "node_modules/nan": {
- "version": "2.22.0",
- "resolved": "https://registry.npmjs.org/nan/-/nan-2.22.0.tgz",
- "integrity": "sha512-nbajikzWTMwsW+eSsNm3QwlOs7het9gGJU5dDZzRTQGk03vyBOauxgI4VakDzE0PtsGTmXPsXTbbjVhRwR5mpw==",
- "license": "MIT",
- "optional": true
- },
"node_modules/nanoid": {
"version": "3.3.7",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz",
@@ -4601,13 +4493,6 @@
"node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
}
},
- "node_modules/napi-build-utils": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-1.0.2.tgz",
- "integrity": "sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==",
- "license": "MIT",
- "optional": true
- },
"node_modules/natural-compare": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz",
@@ -4624,25 +4509,22 @@
"node": ">= 10"
}
},
- "node_modules/node-abi": {
- "version": "3.71.0",
- "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.71.0.tgz",
- "integrity": "sha512-SZ40vRiy/+wRTf21hxkkEjPJZpARzUMVcJoQse2EF8qkUWbbO2z7vd5oA/H6bVH6SZQ5STGcu0KRDS7biNRfxw==",
- "license": "MIT",
- "optional": true,
- "dependencies": {
- "semver": "^7.3.5"
- },
- "engines": {
- "node": ">=10"
- }
- },
"node_modules/node-abort-controller": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/node-abort-controller/-/node-abort-controller-3.1.1.tgz",
"integrity": "sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ==",
"license": "MIT"
},
+ "node_modules/node-addon-api": {
+ "version": "8.2.2",
+ "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-8.2.2.tgz",
+ "integrity": "sha512-9emqXAKhVoNrQ792nLI/wpzPpJ/bj/YXxW0CvAau1+RdGBcCRF1Dmz7719zgVsQNrzHl9Tzn3ImZ4qWFarWL0A==",
+ "license": "MIT",
+ "optional": true,
+ "engines": {
+ "node": "^18 || ^20 || >= 21"
+ }
+ },
"node_modules/node-domexception": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz",
@@ -4679,6 +4561,18 @@
"url": "https://opencollective.com/node-fetch"
}
},
+ "node_modules/node-gyp-build": {
+ "version": "4.8.4",
+ "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.8.4.tgz",
+ "integrity": "sha512-LA4ZjwlnUblHVgq0oBF3Jl/6h/Nvs5fzBLwdEF4nuxnFdsfajde4WfxtJr3CaiH+F6ewcIB/q4jQ4UzPyid+CQ==",
+ "license": "MIT",
+ "optional": true,
+ "bin": {
+ "node-gyp-build": "bin.js",
+ "node-gyp-build-optional": "optional.js",
+ "node-gyp-build-test": "build-test.js"
+ }
+ },
"node_modules/nullthrows": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/nullthrows/-/nullthrows-1.1.1.tgz",
@@ -4694,16 +4588,6 @@
"node": ">=0.10.0"
}
},
- "node_modules/once": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
- "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
- "license": "ISC",
- "optional": true,
- "dependencies": {
- "wrappy": "1"
- }
- },
"node_modules/openapi-path-templating": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/openapi-path-templating/-/openapi-path-templating-1.6.0.tgz",
@@ -4890,33 +4774,6 @@
"node": "^10 || ^12 || >=14"
}
},
- "node_modules/prebuild-install": {
- "version": "7.1.2",
- "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.2.tgz",
- "integrity": "sha512-UnNke3IQb6sgarcZIDU3gbMeTp/9SSU1DAIkil7PrqG1vZlBtY5msYccSKSHDqa3hNg436IXK+SNImReuA1wEQ==",
- "license": "MIT",
- "optional": true,
- "dependencies": {
- "detect-libc": "^2.0.0",
- "expand-template": "^2.0.3",
- "github-from-package": "0.0.0",
- "minimist": "^1.2.3",
- "mkdirp-classic": "^0.5.3",
- "napi-build-utils": "^1.0.1",
- "node-abi": "^3.3.0",
- "pump": "^3.0.0",
- "rc": "^1.2.7",
- "simple-get": "^4.0.0",
- "tar-fs": "^2.0.0",
- "tunnel-agent": "^0.6.0"
- },
- "bin": {
- "prebuild-install": "bin.js"
- },
- "engines": {
- "node": ">=10"
- }
- },
"node_modules/prelude-ls": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",
@@ -4975,17 +4832,6 @@
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
"license": "MIT"
},
- "node_modules/pump": {
- "version": "3.0.2",
- "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.2.tgz",
- "integrity": "sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw==",
- "license": "MIT",
- "optional": true,
- "dependencies": {
- "end-of-stream": "^1.1.0",
- "once": "^1.3.1"
- }
- },
"node_modules/punycode": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
@@ -5080,32 +4926,6 @@
"safe-buffer": "^5.1.0"
}
},
- "node_modules/rc": {
- "version": "1.2.8",
- "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz",
- "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==",
- "license": "(BSD-2-Clause OR MIT OR Apache-2.0)",
- "optional": true,
- "dependencies": {
- "deep-extend": "^0.6.0",
- "ini": "~1.3.0",
- "minimist": "^1.2.0",
- "strip-json-comments": "~2.0.1"
- },
- "bin": {
- "rc": "cli.js"
- }
- },
- "node_modules/rc/node_modules/strip-json-comments": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz",
- "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==",
- "license": "MIT",
- "optional": true,
- "engines": {
- "node": ">=0.10.0"
- }
- },
"node_modules/react": {
"version": "18.3.1",
"resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz",
@@ -5305,21 +5125,6 @@
"react": ">= 0.14.0"
}
},
- "node_modules/readable-stream": {
- "version": "3.6.2",
- "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
- "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
- "license": "MIT",
- "optional": true,
- "dependencies": {
- "inherits": "^2.0.3",
- "string_decoder": "^1.1.1",
- "util-deprecate": "^1.0.1"
- },
- "engines": {
- "node": ">= 6"
- }
- },
"node_modules/redux": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/redux/-/redux-5.0.1.tgz",
@@ -5442,9 +5247,9 @@
}
},
"node_modules/rollup": {
- "version": "4.25.0",
- "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.25.0.tgz",
- "integrity": "sha512-uVbClXmR6wvx5R1M3Od4utyLUxrmOcEm3pAtMphn73Apq19PDtHpgZoEvqH2YnnaNUuvKmg2DgRd2Sqv+odyqg==",
+ "version": "4.27.4",
+ "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.27.4.tgz",
+ "integrity": "sha512-RLKxqHEMjh/RGLsDxAEsaLO3mWgyoU6x9w6n1ikAzet4B3gI2/3yP6PWY2p9QzRTh6MfEIXB3MwsOY0Iv3vNrw==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -5458,24 +5263,24 @@
"npm": ">=8.0.0"
},
"optionalDependencies": {
- "@rollup/rollup-android-arm-eabi": "4.25.0",
- "@rollup/rollup-android-arm64": "4.25.0",
- "@rollup/rollup-darwin-arm64": "4.25.0",
- "@rollup/rollup-darwin-x64": "4.25.0",
- "@rollup/rollup-freebsd-arm64": "4.25.0",
- "@rollup/rollup-freebsd-x64": "4.25.0",
- "@rollup/rollup-linux-arm-gnueabihf": "4.25.0",
- "@rollup/rollup-linux-arm-musleabihf": "4.25.0",
- "@rollup/rollup-linux-arm64-gnu": "4.25.0",
- "@rollup/rollup-linux-arm64-musl": "4.25.0",
- "@rollup/rollup-linux-powerpc64le-gnu": "4.25.0",
- "@rollup/rollup-linux-riscv64-gnu": "4.25.0",
- "@rollup/rollup-linux-s390x-gnu": "4.25.0",
- "@rollup/rollup-linux-x64-gnu": "4.25.0",
- "@rollup/rollup-linux-x64-musl": "4.25.0",
- "@rollup/rollup-win32-arm64-msvc": "4.25.0",
- "@rollup/rollup-win32-ia32-msvc": "4.25.0",
- "@rollup/rollup-win32-x64-msvc": "4.25.0",
+ "@rollup/rollup-android-arm-eabi": "4.27.4",
+ "@rollup/rollup-android-arm64": "4.27.4",
+ "@rollup/rollup-darwin-arm64": "4.27.4",
+ "@rollup/rollup-darwin-x64": "4.27.4",
+ "@rollup/rollup-freebsd-arm64": "4.27.4",
+ "@rollup/rollup-freebsd-x64": "4.27.4",
+ "@rollup/rollup-linux-arm-gnueabihf": "4.27.4",
+ "@rollup/rollup-linux-arm-musleabihf": "4.27.4",
+ "@rollup/rollup-linux-arm64-gnu": "4.27.4",
+ "@rollup/rollup-linux-arm64-musl": "4.27.4",
+ "@rollup/rollup-linux-powerpc64le-gnu": "4.27.4",
+ "@rollup/rollup-linux-riscv64-gnu": "4.27.4",
+ "@rollup/rollup-linux-s390x-gnu": "4.27.4",
+ "@rollup/rollup-linux-x64-gnu": "4.27.4",
+ "@rollup/rollup-linux-x64-musl": "4.27.4",
+ "@rollup/rollup-win32-arm64-msvc": "4.27.4",
+ "@rollup/rollup-win32-ia32-msvc": "4.27.4",
+ "@rollup/rollup-win32-x64-msvc": "4.27.4",
"fsevents": "~2.3.2"
}
},
@@ -5536,7 +5341,7 @@
"version": "7.6.3",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz",
"integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==",
- "devOptional": true,
+ "dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
@@ -5624,53 +5429,6 @@
"suid": "bin/short-unique-id"
}
},
- "node_modules/simple-concat": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz",
- "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ],
- "license": "MIT",
- "optional": true
- },
- "node_modules/simple-get": {
- "version": "4.0.1",
- "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz",
- "integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ],
- "license": "MIT",
- "optional": true,
- "dependencies": {
- "decompress-response": "^6.0.0",
- "once": "^1.3.1",
- "simple-concat": "^1.0.0"
- }
- },
"node_modules/source-map-js": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
@@ -5697,16 +5455,6 @@
"integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==",
"license": "BSD-3-Clause"
},
- "node_modules/string_decoder": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
- "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
- "license": "MIT",
- "optional": true,
- "dependencies": {
- "safe-buffer": "~5.2.0"
- }
- },
"node_modules/strip-json-comments": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
@@ -5744,18 +5492,18 @@
}
},
"node_modules/swagger-client": {
- "version": "3.31.0",
- "resolved": "https://registry.npmjs.org/swagger-client/-/swagger-client-3.31.0.tgz",
- "integrity": "sha512-hVYift5XB8nOgNJVl6cbNtVTVPT2Fdx2wCOcIvuAFcyq0Mwe6+70ezoZ5WfiaIAzzwWfq72jyaLeg8TViGNSmw==",
+ "version": "3.32.1",
+ "resolved": "https://registry.npmjs.org/swagger-client/-/swagger-client-3.32.1.tgz",
+ "integrity": "sha512-vXRjuiUU8QbyniRwv/cOcv2glJS3eLrj3JRmge+R/Kwb+xH9t9SzWAyoalhpAkOlm+NEqpJe9wmbOJbYGR74+g==",
"license": "Apache-2.0",
"dependencies": {
"@babel/runtime-corejs3": "^7.22.15",
"@scarf/scarf": "=1.4.0",
- "@swagger-api/apidom-core": ">=1.0.0-alpha.9 <1.0.0-beta.0",
- "@swagger-api/apidom-error": ">=1.0.0-alpha.9 <1.0.0-beta.0",
- "@swagger-api/apidom-json-pointer": ">=1.0.0-alpha.9 <1.0.0-beta.0",
- "@swagger-api/apidom-ns-openapi-3-1": ">=1.0.0-alpha.9 <1.0.0-beta.0",
- "@swagger-api/apidom-reference": ">=1.0.0-alpha.9 <1.0.0-beta.0",
+ "@swagger-api/apidom-core": ">=1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-error": ">=1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-json-pointer": ">=1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-ns-openapi-3-1": ">=1.0.0-beta.3 <1.0.0-rc.0",
+ "@swagger-api/apidom-reference": ">=1.0.0-beta.3 <1.0.0-rc.0",
"cookie": "~0.7.2",
"deepmerge": "~4.3.0",
"fast-json-patch": "^3.0.0-1",
@@ -5815,43 +5563,6 @@
"react-dom": ">=16.8.0 <19"
}
},
- "node_modules/tar-fs": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.1.tgz",
- "integrity": "sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==",
- "license": "MIT",
- "optional": true,
- "dependencies": {
- "chownr": "^1.1.1",
- "mkdirp-classic": "^0.5.2",
- "pump": "^3.0.0",
- "tar-stream": "^2.1.4"
- }
- },
- "node_modules/tar-stream": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz",
- "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==",
- "license": "MIT",
- "optional": true,
- "dependencies": {
- "bl": "^4.0.3",
- "end-of-stream": "^1.4.1",
- "fs-constants": "^1.0.0",
- "inherits": "^2.0.3",
- "readable-stream": "^3.1.1"
- },
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/text-table": {
- "version": "0.2.0",
- "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
- "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==",
- "dev": true,
- "license": "MIT"
- },
"node_modules/to-regex-range": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
@@ -5872,43 +5583,41 @@
"license": "MIT"
},
"node_modules/tree-sitter": {
- "version": "0.20.4",
- "resolved": "https://registry.npmjs.org/tree-sitter/-/tree-sitter-0.20.4.tgz",
- "integrity": "sha512-rjfR5dc4knG3jnJNN/giJ9WOoN1zL/kZyrS0ILh+eqq8RNcIbiXA63JsMEgluug0aNvfQvK4BfCErN1vIzvKog==",
+ "version": "0.21.1",
+ "resolved": "https://registry.npmjs.org/tree-sitter/-/tree-sitter-0.21.1.tgz",
+ "integrity": "sha512-7dxoA6kYvtgWw80265MyqJlkRl4yawIjO7S5MigytjELkX43fV2WsAXzsNfO7sBpPPCF5Gp0+XzHk0DwLCq3xQ==",
"hasInstallScript": true,
"license": "MIT",
"optional": true,
"dependencies": {
- "nan": "^2.17.0",
- "prebuild-install": "^7.1.1"
+ "node-addon-api": "^8.0.0",
+ "node-gyp-build": "^4.8.0"
}
},
"node_modules/tree-sitter-json": {
- "version": "0.20.2",
- "resolved": "https://registry.npmjs.org/tree-sitter-json/-/tree-sitter-json-0.20.2.tgz",
- "integrity": "sha512-eUxrowp4F1QEGk/i7Sa+Xl8Crlfp7J0AXxX1QdJEQKQYMWhgMbCIgyQvpO3Q0P9oyTrNQxRLlRipDS44a8EtRw==",
+ "version": "0.24.8",
+ "resolved": "https://registry.npmjs.org/tree-sitter-json/-/tree-sitter-json-0.24.8.tgz",
+ "integrity": "sha512-Tc9ZZYwHyWZ3Tt1VEw7Pa2scu1YO7/d2BCBbKTx5hXwig3UfdQjsOPkPyLpDJOn/m1UBEWYAtSdGAwCSyagBqQ==",
"hasInstallScript": true,
"license": "MIT",
"optional": true,
"dependencies": {
- "nan": "^2.18.0"
- }
- },
- "node_modules/tree-sitter-yaml": {
- "version": "0.5.0",
- "resolved": "https://registry.npmjs.org/tree-sitter-yaml/-/tree-sitter-yaml-0.5.0.tgz",
- "integrity": "sha512-POJ4ZNXXSWIG/W4Rjuyg36MkUD4d769YRUGKRqN+sVaj/VCo6Dh6Pkssn1Rtewd5kybx+jT1BWMyWN0CijXnMA==",
- "hasInstallScript": true,
- "license": "MIT",
- "optional": true,
- "dependencies": {
- "nan": "^2.14.0"
+ "node-addon-api": "^8.2.2",
+ "node-gyp-build": "^4.8.2"
+ },
+ "peerDependencies": {
+ "tree-sitter": "^0.21.1"
+ },
+ "peerDependenciesMeta": {
+ "tree-sitter": {
+ "optional": true
+ }
}
},
"node_modules/ts-api-utils": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.4.0.tgz",
- "integrity": "sha512-032cPxaEKwM+GT3vA5JXNzIaizx388rhsSW79vGRNGXfRRAdEAn2mvk36PvK5HnOchyWZ7afLEXqYCvPCrzuzQ==",
+ "version": "1.4.1",
+ "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.4.1.tgz",
+ "integrity": "sha512-5RU2/lxTA3YUZxju61HO2U6EoZLvBLtmV2mbTvqyu4a/7s7RmJPT+1YekhMVsQhznRWk/czIwDUg+V8Q9ZuG4w==",
"dev": true,
"license": "MIT",
"engines": {
@@ -5936,19 +5645,6 @@
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
"license": "0BSD"
},
- "node_modules/tunnel-agent": {
- "version": "0.6.0",
- "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
- "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==",
- "license": "Apache-2.0",
- "optional": true,
- "dependencies": {
- "safe-buffer": "^5.0.1"
- },
- "engines": {
- "node": "*"
- }
- },
"node_modules/type-check": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
@@ -5984,9 +5680,9 @@
}
},
"node_modules/typescript": {
- "version": "5.6.3",
- "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.3.tgz",
- "integrity": "sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==",
+ "version": "5.7.2",
+ "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.2.tgz",
+ "integrity": "sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg==",
"dev": true,
"license": "Apache-2.0",
"bin": {
@@ -6081,13 +5777,6 @@
"react": "^16.8.0 || ^17.0.0 || ^18.0.0"
}
},
- "node_modules/util-deprecate": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
- "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
- "license": "MIT",
- "optional": true
- },
"node_modules/vite": {
"version": "5.4.11",
"resolved": "https://registry.npmjs.org/vite/-/vite-5.4.11.tgz",
@@ -6164,9 +5853,9 @@
}
},
"node_modules/web-tree-sitter": {
- "version": "0.20.3",
- "resolved": "https://registry.npmjs.org/web-tree-sitter/-/web-tree-sitter-0.20.3.tgz",
- "integrity": "sha512-zKGJW9r23y3BcJusbgvnOH2OYAW40MXAOi9bi3Gcc7T4Gms9WWgXF8m6adsJWpGJEhgOzCrfiz1IzKowJWrtYw==",
+ "version": "0.24.3",
+ "resolved": "https://registry.npmjs.org/web-tree-sitter/-/web-tree-sitter-0.24.3.tgz",
+ "integrity": "sha512-uR9YNewr1S2EzPKE+y39nAwaTyobBaZRG/IsfkB/OT4v0lXtNj5WjtHKgn2h7eOYUWIZh5rK9Px7tI6S9CRKdA==",
"license": "MIT",
"optional": true
},
@@ -6196,13 +5885,6 @@
"node": ">=0.10.0"
}
},
- "node_modules/wrappy": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
- "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
- "license": "ISC",
- "optional": true
- },
"node_modules/xml": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/xml/-/xml-1.0.1.tgz",
diff --git a/playground/package.json b/playground/package.json
index 61a6efd544..3f8066719e 100644
--- a/playground/package.json
+++ b/playground/package.json
@@ -20,13 +20,13 @@
"@types/react": "^18.3.12",
"@types/react-dom": "^18.3.1",
"@types/swagger-ui-react": "^4.18.3",
- "@typescript-eslint/eslint-plugin": "^8.13.0",
- "@typescript-eslint/parser": "^8.13.0",
+ "@typescript-eslint/eslint-plugin": "^8.15.0",
+ "@typescript-eslint/parser": "^8.15.0",
"@vitejs/plugin-react-swc": "^3.7.1",
- "eslint": "^9.14.0",
+ "eslint": "^9.15.0",
"eslint-plugin-react-hooks": "^5.0.0",
"eslint-plugin-react-refresh": "^0.4.14",
- "typescript": "^5.6.3",
+ "typescript": "^5.7.2",
"vite": "^5.4.11"
}
}
From c7b8b93dd02d4b68b183cee8fc22131fd08c2d3b Mon Sep 17 00:00:00 2001
From: Shahzad Lone
Date: Tue, 26 Nov 2024 09:42:08 -0500
Subject: [PATCH 34/47] feat: Add ability to add/delete relationship for all
actors (#3254)
## Relevant issue(s)
Resolves #3255
## Description
- Can target all actors using `"*"` to add or delete acp relationships.
- All explicitly added relationships are unaffected upon revocation
using `"*"` (they will keep access).
### For Reviewers
- Commit by commit review should be easier.
- [x] todo: Pushing the crashing gql tests fix once
https://github.com/sourcenetwork/defradb/pull/3267 is merged
## Tasks
- [x] I made sure the code is well commented, particularly
hard-to-understand areas.
- [x] I made sure the repository-held documentation is changed
accordingly.
- [x] I made sure the pull request title adheres to the conventional
commit style (the subset used in the project can be found in
[tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)).
- [x] I made sure to discuss its limitations such as threats to
validity, vulnerability to mistake and misuse, robustness to
invalidation of assumptions, resource requirements, ...
## How has this been tested?
- Integration tests
Specify the platform(s) on which this was tested:
- Manjaro WSL2
---
acp/README.md | 40 ++
acp/acp_local.go | 36 +-
acp/acp_source_hub.go | 64 +-
cli/acp_relationship_add.go | 8 +
client/db.go | 9 +-
.../defradb_client_acp_relationship_add.md | 8 +
tests/integration/acp.go | 10 +-
.../add/with_target_all_actors_gql_test.go | 250 ++++++++
.../add/with_target_all_actors_test.go | 250 ++++++++
.../delete/with_target_all_actors_test.go | 548 ++++++++++++++++++
tests/integration/identity.go | 112 ++--
tests/integration/state.go | 4 +-
tests/integration/test_case.go | 12 +-
13 files changed, 1278 insertions(+), 73 deletions(-)
create mode 100644 tests/integration/acp/relationship/doc_actor/add/with_target_all_actors_gql_test.go
create mode 100644 tests/integration/acp/relationship/doc_actor/add/with_target_all_actors_test.go
create mode 100644 tests/integration/acp/relationship/doc_actor/delete/with_target_all_actors_test.go
diff --git a/acp/README.md b/acp/README.md
index 4e8d5b7f5b..3fedb5a274 100644
--- a/acp/README.md
+++ b/acp/README.md
@@ -631,6 +631,26 @@ Result:
Error: document not found or not authorized to access
```
+Sometimes we might want to give a specific access (form a relationship) not just to one identity, but to any identity.
+In that case we can specify "*" instead of specifying an explicit `actor`:
+```sh
+defradb client acp relationship add \
+--collection Users \
+--docID bae-ff3ceb1c-b5c0-5e86-a024-dd1b16a4261c \
+--relation reader \
+--actor "*" \
+--identity e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac
+```
+
+Result:
+```json
+{
+ "ExistedAlready": false
+}
+```
+
+**Note: specifying `*` does not overwrite any previously formed relationships; they will remain as is.**
+
### Revoking Access To Private Documents
To revoke access to a document for an actor, we must delete the relationship between the
@@ -695,6 +715,26 @@ defradb client collection docIDs --identity 4d092126012ebaf56161716018a71630d994
**Result is empty from the above command**
+We can also revoke the previously granted implicit relationship which gave all actors access using the "*" actor.
+Similarly we can just specify "*" to revoke all access given to actors implicitly through this relationship:
+```sh
+defradb client acp relationship delete \
+--collection Users \
+--docID bae-ff3ceb1c-b5c0-5e86-a024-dd1b16a4261c \
+--relation reader \
+--actor "*" \
+--identity e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac
+```
+
+Result:
+```json
+{
+ "RecordFound": true
+}
+```
+
+**Note: Deleting with `*` does not remove any explicitly formed relationships; they will remain as they were.**
+
## DAC Usage HTTP:
### Authentication
diff --git a/acp/acp_local.go b/acp/acp_local.go
index a8a0d32290..99b2cb15f4 100644
--- a/acp/acp_local.go
+++ b/acp/acp_local.go
@@ -254,9 +254,25 @@ func (l *ACPLocal) AddActorRelationship(
ctx = auth.InjectPrincipal(ctx, principal)
+ var newActorRelationship *types.Relationship
+ if targetActor == "*" {
+ newActorRelationship = types.NewAllActorsRelationship(
+ resourceName,
+ objectID,
+ relation,
+ )
+ } else {
+ newActorRelationship = types.NewActorRelationship(
+ resourceName,
+ objectID,
+ relation,
+ targetActor,
+ )
+ }
+
setRelationshipRequest := types.SetRelationshipRequest{
PolicyId: policyID,
- Relationship: types.NewActorRelationship(resourceName, objectID, relation, targetActor),
+ Relationship: newActorRelationship,
CreationTime: creationTime,
}
@@ -285,9 +301,25 @@ func (l *ACPLocal) DeleteActorRelationship(
ctx = auth.InjectPrincipal(ctx, principal)
+ var newActorRelationship *types.Relationship
+ if targetActor == "*" {
+ newActorRelationship = types.NewAllActorsRelationship(
+ resourceName,
+ objectID,
+ relation,
+ )
+ } else {
+ newActorRelationship = types.NewActorRelationship(
+ resourceName,
+ objectID,
+ relation,
+ targetActor,
+ )
+ }
+
deleteRelationshipRequest := types.DeleteRelationshipRequest{
PolicyId: policyID,
- Relationship: types.NewActorRelationship(resourceName, objectID, relation, targetActor),
+ Relationship: newActorRelationship,
}
deleteRelationshipResponse, err := l.engine.DeleteRelationship(ctx, &deleteRelationshipRequest)
diff --git a/acp/acp_source_hub.go b/acp/acp_source_hub.go
index edd6008b63..dd248c5db9 100644
--- a/acp/acp_source_hub.go
+++ b/acp/acp_source_hub.go
@@ -273,18 +273,28 @@ func (a *acpSourceHub) AddActorRelationship(
creationTime *protoTypes.Timestamp,
) (bool, error) {
msgSet := sourcehub.MsgSet{}
+
+ var newActorRelationship *acptypes.Relationship
+ if targetActor == "*" {
+ newActorRelationship = acptypes.NewAllActorsRelationship(
+ resourceName,
+ objectID,
+ relation,
+ )
+ } else {
+ newActorRelationship = acptypes.NewActorRelationship(
+ resourceName,
+ objectID,
+ relation,
+ targetActor,
+ )
+ }
+
cmdMapper := msgSet.WithBearerPolicyCmd(&acptypes.MsgBearerPolicyCmd{
- Creator: a.signer.GetAccAddress(),
- BearerToken: requester.BearerToken,
- PolicyId: policyID,
- Cmd: acptypes.NewSetRelationshipCmd(
- acptypes.NewActorRelationship(
- resourceName,
- objectID,
- relation,
- targetActor,
- ),
- ),
+ Creator: a.signer.GetAccAddress(),
+ BearerToken: requester.BearerToken,
+ PolicyId: policyID,
+ Cmd: acptypes.NewSetRelationshipCmd(newActorRelationship),
CreationTime: creationTime,
})
tx, err := a.txBuilder.Build(ctx, a.signer, &msgSet)
@@ -323,18 +333,28 @@ func (a *acpSourceHub) DeleteActorRelationship(
creationTime *protoTypes.Timestamp,
) (bool, error) {
msgSet := sourcehub.MsgSet{}
+
+ var newActorRelationship *acptypes.Relationship
+ if targetActor == "*" {
+ newActorRelationship = acptypes.NewAllActorsRelationship(
+ resourceName,
+ objectID,
+ relation,
+ )
+ } else {
+ newActorRelationship = acptypes.NewActorRelationship(
+ resourceName,
+ objectID,
+ relation,
+ targetActor,
+ )
+ }
+
cmdMapper := msgSet.WithBearerPolicyCmd(&acptypes.MsgBearerPolicyCmd{
- Creator: a.signer.GetAccAddress(),
- BearerToken: requester.BearerToken,
- PolicyId: policyID,
- Cmd: acptypes.NewDeleteRelationshipCmd(
- acptypes.NewActorRelationship(
- resourceName,
- objectID,
- relation,
- targetActor,
- ),
- ),
+ Creator: a.signer.GetAccAddress(),
+ BearerToken: requester.BearerToken,
+ PolicyId: policyID,
+ Cmd: acptypes.NewDeleteRelationshipCmd(newActorRelationship),
CreationTime: creationTime,
})
diff --git a/cli/acp_relationship_add.go b/cli/acp_relationship_add.go
index c0838a2ce2..0026e992f5 100644
--- a/cli/acp_relationship_add.go
+++ b/cli/acp_relationship_add.go
@@ -64,6 +64,14 @@ Example: Let another actor (4d092126012ebaf56161716018a71630d99443d9d5217e9d8502
--actor did:key:z7r8os2G88XXBNBTLj3kFR5rzUJ4VAesbX7PgsA68ak9B5RYcXF5EZEmjRzzinZndPSSwujXb4XKHG6vmKEFG6ZfsfcQn \
--identity e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac
+Example: Let all actors read a private document:
+ defradb client acp relationship add \
+ --collection Users \
+ --docID bae-ff3ceb1c-b5c0-5e86-a024-dd1b16a4261c \
+ --relation reader \
+ --actor "*" \
+ --identity e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac
+
Example: Creating a dummy relationship does nothing (from database perspective):
defradb client acp relationship add \
-c Users \
diff --git a/client/db.go b/client/db.go
index e8942e8501..bfafb76942 100644
--- a/client/db.go
+++ b/client/db.go
@@ -113,7 +113,9 @@ type DB interface {
// If failure occurs, the result will return an error. Upon success the boolean value will
// be true if the relationship already existed (no-op), and false if a new relationship was made.
//
- // Note: The request actor must either be the owner or manager of the document.
+ // Note:
+ // - The request actor must either be the owner or manager of the document.
+ // - If the target actor arg is "*", then the relationship applies to all actors implicitly.
AddDocActorRelationship(
ctx context.Context,
collectionName string,
@@ -128,7 +130,10 @@ type DB interface {
// be true if the relationship record was found and deleted. Upon success the boolean value
// will be false if the relationship record was not found (no-op).
//
- // Note: The request actor must either be the owner or manager of the document.
+ // Note:
+ // - The request actor must either be the owner or manager of the document.
+ // - If the target actor arg is "*", then the implicitly added relationship with all actors is
+ // removed, however this does not revoke access from actors that had explicit relationships.
DeleteDocActorRelationship(
ctx context.Context,
collectionName string,
diff --git a/docs/website/references/cli/defradb_client_acp_relationship_add.md b/docs/website/references/cli/defradb_client_acp_relationship_add.md
index 1251ffb74e..f3313b45d4 100644
--- a/docs/website/references/cli/defradb_client_acp_relationship_add.md
+++ b/docs/website/references/cli/defradb_client_acp_relationship_add.md
@@ -30,6 +30,14 @@ Example: Let another actor (4d092126012ebaf56161716018a71630d99443d9d5217e9d8502
--actor did:key:z7r8os2G88XXBNBTLj3kFR5rzUJ4VAesbX7PgsA68ak9B5RYcXF5EZEmjRzzinZndPSSwujXb4XKHG6vmKEFG6ZfsfcQn \
--identity e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac
+Example: Let all actors read a private document:
+ defradb client acp relationship add \
+ --collection Users \
+ --docID bae-ff3ceb1c-b5c0-5e86-a024-dd1b16a4261c \
+ --relation reader \
+ --actor "*" \
+ --identity e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac
+
Example: Creating a dummy relationship does nothing (from database perspective):
defradb client acp relationship add \
-c Users \
diff --git a/tests/integration/acp.go b/tests/integration/acp.go
index ce50637d4b..5983ad228a 100644
--- a/tests/integration/acp.go
+++ b/tests/integration/acp.go
@@ -88,7 +88,7 @@ type AddPolicy struct {
Policy string
// The policy creator identity, i.e. actor creating the policy.
- Identity immutable.Option[identityRef]
+ Identity immutable.Option[identity]
// The expected policyID generated based on the Policy loaded in to the ACP system.
ExpectedPolicyID string
@@ -159,13 +159,13 @@ type AddDocActorRelationship struct {
// The target public identity, i.e. the identity of the actor to tie the document's relation with.
//
// This is a required field. To test the invalid usage of not having this arg, use NoIdentity() or leave default.
- TargetIdentity immutable.Option[identityRef]
+ TargetIdentity immutable.Option[identity]
// The requestor identity, i.e. identity of the actor creating the relationship.
// Note: This identity must either own or have managing access defined in the policy.
//
// This is a required field. To test the invalid usage of not having this arg, use NoIdentity() or leave default.
- RequestorIdentity immutable.Option[identityRef]
+ RequestorIdentity immutable.Option[identity]
// Result returns true if it was a no-op due to existing before, and false if a new relationship was made.
ExpectedExistence bool
@@ -251,13 +251,13 @@ type DeleteDocActorRelationship struct {
// The target public identity, i.e. the identity of the actor with whom the relationship is with.
//
// This is a required field. To test the invalid usage of not having this arg, use NoIdentity() or leave default.
- TargetIdentity immutable.Option[identityRef]
+ TargetIdentity immutable.Option[identity]
// The requestor identity, i.e. identity of the actor deleting the relationship.
// Note: This identity must either own or have managing access defined in the policy.
//
// This is a required field. To test the invalid usage of not having this arg, use NoIdentity() or leave default.
- RequestorIdentity immutable.Option[identityRef]
+ RequestorIdentity immutable.Option[identity]
// Result returns true if the relationship record was expected to be found and deleted,
// and returns false if no matching relationship record was found (no-op).
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_target_all_actors_gql_test.go b/tests/integration/acp/relationship/doc_actor/add/with_target_all_actors_gql_test.go
new file mode 100644
index 0000000000..c05380d8e0
--- /dev/null
+++ b/tests/integration/acp/relationship/doc_actor/add/with_target_all_actors_gql_test.go
@@ -0,0 +1,250 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_acp_relationship_doc_actor_add
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/sourcenetwork/immutable"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestACP_OwnerGivesOnlyReadAccessToAllActors_GQL_AllActorsCanReadButNotUpdateOrDelete(t *testing.T) {
+ expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4"
+
+ test := testUtils.TestCase{
+
+ Description: "Test acp, owner gives read access to all actors (gql), but the other actor can't update or delete",
+
+ SupportedMutationTypes: immutable.Some(
+ []testUtils.MutationType{
+ // GQL mutation will return no error when wrong identity is used so test that separately.
+ testUtils.GQLRequestMutationType,
+ },
+ ),
+
+ Actions: []any{
+ testUtils.AddPolicy{
+
+ Identity: testUtils.ClientIdentity(1),
+
+ Policy: `
+ name: Test Policy
+
+ description: A Policy
+
+ actor:
+ name: actor
+
+ resources:
+ users:
+ permissions:
+ read:
+ expr: owner + reader + writer
+
+ write:
+ expr: owner + writer
+
+ nothing:
+ expr: dummy
+
+ relations:
+ owner:
+ types:
+ - actor
+
+ reader:
+ types:
+ - actor
+
+ writer:
+ types:
+ - actor
+
+ admin:
+ manages:
+ - reader
+ types:
+ - actor
+
+ dummy:
+ types:
+ - actor
+ `,
+
+ ExpectedPolicyID: expectedPolicyID,
+ },
+
+ testUtils.SchemaUpdate{
+ Schema: fmt.Sprintf(`
+ type Users @policy(
+ id: "%s",
+ resource: "users"
+ ) {
+ name: String
+ age: Int
+ }
+ `,
+ expectedPolicyID,
+ ),
+ },
+
+ testUtils.CreateDoc{
+ Identity: testUtils.ClientIdentity(1),
+
+ CollectionID: 0,
+
+ Doc: `
+ {
+ "name": "Shahzad",
+ "age": 28
+ }
+ `,
+ },
+
+ testUtils.Request{
+ Identity: testUtils.ClientIdentity(2), // This identity can not read yet.
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{}, // Can't see the documents yet
+ },
+ },
+
+ testUtils.DeleteDoc{ // Since it can't read, it can't delete either.
+ CollectionID: 0,
+
+ Identity: testUtils.ClientIdentity(2),
+
+ DocID: 0,
+
+ ExpectedError: "document not found or not authorized to access",
+ },
+
+ testUtils.UpdateDoc{ // Since it can't read, it can't update either.
+ CollectionID: 0,
+
+ Identity: testUtils.ClientIdentity(2),
+
+ DocID: 0,
+
+ Doc: `
+ {
+ "name": "Shahzad Lone"
+ }
+ `,
+
+ SkipLocalUpdateEvent: true,
+ },
+
+ testUtils.AddDocActorRelationship{
+ RequestorIdentity: testUtils.ClientIdentity(1),
+
+ TargetIdentity: testUtils.AllClientIdentities(),
+
+ CollectionID: 0,
+
+ DocID: 0,
+
+ Relation: "reader",
+
+ ExpectedExistence: false,
+ },
+
+ testUtils.Request{
+ Identity: testUtils.ClientIdentity(2), // Now any identity can read
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b",
+ "name": "Shahzad",
+ "age": int64(28),
+ },
+ },
+ },
+ },
+
+ testUtils.Request{
+ Identity: testUtils.ClientIdentity(3), // Now any identity can read
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b",
+ "name": "Shahzad",
+ "age": int64(28),
+ },
+ },
+ },
+ },
+
+ testUtils.UpdateDoc{ // But doesn't mean they can update.
+ CollectionID: 0,
+
+ Identity: testUtils.ClientIdentity(2),
+
+ DocID: 0,
+
+ Doc: `
+ {
+ "name": "Shahzad Lone"
+ }
+ `,
+
+ ExpectedError: "document not found or not authorized to access",
+ },
+
+ testUtils.DeleteDoc{ // But doesn't mean they can delete.
+ CollectionID: 0,
+
+ Identity: testUtils.ClientIdentity(2),
+
+ DocID: 0,
+
+ ExpectedError: "document not found or not authorized to access",
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_target_all_actors_test.go b/tests/integration/acp/relationship/doc_actor/add/with_target_all_actors_test.go
new file mode 100644
index 0000000000..4ee858345b
--- /dev/null
+++ b/tests/integration/acp/relationship/doc_actor/add/with_target_all_actors_test.go
@@ -0,0 +1,250 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_acp_relationship_doc_actor_add
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/sourcenetwork/immutable"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestACP_OwnerGivesOnlyReadAccessToAllActors_AllActorsCanReadButNotUpdateOrDelete(t *testing.T) {
+ expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4"
+
+ test := testUtils.TestCase{
+
+ Description: "Test acp, owner gives read access to all actors, but the other actor can't update or delete",
+
+ SupportedMutationTypes: immutable.Some(
+ []testUtils.MutationType{
+ testUtils.CollectionNamedMutationType,
+ testUtils.CollectionSaveMutationType,
+ },
+ ),
+
+ Actions: []any{
+ testUtils.AddPolicy{
+
+ Identity: testUtils.ClientIdentity(1),
+
+ Policy: `
+ name: Test Policy
+
+ description: A Policy
+
+ actor:
+ name: actor
+
+ resources:
+ users:
+ permissions:
+ read:
+ expr: owner + reader + writer
+
+ write:
+ expr: owner + writer
+
+ nothing:
+ expr: dummy
+
+ relations:
+ owner:
+ types:
+ - actor
+
+ reader:
+ types:
+ - actor
+
+ writer:
+ types:
+ - actor
+
+ admin:
+ manages:
+ - reader
+ types:
+ - actor
+
+ dummy:
+ types:
+ - actor
+ `,
+
+ ExpectedPolicyID: expectedPolicyID,
+ },
+
+ testUtils.SchemaUpdate{
+ Schema: fmt.Sprintf(`
+ type Users @policy(
+ id: "%s",
+ resource: "users"
+ ) {
+ name: String
+ age: Int
+ }
+ `,
+ expectedPolicyID,
+ ),
+ },
+
+ testUtils.CreateDoc{
+ Identity: testUtils.ClientIdentity(1),
+
+ CollectionID: 0,
+
+ Doc: `
+ {
+ "name": "Shahzad",
+ "age": 28
+ }
+ `,
+ },
+
+ testUtils.Request{
+ Identity: testUtils.ClientIdentity(2), // This identity can not read yet.
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{}, // Can't see the documents yet
+ },
+ },
+
+ testUtils.DeleteDoc{ // Since it can't read, it can't delete either.
+ CollectionID: 0,
+
+ Identity: testUtils.ClientIdentity(2),
+
+ DocID: 0,
+
+ ExpectedError: "document not found or not authorized to access",
+ },
+
+ testUtils.UpdateDoc{ // Since it can't read, it can't update either.
+ CollectionID: 0,
+
+ Identity: testUtils.ClientIdentity(2),
+
+ DocID: 0,
+
+ Doc: `
+ {
+ "name": "Shahzad Lone"
+ }
+ `,
+
+ ExpectedError: "document not found or not authorized to access",
+ },
+
+ testUtils.AddDocActorRelationship{
+ RequestorIdentity: testUtils.ClientIdentity(1),
+
+ TargetIdentity: testUtils.AllClientIdentities(),
+
+ CollectionID: 0,
+
+ DocID: 0,
+
+ Relation: "reader",
+
+ ExpectedExistence: false,
+ },
+
+ testUtils.Request{
+ Identity: testUtils.ClientIdentity(2), // Now any identity can read
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b",
+ "name": "Shahzad",
+ "age": int64(28),
+ },
+ },
+ },
+ },
+
+ testUtils.Request{
+ Identity: testUtils.ClientIdentity(3), // Now any identity can read
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b",
+ "name": "Shahzad",
+ "age": int64(28),
+ },
+ },
+ },
+ },
+
+ testUtils.UpdateDoc{ // But doesn't mean they can update.
+ CollectionID: 0,
+
+ Identity: testUtils.ClientIdentity(2),
+
+ DocID: 0,
+
+ Doc: `
+ {
+ "name": "Shahzad Lone"
+ }
+ `,
+
+ ExpectedError: "document not found or not authorized to access",
+ },
+
+ testUtils.DeleteDoc{ // But doesn't mean they can delete.
+ CollectionID: 0,
+
+ Identity: testUtils.ClientIdentity(2),
+
+ DocID: 0,
+
+ ExpectedError: "document not found or not authorized to access",
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/relationship/doc_actor/delete/with_target_all_actors_test.go b/tests/integration/acp/relationship/doc_actor/delete/with_target_all_actors_test.go
new file mode 100644
index 0000000000..14c0121a41
--- /dev/null
+++ b/tests/integration/acp/relationship/doc_actor/delete/with_target_all_actors_test.go
@@ -0,0 +1,548 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_acp_relationship_doc_actor_delete
+
+import (
+ "fmt"
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestACP_OwnerRevokesAccessFromAllNonExplicitActors_ActorsCanNotReadAnymore(t *testing.T) {
+ expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4"
+
+ test := testUtils.TestCase{
+
+ Description: "Test acp, owner revokes read access from actors that were given read access implicitly",
+
+ Actions: []any{
+ testUtils.AddPolicy{
+
+ Identity: testUtils.ClientIdentity(1),
+
+ Policy: `
+ name: Test Policy
+
+ description: A Policy
+
+ actor:
+ name: actor
+
+ resources:
+ users:
+ permissions:
+ read:
+ expr: owner + reader + writer
+
+ write:
+ expr: owner + writer
+
+ nothing:
+ expr: dummy
+
+ relations:
+ owner:
+ types:
+ - actor
+
+ reader:
+ types:
+ - actor
+
+ writer:
+ types:
+ - actor
+
+ admin:
+ manages:
+ - reader
+ types:
+ - actor
+
+ dummy:
+ types:
+ - actor
+ `,
+
+ ExpectedPolicyID: expectedPolicyID,
+ },
+
+ testUtils.SchemaUpdate{
+ Schema: fmt.Sprintf(`
+ type Users @policy(
+ id: "%s",
+ resource: "users"
+ ) {
+ name: String
+ age: Int
+ }
+ `,
+ expectedPolicyID,
+ ),
+ },
+
+ testUtils.CreateDoc{
+ Identity: testUtils.ClientIdentity(1),
+
+ CollectionID: 0,
+
+ Doc: `
+ {
+ "name": "Shahzad",
+ "age": 28
+ }
+ `,
+ },
+
+ testUtils.AddDocActorRelationship{
+ RequestorIdentity: testUtils.ClientIdentity(1),
+
+ TargetIdentity: testUtils.AllClientIdentities(), // Give implicit access to all identities.
+
+ CollectionID: 0,
+
+ DocID: 0,
+
+ Relation: "reader",
+
+ ExpectedExistence: false,
+ },
+
+ testUtils.Request{
+ Identity: testUtils.ClientIdentity(2), // Any identity can read
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b",
+ "name": "Shahzad",
+ "age": int64(28),
+ },
+ },
+ },
+ },
+
+ testUtils.Request{
+ Identity: testUtils.ClientIdentity(3), // Any identity can read
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b",
+ "name": "Shahzad",
+ "age": int64(28),
+ },
+ },
+ },
+ },
+
+ testUtils.DeleteDocActorRelationship{ // Revoke access from all actors, not explictly allowed.
+ RequestorIdentity: testUtils.ClientIdentity(1),
+
+ TargetIdentity: testUtils.AllClientIdentities(),
+
+ CollectionID: 0,
+
+ DocID: 0,
+
+ Relation: "reader",
+
+ ExpectedRecordFound: true,
+ },
+
+ testUtils.Request{
+ Identity: testUtils.ClientIdentity(2), // Can not read anymore
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{}, // Can't see the documents now
+ },
+ },
+
+ testUtils.Request{
+ Identity: testUtils.ClientIdentity(3), // Can not read anymore
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{}, // Can't see the documents now
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_OwnerRevokesAccessFromAllNonExplicitActors_ExplicitActorsCanStillRead(t *testing.T) {
+ expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4"
+
+ test := testUtils.TestCase{
+
+ Description: "Test acp, owner revokes read access from actors that were given read access implicitly",
+
+ Actions: []any{
+ testUtils.AddPolicy{
+
+ Identity: testUtils.ClientIdentity(1),
+
+ Policy: `
+ name: Test Policy
+
+ description: A Policy
+
+ actor:
+ name: actor
+
+ resources:
+ users:
+ permissions:
+ read:
+ expr: owner + reader + writer
+
+ write:
+ expr: owner + writer
+
+ nothing:
+ expr: dummy
+
+ relations:
+ owner:
+ types:
+ - actor
+
+ reader:
+ types:
+ - actor
+
+ writer:
+ types:
+ - actor
+
+ admin:
+ manages:
+ - reader
+ types:
+ - actor
+
+ dummy:
+ types:
+ - actor
+ `,
+
+ ExpectedPolicyID: expectedPolicyID,
+ },
+
+ testUtils.SchemaUpdate{
+ Schema: fmt.Sprintf(`
+ type Users @policy(
+ id: "%s",
+ resource: "users"
+ ) {
+ name: String
+ age: Int
+ }
+ `,
+ expectedPolicyID,
+ ),
+ },
+
+ testUtils.CreateDoc{
+ Identity: testUtils.ClientIdentity(1),
+
+ CollectionID: 0,
+
+ Doc: `
+ {
+ "name": "Shahzad",
+ "age": 28
+ }
+ `,
+ },
+
+ testUtils.AddDocActorRelationship{
+ RequestorIdentity: testUtils.ClientIdentity(1),
+
+ TargetIdentity: testUtils.ClientIdentity(2), // Give access to this identity explictly before.
+
+ CollectionID: 0,
+
+ DocID: 0,
+
+ Relation: "reader",
+
+ ExpectedExistence: false,
+ },
+
+ testUtils.AddDocActorRelationship{
+ RequestorIdentity: testUtils.ClientIdentity(1),
+
+ TargetIdentity: testUtils.AllClientIdentities(), // Give implicit access to all identities.
+
+ CollectionID: 0,
+
+ DocID: 0,
+
+ Relation: "reader",
+
+ ExpectedExistence: false,
+ },
+
+ testUtils.AddDocActorRelationship{
+ RequestorIdentity: testUtils.ClientIdentity(1),
+
+ TargetIdentity: testUtils.ClientIdentity(4), // Give access to this identity explictly after.
+
+ CollectionID: 0,
+
+ DocID: 0,
+
+ Relation: "reader",
+
+ ExpectedExistence: false,
+ },
+
+ testUtils.Request{
+ Identity: testUtils.ClientIdentity(2), // Any identity can read
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b",
+ "name": "Shahzad",
+ "age": int64(28),
+ },
+ },
+ },
+ },
+
+ testUtils.Request{
+ Identity: testUtils.ClientIdentity(3), // Any identity can read
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b",
+ "name": "Shahzad",
+ "age": int64(28),
+ },
+ },
+ },
+ },
+
+ testUtils.Request{
+ Identity: testUtils.ClientIdentity(4), // Any identity can read
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b",
+ "name": "Shahzad",
+ "age": int64(28),
+ },
+ },
+ },
+ },
+
+ testUtils.Request{
+ Identity: testUtils.ClientIdentity(5), // Any identity can read
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b",
+ "name": "Shahzad",
+ "age": int64(28),
+ },
+ },
+ },
+ },
+
+ testUtils.DeleteDocActorRelationship{ // Revoke access from all actors, not explictly allowed.
+ RequestorIdentity: testUtils.ClientIdentity(1),
+
+ TargetIdentity: testUtils.AllClientIdentities(),
+
+ CollectionID: 0,
+
+ DocID: 0,
+
+ Relation: "reader",
+
+ ExpectedRecordFound: true,
+ },
+
+ testUtils.Request{
+ Identity: testUtils.ClientIdentity(3), // Can not read anymore, because it gained access implicitly.
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{}, // Can't see the documents now
+ },
+ },
+
+ testUtils.Request{
+ Identity: testUtils.ClientIdentity(5), // Can not read anymore, because it gained access implicitly.
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{}, // Can't see the documents now
+ },
+ },
+
+ testUtils.Request{
+ Identity: testUtils.ClientIdentity(2), // Can still read because it was given access explictly.
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b",
+ "name": "Shahzad",
+ "age": int64(28),
+ },
+ },
+ },
+ },
+
+ testUtils.Request{
+ Identity: testUtils.ClientIdentity(4), // Can still read because it was given access explictly.
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b",
+ "name": "Shahzad",
+ "age": int64(28),
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/identity.go b/tests/integration/identity.go
index 7c56d81375..d8efe506df 100644
--- a/tests/integration/identity.go
+++ b/tests/integration/identity.go
@@ -13,6 +13,7 @@ package tests
import (
"context"
"math/rand"
+ "strconv"
"github.com/decred/dcrd/dcrec/secp256k1/v4"
"github.com/sourcenetwork/immutable"
@@ -21,31 +22,56 @@ import (
acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
)
-// identityRef is a type that refers to a specific identity of a certain type.
-type identityRef struct {
- isClient bool
- index int
+type identityType int
+
+const (
+ clientIdentityType identityType = iota
+ nodeIdentityType
+)
+
+// identity helps specify identity type info and selector/index of identity to use in a test case.
+type identity struct {
+ // type of identity
+ kind identityType
+
+ // selector can be a valid identity index or a selecting pattern like "*".
+ // Note: "*" means to select all identities of the specified [kind] type.
+ selector string
}
// NoIdentity returns a reference to an identity that represents no identity.
-func NoIdentity() immutable.Option[identityRef] {
- return immutable.None[identityRef]()
+func NoIdentity() immutable.Option[identity] {
+ return immutable.None[identity]()
}
-// ClientIdentity returns a reference to a user identity with a given index.
-func ClientIdentity(index int) immutable.Option[identityRef] {
- return immutable.Some(identityRef{
- isClient: true,
- index: index,
- })
+// AllClientIdentities returns user identity selector specified with the "*".
+func AllClientIdentities() immutable.Option[identity] {
+ return immutable.Some(
+ identity{
+ kind: clientIdentityType,
+ selector: "*",
+ },
+ )
}
-// NodeIdentity returns a reference to a node identity with a given index.
-func NodeIdentity(index int) immutable.Option[identityRef] {
- return immutable.Some(identityRef{
- isClient: false,
- index: index,
- })
+// ClientIdentity returns a user identity at the given index.
+func ClientIdentity(indexSelector int) immutable.Option[identity] {
+ return immutable.Some(
+ identity{
+ kind: clientIdentityType,
+ selector: strconv.Itoa(indexSelector),
+ },
+ )
+}
+
+// ClientIdentity returns a node identity at the given index.
+func NodeIdentity(indexSelector int) immutable.Option[identity] {
+ return immutable.Some(
+ identity{
+ kind: nodeIdentityType,
+ selector: strconv.Itoa(indexSelector),
+ },
+ )
}
// identityHolder holds an identity and the generated tokens for each target node.
@@ -66,30 +92,37 @@ func newIdentityHolder(ident acpIdentity.Identity) *identityHolder {
// getIdentity returns the identity for the given reference.
// If the identity does not exist, it will be generated.
-func getIdentity(s *state, ref immutable.Option[identityRef]) acpIdentity.Identity {
- if !ref.HasValue() {
+func getIdentity(s *state, identity immutable.Option[identity]) acpIdentity.Identity {
+ if !identity.HasValue() {
return acpIdentity.Identity{}
}
- return getIdentityHolder(s, ref.Value()).Identity
+
+ // The selector must never be "*" here because this function returns a specific identity from the
+ // stored identities. If the "*" string needs to be signaled to the acp module, then it should be
+ // handled in a call made before this function.
+ if identity.Value().selector == "*" {
+ require.Fail(s.t, "Used the \"*\" selector for identity incorrectly.", s.testCase.Description)
+ }
+ return getIdentityHolder(s, identity.Value()).Identity
}
// getIdentityHolder returns the identity holder for the given reference.
// If the identity does not exist, it will be generated.
-func getIdentityHolder(s *state, ref identityRef) *identityHolder {
- ident, ok := s.identities[ref]
+func getIdentityHolder(s *state, identity identity) *identityHolder {
+ ident, ok := s.identities[identity]
if ok {
return ident
}
- s.identities[ref] = newIdentityHolder(generateIdentity(s))
- return s.identities[ref]
+ s.identities[identity] = newIdentityHolder(generateIdentity(s))
+ return s.identities[identity]
}
// getIdentityForRequest returns the identity for the given reference and node index.
// It prepares the identity for a request by generating a token if needed, i.e. it will
// return an identity with [Identity.BearerToken] set.
-func getIdentityForRequest(s *state, ref identityRef, nodeIndex int) acpIdentity.Identity {
- identHolder := getIdentityHolder(s, ref)
+func getIdentityForRequest(s *state, identity identity, nodeIndex int) acpIdentity.Identity {
+ identHolder := getIdentityHolder(s, identity)
ident := identHolder.Identity
token, ok := identHolder.NodeTokens[nodeIndex]
@@ -129,19 +162,30 @@ func generateIdentity(s *state) acpIdentity.Identity {
func getContextWithIdentity(
ctx context.Context,
s *state,
- ref immutable.Option[identityRef],
+ identity immutable.Option[identity],
nodeIndex int,
) context.Context {
- if !ref.HasValue() {
+ if !identity.HasValue() {
return ctx
}
- ident := getIdentityForRequest(s, ref.Value(), nodeIndex)
- return acpIdentity.WithContext(ctx, immutable.Some(ident))
+ return acpIdentity.WithContext(
+ ctx,
+ immutable.Some(
+ getIdentityForRequest(
+ s,
+ identity.Value(),
+ nodeIndex,
+ ),
+ ),
+ )
}
-func getIdentityDID(s *state, ref immutable.Option[identityRef]) string {
- if ref.HasValue() {
- return getIdentity(s, ref).DID
+func getIdentityDID(s *state, identity immutable.Option[identity]) string {
+ if identity.HasValue() {
+ if identity.Value().selector == "*" {
+ return identity.Value().selector
+ }
+ return getIdentity(s, identity).DID
}
return ""
}
diff --git a/tests/integration/state.go b/tests/integration/state.go
index c495f80d9e..a1085b94b9 100644
--- a/tests/integration/state.go
+++ b/tests/integration/state.go
@@ -173,7 +173,7 @@ type state struct {
// types. See [identRef].
// The map value is the identity holder that contains the identity itself and token
// generated for different target nodes. See [identityHolder].
- identities map[identityRef]*identityHolder
+ identities map[identity]*identityHolder
// The seed for the next identity generation. We want identities to be deterministic.
nextIdentityGenSeed int
@@ -237,7 +237,7 @@ func newState(
clientType: clientType,
txns: []datastore.Txn{},
allActionsDone: make(chan struct{}),
- identities: map[identityRef]*identityHolder{},
+ identities: map[identity]*identityHolder{},
subscriptionResultsChans: []chan func(){},
collectionNames: collectionNames,
collectionIndexesByRoot: map[uint32]int{},
diff --git a/tests/integration/test_case.go b/tests/integration/test_case.go
index a1ab291257..2e7eaaa5ea 100644
--- a/tests/integration/test_case.go
+++ b/tests/integration/test_case.go
@@ -297,7 +297,7 @@ type CreateDoc struct {
//
// Use `UserIdentity` to create a user identity and `NodeIdentity` to create a node identity.
// Default value is `NoIdentity()`.
- Identity immutable.Option[identityRef]
+ Identity immutable.Option[identity]
// Specifies whether the document should be encrypted.
IsDocEncrypted bool
@@ -369,7 +369,7 @@ type DeleteDoc struct {
//
// Use `UserIdentity` to create a user identity and `NodeIdentity` to create a node identity.
// Default value is `NoIdentity()`.
- Identity immutable.Option[identityRef]
+ Identity immutable.Option[identity]
// The collection in which this document should be deleted.
CollectionID int
@@ -402,7 +402,7 @@ type UpdateDoc struct {
//
// Use `UserIdentity` to create a user identity and `NodeIdentity` to create a node identity.
// Default value is `NoIdentity()`.
- Identity immutable.Option[identityRef]
+ Identity immutable.Option[identity]
// The collection in which this document exists.
CollectionID int
@@ -445,7 +445,7 @@ type UpdateWithFilter struct {
//
// Use `UserIdentity` to create a user identity and `NodeIdentity` to create a node identity.
// Default value is `NoIdentity()`.
- Identity immutable.Option[identityRef]
+ Identity immutable.Option[identity]
// The collection in which this document exists.
CollectionID int
@@ -602,7 +602,7 @@ type Request struct {
//
// Use `UserIdentity` to create a user identity and `NodeIdentity` to create a node identity.
// Default value is `NoIdentity()`.
- Identity immutable.Option[identityRef]
+ Identity immutable.Option[identity]
// Used to identify the transaction for this to run against. Optional.
TransactionID immutable.Option[int]
@@ -805,7 +805,7 @@ type GetNodeIdentity struct {
//
// Use `UserIdentity` to create a user identity and `NodeIdentity` to create a node identity.
// Default value is `NoIdentity()`.
- ExpectedIdentity immutable.Option[identityRef]
+ ExpectedIdentity immutable.Option[identity]
}
// Wait is an action that will wait for the given duration.
From 85092118fd5e7421673c23700a64c8459442be9a Mon Sep 17 00:00:00 2001
From: amarkpark
Date: Wed, 27 Nov 2024 10:44:40 -0700
Subject: [PATCH 35/47] docs(i): Replace broken DQL link in README (#3274)
## Relevant issue(s)
Resolves #3269
## Description
This is a change to update a broken link in the README.
This changes the target URI to point to an extant page in the docs.
FROM:
https://docs.source.network/references/query-specification/query-language-overview
TO:
https://docs.source.network/defradb/references/query-specification/query-language-overview
NO CODE CHANGES
## Tasks
- [NA] I made sure the code is well commented, particularly
hard-to-understand areas.
- [X] I made sure the repository-held documentation is changed
accordingly.
- [X] I made sure the pull request title adheres to the conventional
commit style (the subset used in the project can be found in
[tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)).
- [NA] I made sure to discuss its limitations such as threats to
validity, vulnerability to mistake and misuse, robustness to
invalidation of assumptions, resource requirements, ...
## How has this been tested?
I manually followed the old link and the new link. Old was a broken
link. New is a proposed correct link.
Specify the platform(s) on which this was tested:
- MacOS
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 3f8276b5cb..368dbe4e0c 100644
--- a/README.md
+++ b/README.md
@@ -11,7 +11,7 @@
-DefraDB is a user-centric database that prioritizes data ownership, personal privacy, and information security. Its data model, powered by the convergence of [MerkleCRDTs](https://arxiv.org/pdf/2004.00107.pdf) and the content-addressability of [IPLD](https://docs.ipld.io/), enables a multi-write-master architecture. It features [DQL](https://docs.source.network/references/query-specification/query-language-overview), a query language compatible with GraphQL but providing extra convenience. By leveraging peer-to-peer networking it can be deployed nimbly in novel topologies. Access control is determined by a relationship-based DSL, supporting document or field-level policies, secured by the SourceHub network. DefraDB is a core part of the [Source technologies](https://source.network/) that enable new paradigms of decentralized data and access-control management, user-centric apps, data trustworthiness, and much more.
+DefraDB is a user-centric database that prioritizes data ownership, personal privacy, and information security. Its data model, powered by the convergence of [MerkleCRDTs](https://arxiv.org/pdf/2004.00107.pdf) and the content-addressability of [IPLD](https://docs.ipld.io/), enables a multi-write-master architecture. It features [DQL](https://docs.source.network/defradb/references/query-specification/query-language-overview), a query language compatible with GraphQL but providing extra convenience. By leveraging peer-to-peer networking it can be deployed nimbly in novel topologies. Access control is determined by a relationship-based DSL, supporting document or field-level policies, secured by the SourceHub network. DefraDB is a core part of the [Source technologies](https://source.network/) that enable new paradigms of decentralized data and access-control management, user-centric apps, data trustworthiness, and much more.
Read the documentation on [docs.source.network](https://docs.source.network/).
From 6cb32894ea94321214ab17270867ad5a02a0be43 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
<41898282+github-actions[bot]@users.noreply.github.com>
Date: Mon, 2 Dec 2024 17:13:16 -0500
Subject: [PATCH 36/47] bot: Update dependencies (bulk dependabot PRs)
03-12-2024 (#3288)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
✅ This PR was created by combining the following PRs:
#3287 bot: Bump github.com/bits-and-blooms/bitset from 1.15.0 to 1.17.0
#3286 bot: Bump github.com/stretchr/testify from 1.9.0 to 1.10.0
#3285 bot: Bump github.com/lestrrat-go/jwx/v2 from 2.1.2 to 2.1.3
#3284 bot: Bump vite from 5.4.11 to 6.0.2 in /playground
#3283 bot: Bump eslint-plugin-react-refresh from 0.4.14 to 0.4.15 in
/playground
#3282 bot: Bump @vitejs/plugin-react-swc from 3.7.1 to 3.7.2 in
/playground
⚠️ The following PRs were resolved manually due to merge conflicts:
#3281 bot: Bump eslint from 9.15.0 to 9.16.0 in /playground
---------
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: Shahzad Lone
---
go.mod | 6 +-
go.sum | 11 +-
playground/package-lock.json | 427 ++++++++++++++++++++---------------
playground/package.json | 8 +-
4 files changed, 260 insertions(+), 192 deletions(-)
diff --git a/go.mod b/go.mod
index ac7d480c08..e8d263bb76 100644
--- a/go.mod
+++ b/go.mod
@@ -5,7 +5,7 @@ go 1.22.0
toolchain go1.22.7
require (
- github.com/bits-and-blooms/bitset v1.15.0
+ github.com/bits-and-blooms/bitset v1.17.0
github.com/bxcodec/faker v2.0.1+incompatible
github.com/cosmos/cosmos-sdk v0.50.10
github.com/cosmos/gogoproto v1.7.0
@@ -31,7 +31,7 @@ require (
github.com/jbenet/goprocess v0.1.4
github.com/joho/godotenv v1.5.1
github.com/lens-vm/lens/host-go v0.0.0-20231127204031-8d858ed2926c
- github.com/lestrrat-go/jwx/v2 v2.1.2
+ github.com/lestrrat-go/jwx/v2 v2.1.3
github.com/libp2p/go-libp2p v0.37.0
github.com/libp2p/go-libp2p-gostream v0.6.0
github.com/libp2p/go-libp2p-kad-dht v0.28.1
@@ -54,7 +54,7 @@ require (
github.com/spf13/cobra v1.8.1
github.com/spf13/pflag v1.0.5
github.com/spf13/viper v1.19.0
- github.com/stretchr/testify v1.9.0
+ github.com/stretchr/testify v1.10.0
github.com/tidwall/btree v1.7.0
github.com/valyala/fastjson v1.6.4
github.com/vito/go-sse v1.1.2
diff --git a/go.sum b/go.sum
index f4c2388f9e..db0e7bc21a 100644
--- a/go.sum
+++ b/go.sum
@@ -294,8 +294,8 @@ github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d/go.mod h1:6QX/PXZ
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816 h1:41iFGWnSlI2gVpmOtVTJZNodLdLQLn/KsJqFvXwnd/s=
github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
-github.com/bits-and-blooms/bitset v1.15.0 h1:DiCRMscZsGyYePE9AR3sVhKqUXCt5IZvkX5AfAc5xLQ=
-github.com/bits-and-blooms/bitset v1.15.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
+github.com/bits-and-blooms/bitset v1.17.0 h1:1X2TS7aHz1ELcC0yU1y2stUs/0ig5oMU6STFZGrhvHI=
+github.com/bits-and-blooms/bitset v1.17.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM=
github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ=
github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g=
@@ -983,8 +983,8 @@ github.com/lestrrat-go/httprc v1.0.6 h1:qgmgIRhpvBqexMJjA/PmwSvhNk679oqD1RbovdCG
github.com/lestrrat-go/httprc v1.0.6/go.mod h1:mwwz3JMTPBjHUkkDv/IGJ39aALInZLrhBp0X7KGUZlo=
github.com/lestrrat-go/iter v1.0.2 h1:gMXo1q4c2pHmC3dn8LzRhJfP1ceCbgSiT9lUydIzltI=
github.com/lestrrat-go/iter v1.0.2/go.mod h1:Momfcq3AnRlRjI5b5O8/G5/BvpzrhoFTZcn06fEOPt4=
-github.com/lestrrat-go/jwx/v2 v2.1.2 h1:6poete4MPsO8+LAEVhpdrNI4Xp2xdiafgl2RD89moBc=
-github.com/lestrrat-go/jwx/v2 v2.1.2/go.mod h1:pO+Gz9whn7MPdbsqSJzG8TlEpMZCwQDXnFJ+zsUVh8Y=
+github.com/lestrrat-go/jwx/v2 v2.1.3 h1:Ud4lb2QuxRClYAmRleF50KrbKIoM1TddXgBrneT5/Jo=
+github.com/lestrrat-go/jwx/v2 v2.1.3/go.mod h1:q6uFgbgZfEmQrfJfrCo90QcQOcXFMfbI/fO0NqRtvZo=
github.com/lestrrat-go/option v1.0.1 h1:oAzP2fvZGQKWkvHa1/SAcFolBEca1oN+mQ7eooNBEYU=
github.com/lestrrat-go/option v1.0.1/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I=
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
@@ -1450,8 +1450,9 @@ github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
-github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
+github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/syndtr/goleveldb v1.0.1-0.20220721030215-126854af5e6d h1:vfofYNRScrDdvS342BElfbETmL1Aiz3i2t0zfRj16Hs=
diff --git a/playground/package-lock.json b/playground/package-lock.json
index 5125c66756..42c3faa7aa 100644
--- a/playground/package-lock.json
+++ b/playground/package-lock.json
@@ -20,12 +20,12 @@
"@types/swagger-ui-react": "^4.18.3",
"@typescript-eslint/eslint-plugin": "^8.15.0",
"@typescript-eslint/parser": "^8.15.0",
- "@vitejs/plugin-react-swc": "^3.7.1",
- "eslint": "^9.15.0",
+ "@vitejs/plugin-react-swc": "^3.7.2",
+ "eslint": "^9.16.0",
"eslint-plugin-react-hooks": "^5.0.0",
- "eslint-plugin-react-refresh": "^0.4.14",
+ "eslint-plugin-react-refresh": "^0.4.15",
"typescript": "^5.7.2",
- "vite": "^5.4.11"
+ "vite": "^6.0.2"
}
},
"node_modules/@babel/runtime": {
@@ -59,6 +59,37 @@
"integrity": "sha512-hPYRrKFoI+nuckPgDJfyYAkybFvheo4usS0Vw0HNAe+fmGBQA5Az37b/yStO284atBoqqdOUhKJ3d9Zw3PQkcQ==",
"license": "MIT"
},
+ "node_modules/@codemirror/language": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.0.0.tgz",
+ "integrity": "sha512-rtjk5ifyMzOna1c7PBu7J1VCt0PvA5wy3o8eMVnxMKb7z8KA7JFecvD04dSn14vj/bBaAbqRsGed5OjtofEnLA==",
+ "peer": true,
+ "dependencies": {
+ "@codemirror/state": "^6.0.0",
+ "@codemirror/view": "^6.0.0",
+ "@lezer/common": "^1.0.0",
+ "@lezer/highlight": "^1.0.0",
+ "@lezer/lr": "^1.0.0",
+ "style-mod": "^4.0.0"
+ }
+ },
+ "node_modules/@codemirror/state": {
+ "version": "6.4.1",
+ "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.4.1.tgz",
+ "integrity": "sha512-QkEyUiLhsJoZkbumGZlswmAhA7CBU02Wrz7zvH4SrcifbsqwlXShVXg65f3v/ts57W3dqyamEriMhij1Z3Zz4A==",
+ "peer": true
+ },
+ "node_modules/@codemirror/view": {
+ "version": "6.35.0",
+ "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.35.0.tgz",
+ "integrity": "sha512-I0tYy63q5XkaWsJ8QRv5h6ves7kvtrBWjBcnf/bzohFJQc5c14a1AQRdE8QpPF9eMp5Mq2FMm59TCj1gDfE7kw==",
+ "peer": true,
+ "dependencies": {
+ "@codemirror/state": "^6.4.0",
+ "style-mod": "^4.1.0",
+ "w3c-keyname": "^2.2.4"
+ }
+ },
"node_modules/@emotion/is-prop-valid": {
"version": "0.8.8",
"resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz",
@@ -77,394 +108,387 @@
"optional": true
},
"node_modules/@esbuild/aix-ppc64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz",
- "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.24.0.tgz",
+ "integrity": "sha512-WtKdFM7ls47zkKHFVzMz8opM7LkcsIp9amDUBIAWirg70RM71WRSjdILPsY5Uv1D42ZpUfaPILDlfactHgsRkw==",
"cpu": [
"ppc64"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"aix"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/android-arm": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz",
- "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.24.0.tgz",
+ "integrity": "sha512-arAtTPo76fJ/ICkXWetLCc9EwEHKaeya4vMrReVlEIUCAUncH7M4bhMQ+M9Vf+FFOZJdTNMXNBrWwW+OXWpSew==",
"cpu": [
"arm"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/android-arm64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz",
- "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.24.0.tgz",
+ "integrity": "sha512-Vsm497xFM7tTIPYK9bNTYJyF/lsP590Qc1WxJdlB6ljCbdZKU9SY8i7+Iin4kyhV/KV5J2rOKsBQbB77Ab7L/w==",
"cpu": [
"arm64"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/android-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz",
- "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.24.0.tgz",
+ "integrity": "sha512-t8GrvnFkiIY7pa7mMgJd7p8p8qqYIz1NYiAoKc75Zyv73L3DZW++oYMSHPRarcotTKuSs6m3hTOa5CKHaS02TQ==",
"cpu": [
"x64"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/darwin-arm64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz",
- "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.24.0.tgz",
+ "integrity": "sha512-CKyDpRbK1hXwv79soeTJNHb5EiG6ct3efd/FTPdzOWdbZZfGhpbcqIpiD0+vwmpu0wTIL97ZRPZu8vUt46nBSw==",
"cpu": [
"arm64"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/darwin-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz",
- "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.24.0.tgz",
+ "integrity": "sha512-rgtz6flkVkh58od4PwTRqxbKH9cOjaXCMZgWD905JOzjFKW+7EiUObfd/Kav+A6Gyud6WZk9w+xu6QLytdi2OA==",
"cpu": [
"x64"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/freebsd-arm64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz",
- "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.24.0.tgz",
+ "integrity": "sha512-6Mtdq5nHggwfDNLAHkPlyLBpE5L6hwsuXZX8XNmHno9JuL2+bg2BX5tRkwjyfn6sKbxZTq68suOjgWqCicvPXA==",
"cpu": [
"arm64"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/freebsd-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz",
- "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.24.0.tgz",
+ "integrity": "sha512-D3H+xh3/zphoX8ck4S2RxKR6gHlHDXXzOf6f/9dbFt/NRBDIE33+cVa49Kil4WUjxMGW0ZIYBYtaGCa2+OsQwQ==",
"cpu": [
"x64"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-arm": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz",
- "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.24.0.tgz",
+ "integrity": "sha512-gJKIi2IjRo5G6Glxb8d3DzYXlxdEj2NlkixPsqePSZMhLudqPhtZ4BUrpIuTjJYXxvF9njql+vRjB2oaC9XpBw==",
"cpu": [
"arm"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-arm64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz",
- "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.24.0.tgz",
+ "integrity": "sha512-TDijPXTOeE3eaMkRYpcy3LarIg13dS9wWHRdwYRnzlwlA370rNdZqbcp0WTyyV/k2zSxfko52+C7jU5F9Tfj1g==",
"cpu": [
"arm64"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-ia32": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz",
- "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.24.0.tgz",
+ "integrity": "sha512-K40ip1LAcA0byL05TbCQ4yJ4swvnbzHscRmUilrmP9Am7//0UjPreh4lpYzvThT2Quw66MhjG//20mrufm40mA==",
"cpu": [
"ia32"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-loong64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz",
- "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.24.0.tgz",
+ "integrity": "sha512-0mswrYP/9ai+CU0BzBfPMZ8RVm3RGAN/lmOMgW4aFUSOQBjA31UP8Mr6DDhWSuMwj7jaWOT0p0WoZ6jeHhrD7g==",
"cpu": [
"loong64"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-mips64el": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz",
- "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.24.0.tgz",
+ "integrity": "sha512-hIKvXm0/3w/5+RDtCJeXqMZGkI2s4oMUGj3/jM0QzhgIASWrGO5/RlzAzm5nNh/awHE0A19h/CvHQe6FaBNrRA==",
"cpu": [
"mips64el"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-ppc64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz",
- "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.24.0.tgz",
+ "integrity": "sha512-HcZh5BNq0aC52UoocJxaKORfFODWXZxtBaaZNuN3PUX3MoDsChsZqopzi5UupRhPHSEHotoiptqikjN/B77mYQ==",
"cpu": [
"ppc64"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-riscv64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz",
- "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.24.0.tgz",
+ "integrity": "sha512-bEh7dMn/h3QxeR2KTy1DUszQjUrIHPZKyO6aN1X4BCnhfYhuQqedHaa5MxSQA/06j3GpiIlFGSsy1c7Gf9padw==",
"cpu": [
"riscv64"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-s390x": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz",
- "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.24.0.tgz",
+ "integrity": "sha512-ZcQ6+qRkw1UcZGPyrCiHHkmBaj9SiCD8Oqd556HldP+QlpUIe2Wgn3ehQGVoPOvZvtHm8HPx+bH20c9pvbkX3g==",
"cpu": [
"s390x"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz",
- "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.24.0.tgz",
+ "integrity": "sha512-vbutsFqQ+foy3wSSbmjBXXIJ6PL3scghJoM8zCL142cGaZKAdCZHyf+Bpu/MmX9zT9Q0zFBVKb36Ma5Fzfa8xA==",
"cpu": [
"x64"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/netbsd-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz",
- "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.24.0.tgz",
+ "integrity": "sha512-hjQ0R/ulkO8fCYFsG0FZoH+pWgTTDreqpqY7UnQntnaKv95uP5iW3+dChxnx7C3trQQU40S+OgWhUVwCjVFLvg==",
"cpu": [
"x64"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"netbsd"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/openbsd-arm64": {
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.24.0.tgz",
+ "integrity": "sha512-MD9uzzkPQbYehwcN583yx3Tu5M8EIoTD+tUgKF982WYL9Pf5rKy9ltgD0eUgs8pvKnmizxjXZyLt0z6DC3rRXg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "openbsd"
+ ],
+ "engines": {
+ "node": ">=18"
}
},
"node_modules/@esbuild/openbsd-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz",
- "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.24.0.tgz",
+ "integrity": "sha512-4ir0aY1NGUhIC1hdoCzr1+5b43mw99uNwVzhIq1OY3QcEwPDO3B7WNXBzaKY5Nsf1+N11i1eOfFcq+D/gOS15Q==",
"cpu": [
"x64"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"openbsd"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/sunos-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz",
- "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.24.0.tgz",
+ "integrity": "sha512-jVzdzsbM5xrotH+W5f1s+JtUy1UWgjU0Cf4wMvffTB8m6wP5/kx0KiaLHlbJO+dMgtxKV8RQ/JvtlFcdZ1zCPA==",
"cpu": [
"x64"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"sunos"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/win32-arm64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz",
- "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.24.0.tgz",
+ "integrity": "sha512-iKc8GAslzRpBytO2/aN3d2yb2z8XTVfNV0PjGlCxKo5SgWmNXx82I/Q3aG1tFfS+A2igVCY97TJ8tnYwpUWLCA==",
"cpu": [
"arm64"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/win32-ia32": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz",
- "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.24.0.tgz",
+ "integrity": "sha512-vQW36KZolfIudCcTnaTpmLQ24Ha1RjygBo39/aLkM2kmjkWmZGEJ5Gn9l5/7tzXA42QGIoWbICfg6KLLkIw6yw==",
"cpu": [
"ia32"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/win32-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz",
- "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.24.0.tgz",
+ "integrity": "sha512-7IAFPrjSQIJrGsK6flwg7NFmwBoSTyF3rl7If0hNUFQU4ilTsEPL6GuMuU9BfIWVVGuRnuIidkSMC+c0Otu8IA==",
"cpu": [
"x64"
],
"dev": true,
- "license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@eslint-community/eslint-utils": {
@@ -594,11 +618,10 @@
}
},
"node_modules/@eslint/js": {
- "version": "9.15.0",
- "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.15.0.tgz",
- "integrity": "sha512-tMTqrY+EzbXmKJR5ToI8lxu7jaN5EdmrBFJpQk5JmSlyLsx6o4t27r883K5xsLuCYCpfKBCGswMSWXsM+jB7lg==",
+ "version": "9.16.0",
+ "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.16.0.tgz",
+ "integrity": "sha512-tw2HxzQkrbeuvyj1tG2Yqq+0H9wGoI2IMk4EOsQeX+vmd75FtJAzf+gTA69WF+baUKRYQ3x2kbLE08js5OsTVg==",
"dev": true,
- "license": "MIT",
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
}
@@ -795,6 +818,30 @@
"url": "https://github.com/sponsors/nzakas"
}
},
+ "node_modules/@lezer/common": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.3.tgz",
+ "integrity": "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA==",
+ "peer": true
+ },
+ "node_modules/@lezer/highlight": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.2.1.tgz",
+ "integrity": "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA==",
+ "peer": true,
+ "dependencies": {
+ "@lezer/common": "^1.0.0"
+ }
+ },
+ "node_modules/@lezer/lr": {
+ "version": "1.4.2",
+ "resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.2.tgz",
+ "integrity": "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA==",
+ "peer": true,
+ "dependencies": {
+ "@lezer/common": "^1.0.0"
+ }
+ },
"node_modules/@motionone/animation": {
"version": "10.18.0",
"resolved": "https://registry.npmjs.org/@motionone/animation/-/animation-10.18.0.tgz",
@@ -2611,7 +2658,7 @@
"version": "15.7.13",
"resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.13.tgz",
"integrity": "sha512-hCZTSvwbzWGvhqxp/RqVqwU999pBf2vp7hzIjiYOsl8wqOmUxkQ6ddw1cV3l8811+kdUFus/q4d1Y3E3SyEifA==",
- "dev": true,
+ "devOptional": true,
"license": "MIT"
},
"node_modules/@types/ramda": {
@@ -2627,7 +2674,7 @@
"version": "18.3.12",
"resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.12.tgz",
"integrity": "sha512-D2wOSq/d6Agt28q7rSI3jhU7G6aiuzljDGZ2hTZHIkrTLUI+AF3WMeKkEZ9nN2fkBAlcktT6vcZjDFiIhMYEQw==",
- "dev": true,
+ "devOptional": true,
"license": "MIT",
"dependencies": {
"@types/prop-types": "*",
@@ -2638,7 +2685,7 @@
"version": "18.3.1",
"resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.1.tgz",
"integrity": "sha512-qW1Mfv8taImTthu4KoXgDfLuk4bydU6Q/TkADnDWWHwi4NX4BR+LWfTp2sVmTqRrsHvyDDTelgelxJ+SsejKKQ==",
- "dev": true,
+ "devOptional": true,
"license": "MIT",
"dependencies": {
"@types/react": "*"
@@ -2887,16 +2934,15 @@
}
},
"node_modules/@vitejs/plugin-react-swc": {
- "version": "3.7.1",
- "resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-3.7.1.tgz",
- "integrity": "sha512-vgWOY0i1EROUK0Ctg1hwhtC3SdcDjZcdit4Ups4aPkDcB1jYhmo+RMYWY87cmXMhvtD5uf8lV89j2w16vkdSVg==",
+ "version": "3.7.2",
+ "resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-3.7.2.tgz",
+ "integrity": "sha512-y0byko2b2tSVVf5Gpng1eEhX1OvPC7x8yns1Fx8jDzlJp4LS6CMkCPfLw47cjyoMrshQDoQw4qcgjsU9VvlCew==",
"dev": true,
- "license": "MIT",
"dependencies": {
"@swc/core": "^1.7.26"
},
"peerDependencies": {
- "vite": "^4 || ^5"
+ "vite": "^4 || ^5 || ^6"
}
},
"node_modules/acorn": {
@@ -3264,7 +3310,7 @@
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz",
"integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==",
- "dev": true,
+ "devOptional": true,
"license": "MIT"
},
"node_modules/debounce-promise": {
@@ -3359,42 +3405,42 @@
}
},
"node_modules/esbuild": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz",
- "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.24.0.tgz",
+ "integrity": "sha512-FuLPevChGDshgSicjisSooU0cemp/sGXR841D5LHMB7mTVOmsEHcAxaH3irL53+8YDIeVNQEySh4DaYU/iuPqQ==",
"dev": true,
"hasInstallScript": true,
- "license": "MIT",
"bin": {
"esbuild": "bin/esbuild"
},
"engines": {
- "node": ">=12"
+ "node": ">=18"
},
"optionalDependencies": {
- "@esbuild/aix-ppc64": "0.21.5",
- "@esbuild/android-arm": "0.21.5",
- "@esbuild/android-arm64": "0.21.5",
- "@esbuild/android-x64": "0.21.5",
- "@esbuild/darwin-arm64": "0.21.5",
- "@esbuild/darwin-x64": "0.21.5",
- "@esbuild/freebsd-arm64": "0.21.5",
- "@esbuild/freebsd-x64": "0.21.5",
- "@esbuild/linux-arm": "0.21.5",
- "@esbuild/linux-arm64": "0.21.5",
- "@esbuild/linux-ia32": "0.21.5",
- "@esbuild/linux-loong64": "0.21.5",
- "@esbuild/linux-mips64el": "0.21.5",
- "@esbuild/linux-ppc64": "0.21.5",
- "@esbuild/linux-riscv64": "0.21.5",
- "@esbuild/linux-s390x": "0.21.5",
- "@esbuild/linux-x64": "0.21.5",
- "@esbuild/netbsd-x64": "0.21.5",
- "@esbuild/openbsd-x64": "0.21.5",
- "@esbuild/sunos-x64": "0.21.5",
- "@esbuild/win32-arm64": "0.21.5",
- "@esbuild/win32-ia32": "0.21.5",
- "@esbuild/win32-x64": "0.21.5"
+ "@esbuild/aix-ppc64": "0.24.0",
+ "@esbuild/android-arm": "0.24.0",
+ "@esbuild/android-arm64": "0.24.0",
+ "@esbuild/android-x64": "0.24.0",
+ "@esbuild/darwin-arm64": "0.24.0",
+ "@esbuild/darwin-x64": "0.24.0",
+ "@esbuild/freebsd-arm64": "0.24.0",
+ "@esbuild/freebsd-x64": "0.24.0",
+ "@esbuild/linux-arm": "0.24.0",
+ "@esbuild/linux-arm64": "0.24.0",
+ "@esbuild/linux-ia32": "0.24.0",
+ "@esbuild/linux-loong64": "0.24.0",
+ "@esbuild/linux-mips64el": "0.24.0",
+ "@esbuild/linux-ppc64": "0.24.0",
+ "@esbuild/linux-riscv64": "0.24.0",
+ "@esbuild/linux-s390x": "0.24.0",
+ "@esbuild/linux-x64": "0.24.0",
+ "@esbuild/netbsd-x64": "0.24.0",
+ "@esbuild/openbsd-arm64": "0.24.0",
+ "@esbuild/openbsd-x64": "0.24.0",
+ "@esbuild/sunos-x64": "0.24.0",
+ "@esbuild/win32-arm64": "0.24.0",
+ "@esbuild/win32-ia32": "0.24.0",
+ "@esbuild/win32-x64": "0.24.0"
}
},
"node_modules/escape-string-regexp": {
@@ -3411,18 +3457,17 @@
}
},
"node_modules/eslint": {
- "version": "9.15.0",
- "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.15.0.tgz",
- "integrity": "sha512-7CrWySmIibCgT1Os28lUU6upBshZ+GxybLOrmRzi08kS8MBuO8QA7pXEgYgY5W8vK3e74xv0lpjo9DbaGU9Rkw==",
+ "version": "9.16.0",
+ "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.16.0.tgz",
+ "integrity": "sha512-whp8mSQI4C8VXd+fLgSM0lh3UlmcFtVwUQjyKCFfsp+2ItAIYhlq/hqGahGqHE6cv9unM41VlqKk2VtKYR2TaA==",
"dev": true,
- "license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.2.0",
"@eslint-community/regexpp": "^4.12.1",
"@eslint/config-array": "^0.19.0",
"@eslint/core": "^0.9.0",
"@eslint/eslintrc": "^3.2.0",
- "@eslint/js": "9.15.0",
+ "@eslint/js": "9.16.0",
"@eslint/plugin-kit": "^0.2.3",
"@humanfs/node": "^0.16.6",
"@humanwhocodes/module-importer": "^1.0.1",
@@ -3484,13 +3529,12 @@
}
},
"node_modules/eslint-plugin-react-refresh": {
- "version": "0.4.14",
- "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.14.tgz",
- "integrity": "sha512-aXvzCTK7ZBv1e7fahFuR3Z/fyQQSIQ711yPgYRj+Oj64tyTgO4iQIDmYXDBqvSWQ/FA4OSCsXOStlF+noU0/NA==",
+ "version": "0.4.15",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.15.tgz",
+ "integrity": "sha512-poU5qfmwLS5WO69drZnB9J1vXv+NQkE0p+oIY4B85Z9IuvpaIdHa+9IE/sFrN79QW49QcHQIP6c7NHpDMQ9TvA==",
"dev": true,
- "license": "MIT",
"peerDependencies": {
- "eslint": ">=7"
+ "eslint": ">=8.40"
}
},
"node_modules/eslint-scope": {
@@ -5468,6 +5512,12 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/style-mod": {
+ "version": "4.1.2",
+ "resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.2.tgz",
+ "integrity": "sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw==",
+ "peer": true
+ },
"node_modules/style-value-types": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/style-value-types/-/style-value-types-5.0.0.tgz",
@@ -5778,21 +5828,20 @@
}
},
"node_modules/vite": {
- "version": "5.4.11",
- "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.11.tgz",
- "integrity": "sha512-c7jFQRklXua0mTzneGW9QVyxFjUgwcihC4bXEtujIo2ouWCe1Ajt/amn2PCxYnhYfd5k09JX3SB7OYWFKYqj8Q==",
+ "version": "6.0.2",
+ "resolved": "https://registry.npmjs.org/vite/-/vite-6.0.2.tgz",
+ "integrity": "sha512-XdQ+VsY2tJpBsKGs0wf3U/+azx8BBpYRHFAyKm5VeEZNOJZRB63q7Sc8Iup3k0TrN3KO6QgyzFf+opSbfY1y0g==",
"dev": true,
- "license": "MIT",
"dependencies": {
- "esbuild": "^0.21.3",
- "postcss": "^8.4.43",
- "rollup": "^4.20.0"
+ "esbuild": "^0.24.0",
+ "postcss": "^8.4.49",
+ "rollup": "^4.23.0"
},
"bin": {
"vite": "bin/vite.js"
},
"engines": {
- "node": "^18.0.0 || >=20.0.0"
+ "node": "^18.0.0 || ^20.0.0 || >=22.0.0"
},
"funding": {
"url": "https://github.com/vitejs/vite?sponsor=1"
@@ -5801,19 +5850,25 @@
"fsevents": "~2.3.3"
},
"peerDependencies": {
- "@types/node": "^18.0.0 || >=20.0.0",
+ "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0",
+ "jiti": ">=1.21.0",
"less": "*",
"lightningcss": "^1.21.0",
"sass": "*",
"sass-embedded": "*",
"stylus": "*",
"sugarss": "*",
- "terser": "^5.4.0"
+ "terser": "^5.16.0",
+ "tsx": "^4.8.1",
+ "yaml": "^2.4.2"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
},
+ "jiti": {
+ "optional": true
+ },
"less": {
"optional": true
},
@@ -5834,6 +5889,12 @@
},
"terser": {
"optional": true
+ },
+ "tsx": {
+ "optional": true
+ },
+ "yaml": {
+ "optional": true
}
}
},
@@ -5843,6 +5904,12 @@
"integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==",
"license": "MIT"
},
+ "node_modules/w3c-keyname": {
+ "version": "2.2.8",
+ "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz",
+ "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==",
+ "peer": true
+ },
"node_modules/web-streams-polyfill": {
"version": "3.3.3",
"resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz",
diff --git a/playground/package.json b/playground/package.json
index 3f8066719e..cea10661a4 100644
--- a/playground/package.json
+++ b/playground/package.json
@@ -22,11 +22,11 @@
"@types/swagger-ui-react": "^4.18.3",
"@typescript-eslint/eslint-plugin": "^8.15.0",
"@typescript-eslint/parser": "^8.15.0",
- "@vitejs/plugin-react-swc": "^3.7.1",
- "eslint": "^9.15.0",
+ "@vitejs/plugin-react-swc": "^3.7.2",
+ "eslint": "^9.16.0",
"eslint-plugin-react-hooks": "^5.0.0",
- "eslint-plugin-react-refresh": "^0.4.14",
+ "eslint-plugin-react-refresh": "^0.4.15",
"typescript": "^5.7.2",
- "vite": "^5.4.11"
+ "vite": "^6.0.2"
}
}
From 2776f1e1131e0b4ff5e453f7a55e288137fa3f40 Mon Sep 17 00:00:00 2001
From: Keenan Nemetz
Date: Thu, 5 Dec 2024 10:27:45 -0800
Subject: [PATCH 37/47] feat: Aggregate order alias targeting (#3293)
## Relevant issue(s)
Resolves #3239
Resolves #2820
## Description
This PR enables ordering by aggregated fields by using an alias to
target them. This was mostly implemented in a previous PR, so the
changes here are tests to verify that it works.
## Tasks
- [x] I made sure the code is well commented, particularly
hard-to-understand areas.
- [x] I made sure the repository-held documentation is changed
accordingly.
- [x] I made sure the pull request title adheres to the conventional
commit style (the subset used in the project can be found in
[tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)).
- [x] I made sure to discuss its limitations such as threats to
validity, vulnerability to mistake and misuse, robustness to
invalidation of assumptions, resource requirements, ...
## How has this been tested?
Added integration tests.
Specify the platform(s) on which this was tested:
- MacOS
---
internal/db/base/compare.go | 2 +-
.../one_to_many/with_count_order_test.go | 87 +++++++++++++++++
.../query/one_to_many/with_sum_order_test.go | 95 +++++++++++++++++++
3 files changed, 183 insertions(+), 1 deletion(-)
create mode 100644 tests/integration/query/one_to_many/with_count_order_test.go
create mode 100644 tests/integration/query/one_to_many/with_sum_order_test.go
diff --git a/internal/db/base/compare.go b/internal/db/base/compare.go
index c5636f9e15..63bf27bf0b 100644
--- a/internal/db/base/compare.go
+++ b/internal/db/base/compare.go
@@ -34,7 +34,7 @@ func Compare(a, b any) int {
case bool:
return compareBool(v, b.(bool))
case int:
- return compareInt(int64(v), b.(int64))
+ return compareInt(int64(v), int64(b.(int)))
case int64:
return compareInt(v, b.(int64))
case uint64:
diff --git a/tests/integration/query/one_to_many/with_count_order_test.go b/tests/integration/query/one_to_many/with_count_order_test.go
new file mode 100644
index 0000000000..11b26ae396
--- /dev/null
+++ b/tests/integration/query/one_to_many/with_count_order_test.go
@@ -0,0 +1,87 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package one_to_many
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQueryOneToMany_WithCountAliasOrder_ShouldOrderResults(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "One-to-many relation query from many side with order alias",
+ Actions: []any{
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "John Grisham",
+ "age": 65,
+ "verified": true
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "Cornelia Funke",
+ "age": 62,
+ "verified": false
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Painted House",
+ "rating": 4.9,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "A Time for Mercy",
+ "rating": 4.5,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Theif Lord",
+ "rating": 4.8,
+ "author_id": testUtils.NewDocIndex(1, 1),
+ },
+ },
+ testUtils.Request{
+ Request: `query {
+ Author(order: {_alias: {publishedCount: DESC}}) {
+ name
+ publishedCount: _count(published: {})
+ }
+ }`,
+ Results: map[string]any{
+ "Author": []map[string]any{
+ {
+ "name": "John Grisham",
+ "publishedCount": int64(2),
+ },
+ {
+ "name": "Cornelia Funke",
+ "publishedCount": int64(1),
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
diff --git a/tests/integration/query/one_to_many/with_sum_order_test.go b/tests/integration/query/one_to_many/with_sum_order_test.go
new file mode 100644
index 0000000000..4c6c7aa0bf
--- /dev/null
+++ b/tests/integration/query/one_to_many/with_sum_order_test.go
@@ -0,0 +1,95 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package one_to_many
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQueryOneToMany_WithSumWithAliasOrder_ShouldOrderResults(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "One-to-many relation query from many side with sum with order alias",
+ Actions: []any{
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `{
+ "name": "Painted House",
+ "rating": 4.9,
+ "author_id": "bae-e1ea288f-09fa-55fa-b0b5-0ac8941ea35b"
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `{
+ "name": "A Time for Mercy",
+ "rating": 4.5,
+ "author_id": "bae-e1ea288f-09fa-55fa-b0b5-0ac8941ea35b"
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `{
+ "name": "The Associate",
+ "rating": 4.2,
+ "author_id": "bae-e1ea288f-09fa-55fa-b0b5-0ac8941ea35b"
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `{
+ "name": "Theif Lord",
+ "rating": 4.8,
+ "author_id": "bae-72e8c691-9f20-55e7-9228-8af1cf54cace"
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "John Grisham",
+ "age": 65,
+ "verified": true
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "Cornelia Funke",
+ "age": 62,
+ "verified": false
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Author(order: {_alias: {totalRating: DESC}}) {
+ name
+ totalRating: _sum(published: {field: rating})
+ }
+ }`,
+ Results: map[string]any{
+ "Author": []map[string]any{
+ {
+ "name": "John Grisham",
+ "totalRating": 13.600000000000001,
+ },
+ {
+ "name": "Cornelia Funke",
+ "totalRating": 4.8,
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
From e4599e85aa5f3351bce533a21346fecc30e45883 Mon Sep 17 00:00:00 2001
From: Keenan Nemetz
Date: Thu, 5 Dec 2024 13:49:01 -0800
Subject: [PATCH 38/47] feat: Aggregate filter alias targeting (#3252)
## Relevant issue(s)
Resolves #3195
## Description
This PR enables aggregate alias targeting in filters.
Blocked by #3253
## Tasks
- [x] I made sure the code is well commented, particularly
hard-to-understand areas.
- [x] I made sure the repository-held documentation is changed
accordingly.
- [x] I made sure the pull request title adheres to the conventional
commit style (the subset used in the project can be found in
[tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)).
- [x] I made sure to discuss its limitations such as threats to
validity, vulnerability to mistake and misuse, robustness to
invalidation of assumptions, resource requirements, ...
## How has this been tested?
Added and updated integration tests.
Specify the platform(s) on which this was tested:
- MacOS
---
internal/planner/average.go | 6 +-
internal/planner/count.go | 6 +-
internal/planner/max.go | 5 +-
internal/planner/min.go | 5 +-
internal/planner/select.go | 14 +-
internal/planner/sum.go | 6 +-
internal/planner/top.go | 10 +-
.../query/one_to_many/with_count_test.go | 17 +-
.../with_group_aggregate_alias_filter_test.go | 303 ++++++++++++++++++
9 files changed, 350 insertions(+), 22 deletions(-)
create mode 100644 tests/integration/query/simple/with_group_aggregate_alias_filter_test.go
diff --git a/internal/planner/average.go b/internal/planner/average.go
index c5274b5b6f..76bbfc107d 100644
--- a/internal/planner/average.go
+++ b/internal/planner/average.go
@@ -28,6 +28,8 @@ type averageNode struct {
virtualFieldIndex int
execInfo averageExecInfo
+
+ aggregateFilter *mapper.Filter
}
type averageExecInfo struct {
@@ -37,6 +39,7 @@ type averageExecInfo struct {
func (p *Planner) Average(
field *mapper.Aggregate,
+ filter *mapper.Filter,
) (*averageNode, error) {
var sumField *mapper.Aggregate
var countField *mapper.Aggregate
@@ -57,6 +60,7 @@ func (p *Planner) Average(
countFieldIndex: countField.Index,
virtualFieldIndex: field.Index,
docMapper: docMapper{field.DocumentMapping},
+ aggregateFilter: filter,
}, nil
}
@@ -102,7 +106,7 @@ func (n *averageNode) Next() (bool, error) {
return false, client.NewErrUnhandledType("sum", sumProp)
}
- return true, nil
+ return mapper.RunFilter(n.currentValue, n.aggregateFilter)
}
func (n *averageNode) SetPlan(p planNode) { n.plan = p }
diff --git a/internal/planner/count.go b/internal/planner/count.go
index b71fcab1e5..1b58109749 100644
--- a/internal/planner/count.go
+++ b/internal/planner/count.go
@@ -35,6 +35,7 @@ type countNode struct {
virtualFieldIndex int
aggregateMapping []mapper.AggregateTarget
+ aggregateFilter *mapper.Filter
execInfo countExecInfo
}
@@ -44,11 +45,12 @@ type countExecInfo struct {
iterations uint64
}
-func (p *Planner) Count(field *mapper.Aggregate, host *mapper.Select) (*countNode, error) {
+func (p *Planner) Count(field *mapper.Aggregate, host *mapper.Select, filter *mapper.Filter) (*countNode, error) {
return &countNode{
p: p,
virtualFieldIndex: field.Index,
aggregateMapping: field.AggregateTargets,
+ aggregateFilter: filter,
docMapper: docMapper{field.DocumentMapping},
}, nil
}
@@ -181,7 +183,7 @@ func (n *countNode) Next() (bool, error) {
}
n.currentValue.Fields[n.virtualFieldIndex] = count
- return true, nil
+ return mapper.RunFilter(n.currentValue, n.aggregateFilter)
}
// countDocs counts the number of documents in a slice, skipping over hidden items
diff --git a/internal/planner/max.go b/internal/planner/max.go
index c3eb6b488e..530e60e25e 100644
--- a/internal/planner/max.go
+++ b/internal/planner/max.go
@@ -33,6 +33,7 @@ type maxNode struct {
// that contains the result of the aggregate.
virtualFieldIndex int
aggregateMapping []mapper.AggregateTarget
+ aggregateFilter *mapper.Filter
execInfo maxExecInfo
}
@@ -45,11 +46,13 @@ type maxExecInfo struct {
func (p *Planner) Max(
field *mapper.Aggregate,
parent *mapper.Select,
+ filter *mapper.Filter,
) (*maxNode, error) {
return &maxNode{
p: p,
parent: parent,
aggregateMapping: field.AggregateTargets,
+ aggregateFilter: filter,
virtualFieldIndex: field.Index,
docMapper: docMapper{field.DocumentMapping},
}, nil
@@ -252,5 +255,5 @@ func (n *maxNode) Next() (bool, error) {
res, _ := max.Int64()
n.currentValue.Fields[n.virtualFieldIndex] = res
}
- return true, nil
+ return mapper.RunFilter(n.currentValue, n.aggregateFilter)
}
diff --git a/internal/planner/min.go b/internal/planner/min.go
index 99278785bc..be70a8ccb9 100644
--- a/internal/planner/min.go
+++ b/internal/planner/min.go
@@ -33,6 +33,7 @@ type minNode struct {
// that contains the result of the aggregate.
virtualFieldIndex int
aggregateMapping []mapper.AggregateTarget
+ aggregateFilter *mapper.Filter
execInfo minExecInfo
}
@@ -45,11 +46,13 @@ type minExecInfo struct {
func (p *Planner) Min(
field *mapper.Aggregate,
parent *mapper.Select,
+ filter *mapper.Filter,
) (*minNode, error) {
return &minNode{
p: p,
parent: parent,
aggregateMapping: field.AggregateTargets,
+ aggregateFilter: filter,
virtualFieldIndex: field.Index,
docMapper: docMapper{field.DocumentMapping},
}, nil
@@ -252,5 +255,5 @@ func (n *minNode) Next() (bool, error) {
res, _ := min.Int64()
n.currentValue.Fields[n.virtualFieldIndex] = res
}
- return true, nil
+ return mapper.RunFilter(n.currentValue, n.aggregateFilter)
}
diff --git a/internal/planner/select.go b/internal/planner/select.go
index d0e816cfb9..d3bcbb910d 100644
--- a/internal/planner/select.go
+++ b/internal/planner/select.go
@@ -19,6 +19,7 @@ import (
"github.com/sourcenetwork/defradb/internal/core"
"github.com/sourcenetwork/defradb/internal/db/base"
"github.com/sourcenetwork/defradb/internal/keys"
+ "github.com/sourcenetwork/defradb/internal/planner/filter"
"github.com/sourcenetwork/defradb/internal/planner/mapper"
)
@@ -344,18 +345,21 @@ func (n *selectNode) initFields(selectReq *mapper.Select) ([]aggregateNode, erro
case *mapper.Aggregate:
var plan aggregateNode
var aggregateError error
+ var aggregateFilter *mapper.Filter
+ // extract aggregate filters from the select
+ selectReq.Filter, aggregateFilter = filter.SplitByFields(selectReq.Filter, f.Field)
switch f.Name {
case request.CountFieldName:
- plan, aggregateError = n.planner.Count(f, selectReq)
+ plan, aggregateError = n.planner.Count(f, selectReq, aggregateFilter)
case request.SumFieldName:
- plan, aggregateError = n.planner.Sum(f, selectReq)
+ plan, aggregateError = n.planner.Sum(f, selectReq, aggregateFilter)
case request.AverageFieldName:
- plan, aggregateError = n.planner.Average(f)
+ plan, aggregateError = n.planner.Average(f, aggregateFilter)
case request.MaxFieldName:
- plan, aggregateError = n.planner.Max(f, selectReq)
+ plan, aggregateError = n.planner.Max(f, selectReq, aggregateFilter)
case request.MinFieldName:
- plan, aggregateError = n.planner.Min(f, selectReq)
+ plan, aggregateError = n.planner.Min(f, selectReq, aggregateFilter)
}
if aggregateError != nil {
diff --git a/internal/planner/sum.go b/internal/planner/sum.go
index c790cba60d..a77e56da3d 100644
--- a/internal/planner/sum.go
+++ b/internal/planner/sum.go
@@ -30,6 +30,7 @@ type sumNode struct {
isFloat bool
virtualFieldIndex int
aggregateMapping []mapper.AggregateTarget
+ aggregateFilter *mapper.Filter
execInfo sumExecInfo
}
@@ -42,6 +43,7 @@ type sumExecInfo struct {
func (p *Planner) Sum(
field *mapper.Aggregate,
parent *mapper.Select,
+ filter *mapper.Filter,
) (*sumNode, error) {
isFloat := false
for _, target := range field.AggregateTargets {
@@ -60,6 +62,7 @@ func (p *Planner) Sum(
p: p,
isFloat: isFloat,
aggregateMapping: field.AggregateTargets,
+ aggregateFilter: filter,
virtualFieldIndex: field.Index,
docMapper: docMapper{field.DocumentMapping},
}, nil
@@ -310,8 +313,7 @@ func (n *sumNode) Next() (bool, error) {
typedSum = int64(sum)
}
n.currentValue.Fields[n.virtualFieldIndex] = typedSum
-
- return true, nil
+ return mapper.RunFilter(n.currentValue, n.aggregateFilter)
}
func (n *sumNode) SetPlan(p planNode) { n.plan = p }
diff --git a/internal/planner/top.go b/internal/planner/top.go
index 6224b6d62d..658dc66dd8 100644
--- a/internal/planner/top.go
+++ b/internal/planner/top.go
@@ -199,15 +199,15 @@ func (p *Planner) Top(m *mapper.Select) (*topLevelNode, error) {
var err error
switch field.GetName() {
case request.CountFieldName:
- child, err = p.Count(f, m)
+ child, err = p.Count(f, m, nil)
case request.SumFieldName:
- child, err = p.Sum(f, m)
+ child, err = p.Sum(f, m, nil)
case request.AverageFieldName:
- child, err = p.Average(f)
+ child, err = p.Average(f, nil)
case request.MaxFieldName:
- child, err = p.Max(f, m)
+ child, err = p.Max(f, m, nil)
case request.MinFieldName:
- child, err = p.Min(f, m)
+ child, err = p.Min(f, m, nil)
}
if err != nil {
return nil, err
diff --git a/tests/integration/query/one_to_many/with_count_test.go b/tests/integration/query/one_to_many/with_count_test.go
index 77d4e754f3..77905ed748 100644
--- a/tests/integration/query/one_to_many/with_count_test.go
+++ b/tests/integration/query/one_to_many/with_count_test.go
@@ -119,11 +119,9 @@ func TestQueryOneToManyWithCount(t *testing.T) {
}
}
-// This test documents the behavior of aggregate alias targeting which is not yet implemented.
-// https://github.com/sourcenetwork/defradb/issues/3195
-func TestQueryOneToMany_WithCountAliasFilter_ShouldFilterAll(t *testing.T) {
+func TestQueryOneToMany_WithCountAliasFilter_ShouldMatchAll(t *testing.T) {
test := testUtils.TestCase{
- Description: "One-to-many relation query from many side with count",
+ Description: "One-to-many relation query from many side with count alias",
Actions: []any{
testUtils.CreateDoc{
CollectionID: 1,
@@ -173,7 +171,16 @@ func TestQueryOneToMany_WithCountAliasFilter_ShouldFilterAll(t *testing.T) {
}
}`,
Results: map[string]any{
- "Author": []map[string]any{},
+ "Author": []map[string]any{
+ {
+ "name": "Cornelia Funke",
+ "publishedCount": 1,
+ },
+ {
+ "name": "John Grisham",
+ "publishedCount": 2,
+ },
+ },
},
},
},
diff --git a/tests/integration/query/simple/with_group_aggregate_alias_filter_test.go b/tests/integration/query/simple/with_group_aggregate_alias_filter_test.go
new file mode 100644
index 0000000000..037a187cad
--- /dev/null
+++ b/tests/integration/query/simple/with_group_aggregate_alias_filter_test.go
@@ -0,0 +1,303 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package simple
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQuerySimple_WithGroupAverageAliasFilter_FiltersResults(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with group average alias filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `type Users {
+ Name: String
+ Score: Int
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Score": 10
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Score": 20
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Alice",
+ "Score": 40
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Alice",
+ "Score": 0
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(groupBy: [Name], filter: {_alias: {averageScore: {_eq: 20}}}) {
+ Name
+ averageScore: _avg(_group: {field: Score})
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "Name": "Alice",
+ "averageScore": float64(20),
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQuerySimple_WithGroupSumAliasFilter_FiltersResults(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with group sum alias filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `type Users {
+ Name: String
+ Score: Int
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Score": 10
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Score": 20
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Alice",
+ "Score": 40
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Alice",
+ "Score": 0
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(groupBy: [Name], filter: {_alias: {totalScore: {_eq: 40}}}) {
+ Name
+ totalScore: _sum(_group: {field: Score})
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "Name": "Alice",
+ "totalScore": float64(40),
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQuerySimple_WithGroupMinAliasFilter_FiltersResults(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with group min alias filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `type Users {
+ Name: String
+ Score: Int
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Score": 10
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Score": 20
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Alice",
+ "Score": 40
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Alice",
+ "Score": 0
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(groupBy: [Name], filter: {_alias: {minScore: {_eq: 0}}}) {
+ Name
+ minScore: _min(_group: {field: Score})
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "Name": "Alice",
+ "minScore": int64(0),
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQuerySimple_WithGroupMaxAliasFilter_FiltersResults(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with group max alias filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `type Users {
+ Name: String
+ Score: Int
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Score": 10
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Score": 20
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Alice",
+ "Score": 40
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Alice",
+ "Score": 0
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(groupBy: [Name], filter: {_alias: {maxScore: {_eq: 40}}}) {
+ Name
+ maxScore: _max(_group: {field: Score})
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "Name": "Alice",
+ "maxScore": int64(40),
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQuerySimple_WithGroupCountAliasFilter_FiltersResults(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query with group count alias filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `type Users {
+ Name: String
+ Score: Int
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Score": 10
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Score": 20
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Alice",
+ "Score": 40
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Alice",
+ "Score": 0
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Alice",
+ "Score": 5
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users(groupBy: [Name], filter: {_alias: {scores: {_eq: 3}}}) {
+ Name
+ scores: _count(_group: {})
+ }
+ }`,
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "Name": "Alice",
+ "scores": int64(3),
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
From 89f9f4166d67b5d551ff82ed3a00dd1cc7ef779e Mon Sep 17 00:00:00 2001
From: Chris Quigley
Date: Thu, 5 Dec 2024 18:09:58 -0500
Subject: [PATCH 39/47] fix: Add support for operationName and variables in
HTTP GET (#3292)
## Relevant issue(s)
Resolves #3153
## Description
I have made it so that operationName and variables are supported by HTTP
GET requests. I did this by modifying the `ExecRequest` function inside
`http/handler_store.go` such that `operationName` and `variables`
parameters are extracted if they are present.
I added two new tests to `http/handler_store_test.go` which test this
functionality. See:
`TestExecRequest_WithValidQuery_HttpGet_WithOperationName_OmitsErrors`
and
`TestExecRequest_HttpGet_WithVariables_OmitsErrors`
## Tasks
- [x] I made sure the code is well commented, particularly
hard-to-understand areas.
- [x] I made sure the repository-held documentation is changed
accordingly.
- [x] I made sure the pull request title adheres to the conventional
commit style (the subset used in the project can be found in
[tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)).
## How has this been tested?
The platform(s) on which this was tested:
- Windows
---
http/handler_ccip_test.go | 10 +++-
http/handler_store.go | 15 +++++-
http/handler_store_test.go | 98 ++++++++++++++++++++++++++++++++++++++
3 files changed, 120 insertions(+), 3 deletions(-)
diff --git a/http/handler_ccip_test.go b/http/handler_ccip_test.go
index 43797b622d..1888effe37 100644
--- a/http/handler_ccip_test.go
+++ b/http/handler_ccip_test.go
@@ -67,7 +67,7 @@ func TestCCIPGet_WithValidData(t *testing.T) {
resHex, err := hex.DecodeString(strings.TrimPrefix(ccipRes.Data, "0x"))
require.NoError(t, err)
- assert.JSONEq(t, `{"data": {"User": [{"name": "bob"}]}}`, string(resHex))
+ assert.JSONEq(t, `{"data": {"User": [{"name": "bob"}, {"name": "adam"}]}}`, string(resHex))
}
func TestCCIPGet_WithSubscription(t *testing.T) {
@@ -153,7 +153,7 @@ func TestCCIPPost_WithValidData(t *testing.T) {
resHex, err := hex.DecodeString(strings.TrimPrefix(ccipRes.Data, "0x"))
require.NoError(t, err)
- assert.JSONEq(t, `{"data": {"User": [{"name": "bob"}]}}`, string(resHex))
+ assert.JSONEq(t, `{"data": {"User": [{"name": "bob"}, {"name": "adam"}]}}`, string(resHex))
}
func TestCCIPPost_WithInvalidGraphQLRequest(t *testing.T) {
@@ -210,5 +210,11 @@ func setupDatabase(t *testing.T) client.DB {
err = col.Create(ctx, doc)
require.NoError(t, err)
+ doc2, err := client.NewDocFromJSON([]byte(`{"name": "adam"}`), col.Definition())
+ require.NoError(t, err)
+
+ err = col.Create(ctx, doc2)
+ require.NoError(t, err)
+
return cdb
}
diff --git a/http/handler_store.go b/http/handler_store.go
index 35436f3762..3d2cef63de 100644
--- a/http/handler_store.go
+++ b/http/handler_store.go
@@ -284,7 +284,21 @@ func (s *storeHandler) ExecRequest(rw http.ResponseWriter, req *http.Request) {
var request GraphQLRequest
switch {
case req.URL.Query().Get("query") != "":
+
request.Query = req.URL.Query().Get("query")
+
+ request.OperationName = req.URL.Query().Get("operationName")
+
+ variablesFromQuery := req.URL.Query().Get("variables")
+ if variablesFromQuery != "" {
+ var variables map[string]any
+ if err := json.Unmarshal([]byte(variablesFromQuery), &variables); err != nil {
+ responseJSON(rw, http.StatusBadRequest, errorResponse{err})
+ return
+ }
+ request.Variables = variables
+ }
+
case req.Body != nil:
if err := requestJSON(req, &request); err != nil {
responseJSON(rw, http.StatusBadRequest, errorResponse{err})
@@ -294,7 +308,6 @@ func (s *storeHandler) ExecRequest(rw http.ResponseWriter, req *http.Request) {
responseJSON(rw, http.StatusBadRequest, errorResponse{ErrMissingRequest})
return
}
-
var options []client.RequestOption
if request.OperationName != "" {
options = append(options, client.WithOperationName(request.OperationName))
diff --git a/http/handler_store_test.go b/http/handler_store_test.go
index dabf9648bd..7d39c4be07 100644
--- a/http/handler_store_test.go
+++ b/http/handler_store_test.go
@@ -16,6 +16,7 @@ import (
"io"
"net/http"
"net/http/httptest"
+ "net/url"
"testing"
"github.com/stretchr/testify/assert"
@@ -93,3 +94,100 @@ func TestExecRequest_WithInvalidQuery_HasSpecCompliantErrors(t *testing.T) {
"message": "Cannot query field \"invalid\" on type \"User\".",
}})
}
+
+func TestExecRequest_HttpGet_WithOperationName(t *testing.T) {
+ cdb := setupDatabase(t)
+
+ query := `
+ query UserQuery {
+ User {
+ name
+ }
+ }
+ query UserQueryWithDocID {
+ User {
+ _docID
+ name
+ }
+ }
+ `
+ operationName := "UserQuery"
+
+ encodedQuery := url.QueryEscape(query)
+ encodedOperationName := url.QueryEscape(operationName)
+
+ endpointURL := "http://localhost:9181/api/v0/graphql?query=" + encodedQuery + "&operationName=" + encodedOperationName
+
+ req := httptest.NewRequest(http.MethodGet, endpointURL, nil)
+ rec := httptest.NewRecorder()
+
+ handler, err := NewHandler(cdb)
+ require.NoError(t, err)
+ handler.ServeHTTP(rec, req)
+
+ res := rec.Result()
+ require.NotNil(t, res.Body)
+
+ resData, err := io.ReadAll(res.Body)
+ require.NoError(t, err)
+
+ var gqlResponse map[string]any
+ err = json.Unmarshal(resData, &gqlResponse)
+ require.NoError(t, err)
+
+ // Ensure the response data contains names, but not the _docID field
+ expectedJSON := `{
+ "data": {
+ "User": [
+ {"name": "bob"},
+ {"name": "adam"}
+ ]
+ }
+ }`
+ assert.JSONEq(t, expectedJSON, string(resData))
+}
+
+func TestExecRequest_HttpGet_WithVariables(t *testing.T) {
+ cdb := setupDatabase(t)
+
+ query := `query getUser($filter: UserFilterArg) {
+ User(filter: $filter) {
+ name
+ }
+ }`
+ operationName := "getUser"
+ variables := `{"filter":{"name":{"_eq":"bob"}}}`
+
+ encodedQuery := url.QueryEscape(query)
+ encodedOperationName := url.QueryEscape(operationName)
+ encodedVariables := url.QueryEscape(variables)
+
+ endpointURL := "http://localhost:9181/api/v0/graphql?query=" + encodedQuery + "&operationName=" + encodedOperationName + "&variables=" + encodedVariables
+
+ req := httptest.NewRequest(http.MethodGet, endpointURL, nil)
+ rec := httptest.NewRecorder()
+
+ handler, err := NewHandler(cdb)
+ require.NoError(t, err)
+ handler.ServeHTTP(rec, req)
+
+ res := rec.Result()
+ require.NotNil(t, res.Body)
+
+ resData, err := io.ReadAll(res.Body)
+ require.NoError(t, err)
+
+ var gqlResponse map[string]any
+ err = json.Unmarshal(resData, &gqlResponse)
+ require.NoError(t, err)
+
+ // Ensure only bob is returned, because of the filter variable
+ expectedJSON := `{
+ "data": {
+ "User": [
+ {"name": "bob"}
+ ]
+ }
+ }`
+ assert.JSONEq(t, expectedJSON, string(resData))
+}
From b4a3eba05af6b6b7e0039cc0bd6b386814b404f4 Mon Sep 17 00:00:00 2001
From: Shahzad Lone
Date: Fri, 6 Dec 2024 03:32:00 -0500
Subject: [PATCH 40/47] fix: Make requests with no identity work with "*"
target (#3278)
## Relevant issue(s)
Resolves #3276
## Description
- Fix the bug where a request without an identity still wouldn't be able
to access a document even if there was a "*" relationship
## Tasks
- [x] I made sure the code is well commented, particularly
hard-to-understand areas.
- [x] I made sure the repository-held documentation is changed
accordingly.
- [x] I made sure the pull request title adheres to the conventional
commit style (the subset used in the project can be found in
[tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)).
- [x] I made sure to discuss its limitations such as threats to
validity, vulnerability to mistake and misuse, robustness to
invalidation of assumptions, resource requirements, ...
## How has this been tested?
- Added tests
Specify the platform(s) on which this was tested:
- WSL2 (Manjaro)
---
acp/README.md | 3 +-
internal/db/permission/check.go | 14 +-
.../doc_actor/add/with_manager_gql_test.go | 33 ++-
.../doc_actor/add/with_manager_test.go | 30 ++-
.../doc_actor/add/with_only_write_gql_test.go | 11 +-
.../doc_actor/add/with_only_write_test.go | 10 +-
.../doc_actor/add/with_reader_gql_test.go | 11 +-
.../doc_actor/add/with_reader_test.go | 9 +-
.../add/with_target_all_actors_gql_test.go | 210 +++++++++++++++++-
.../add/with_target_all_actors_test.go | 208 +++++++++++++++++
.../doc_actor/add/with_update_gql_test.go | 18 +-
.../doc_actor/add/with_update_test.go | 20 +-
.../delete/with_target_all_actors_test.go | 165 +++++++++++++-
13 files changed, 677 insertions(+), 65 deletions(-)
diff --git a/acp/README.md b/acp/README.md
index 3fedb5a274..0f73662dd9 100644
--- a/acp/README.md
+++ b/acp/README.md
@@ -631,7 +631,8 @@ Result:
Error: document not found or not authorized to access
```
-Sometimes we might want to give a specific access (form a relationship) not just to one identity, but any identity.
+Sometimes we might want to give specific access (i.e. form a relationship) not just to one identity, but to
+any identity (including even requests made with no identity).
In that case we can specify "*" instead of specifying an explicit `actor`:
```sh
defradb client acp relationship add \
diff --git a/internal/db/permission/check.go b/internal/db/permission/check.go
index ce111bccaf..599329855b 100644
--- a/internal/db/permission/check.go
+++ b/internal/db/permission/check.go
@@ -67,18 +67,22 @@ func CheckAccessOfDocOnCollectionWithACP(
return true, nil
}
- // At this point if the request is not signatured, then it has no access, because:
- // the collection has a policy on it, and the acp is enabled/available,
- // and the document is not public (is registered with acp).
+ var identityValue string
if !identity.HasValue() {
- return false, nil
+ // We can't assume that there is no access just because there is no identity, even if the document
+ // is registered with acp. This is because it is possible that acp has a registered relation targeting
+ // "*" (any) actor, which would mean that even a request without an identity might be able to access
+ // a document registered with acp. So we pass an empty `did` to accommodate that case.
+ identityValue = ""
+ } else {
+ identityValue = identity.Value().DID
}
// Now actually check using the signature if this identity has access or not.
hasAccess, err := acpSystem.CheckDocAccess(
ctx,
permission,
- identity.Value().DID,
+ identityValue,
policyID,
resourceName,
docID,
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_manager_gql_test.go b/tests/integration/acp/relationship/doc_actor/add/with_manager_gql_test.go
index 757053f365..813294a7fd 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_manager_gql_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_manager_gql_test.go
@@ -26,10 +26,13 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_GQL_ManagerCanR
Description: "Test acp, owner makes a manager that gives itself read and write access",
- SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
- // GQL mutation will return no error when wrong identity is used so test that separately.
- testUtils.GQLRequestMutationType,
- }),
+ SupportedMutationTypes: immutable.Some(
+ []testUtils.MutationType{
+ // GQL mutation will return no error when wrong identity is used (only for update requests),
+ // so test that separately.
+ testUtils.GQLRequestMutationType,
+ },
+ ),
Actions: []any{
testUtils.AddPolicy{
@@ -274,10 +277,13 @@ func TestACP_OwnerMakesManagerButManagerCanNotPerformOperations_GQL_ManagerCantR
Description: "Test acp, owner makes a manager, manager can't read or write",
- SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
- // GQL mutation will return no error when wrong identity is used so test that separately.
- testUtils.GQLRequestMutationType,
- }),
+ SupportedMutationTypes: immutable.Some(
+ []testUtils.MutationType{
+ // GQL mutation will return no error when wrong identity is used (only for update requests),
+ // so test that separately.
+ testUtils.GQLRequestMutationType,
+ },
+ ),
Actions: []any{
testUtils.AddPolicy{
@@ -442,10 +448,13 @@ func TestACP_ManagerAddsRelationshipWithRelationItDoesNotManageAccordingToPolicy
Description: "Test acp, manager adds relationship with relation it does not manage according to policy, error",
- SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
- // GQL mutation will return no error when wrong identity is used so test that separately.
- testUtils.GQLRequestMutationType,
- }),
+ SupportedMutationTypes: immutable.Some(
+ []testUtils.MutationType{
+ // GQL mutation will return no error when wrong identity is used (only for update requests),
+ // so test that separately.
+ testUtils.GQLRequestMutationType,
+ },
+ ),
Actions: []any{
testUtils.AddPolicy{
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_manager_test.go b/tests/integration/acp/relationship/doc_actor/add/with_manager_test.go
index 485c130805..0b972acf8e 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_manager_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_manager_test.go
@@ -601,10 +601,12 @@ func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_ManagerCanReadA
Description: "Test acp, owner makes a manager that gives itself read and write access",
- SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
- testUtils.CollectionNamedMutationType,
- testUtils.CollectionSaveMutationType,
- }),
+ SupportedMutationTypes: immutable.Some(
+ []testUtils.MutationType{
+ // GQL mutation will return no error when wrong identity is used with gql (only for update requests),
+ testUtils.CollectionNamedMutationType,
+ testUtils.CollectionSaveMutationType,
+ }),
Actions: []any{
testUtils.AddPolicy{
@@ -849,10 +851,12 @@ func TestACP_ManagerAddsRelationshipWithRelationItDoesNotManageAccordingToPolicy
Description: "Test acp, manager adds relationship with relation it does not manage according to policy, error",
- SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
- testUtils.CollectionNamedMutationType,
- testUtils.CollectionSaveMutationType,
- }),
+ SupportedMutationTypes: immutable.Some(
+ []testUtils.MutationType{
+ // GQL mutation will return no error when wrong identity is used with gql (only for update requests),
+ testUtils.CollectionNamedMutationType,
+ testUtils.CollectionSaveMutationType,
+ }),
Actions: []any{
testUtils.AddPolicy{
@@ -1017,10 +1021,12 @@ func TestACP_OwnerMakesManagerButManagerCanNotPerformOperations_ManagerCantReadO
Description: "Test acp, owner makes a manager, manager can't read or write",
- SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
- testUtils.CollectionNamedMutationType,
- testUtils.CollectionSaveMutationType,
- }),
+ SupportedMutationTypes: immutable.Some(
+ []testUtils.MutationType{
+ // GQL mutation will return no error when wrong identity is used with gql (only for update requests),
+ testUtils.CollectionNamedMutationType,
+ testUtils.CollectionSaveMutationType,
+ }),
Actions: []any{
testUtils.AddPolicy{
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_only_write_gql_test.go b/tests/integration/acp/relationship/doc_actor/add/with_only_write_gql_test.go
index 6a3f02f4ba..9391bb5b4d 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_only_write_gql_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_only_write_gql_test.go
@@ -26,10 +26,13 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_GQ
Description: "Test acp, owner gives write(update) access without explicit read permission, can still update",
- SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
- // GQL mutation will return no error when wrong identity is used so test that separately.
- testUtils.GQLRequestMutationType,
- }),
+ SupportedMutationTypes: immutable.Some(
+ []testUtils.MutationType{
+ // GQL mutation will return no error when wrong identity is used (only for update requests),
+ // so test that separately.
+ testUtils.GQLRequestMutationType,
+ },
+ ),
Actions: []any{
testUtils.AddPolicy{
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_only_write_test.go b/tests/integration/acp/relationship/doc_actor/add/with_only_write_test.go
index ccac9cd232..dba024e1a4 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_only_write_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_only_write_test.go
@@ -26,10 +26,12 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot
Description: "Test acp, owner gives write(update) access without explicit read permission, can still update",
- SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
- testUtils.CollectionNamedMutationType,
- testUtils.CollectionSaveMutationType,
- }),
+ SupportedMutationTypes: immutable.Some(
+ []testUtils.MutationType{
+ // GQL mutation will return no error when wrong identity is used with gql (only for update requests),
+ testUtils.CollectionNamedMutationType,
+ testUtils.CollectionSaveMutationType,
+ }),
Actions: []any{
testUtils.AddPolicy{
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_reader_gql_test.go b/tests/integration/acp/relationship/doc_actor/add/with_reader_gql_test.go
index f51861ec5c..7ca1c30e09 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_reader_gql_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_reader_gql_test.go
@@ -26,10 +26,13 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_GQL_OtherActorCanReadButNotU
Description: "Test acp, owner gives read access to another actor, but the other actor can't update",
- SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
- // GQL mutation will return no error when wrong identity is used so test that separately.
- testUtils.GQLRequestMutationType,
- }),
+ SupportedMutationTypes: immutable.Some(
+ []testUtils.MutationType{
+ // GQL mutation will return no error when wrong identity is used (only for update requests),
+ // so test that separately.
+ testUtils.GQLRequestMutationType,
+ },
+ ),
Actions: []any{
testUtils.AddPolicy{
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_reader_test.go b/tests/integration/acp/relationship/doc_actor/add/with_reader_test.go
index fd452c2d7d..541b40977e 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_reader_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_reader_test.go
@@ -465,10 +465,11 @@ func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_OtherActorCanReadButNotUpdat
Description: "Test acp, owner gives read access to another actor, but the other actor can't update",
- SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
- testUtils.CollectionNamedMutationType,
- testUtils.CollectionSaveMutationType,
- }),
+ SupportedMutationTypes: immutable.Some(
+ []testUtils.MutationType{
+ testUtils.CollectionNamedMutationType,
+ testUtils.CollectionSaveMutationType,
+ }),
Actions: []any{
testUtils.AddPolicy{
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_target_all_actors_gql_test.go b/tests/integration/acp/relationship/doc_actor/add/with_target_all_actors_gql_test.go
index c05380d8e0..2a421dc74d 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_target_all_actors_gql_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_target_all_actors_gql_test.go
@@ -28,7 +28,8 @@ func TestACP_OwnerGivesOnlyReadAccessToAllActors_GQL_AllActorsCanReadButNotUpdat
SupportedMutationTypes: immutable.Some(
[]testUtils.MutationType{
- // GQL mutation will return no error when wrong identity is used so test that separately.
+ // GQL mutation will return no error when wrong identity is used (only for update requests),
+ // so test that separately.
testUtils.GQLRequestMutationType,
},
),
@@ -248,3 +249,210 @@ func TestACP_OwnerGivesOnlyReadAccessToAllActors_GQL_AllActorsCanReadButNotUpdat
testUtils.ExecuteTestCase(t, test)
}
+
+func TestACP_OwnerGivesOnlyReadAccessToAllActors_GQL_CanReadEvenWithoutIdentityButNotUpdateOrDelete(t *testing.T) {
+ expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4"
+
+ test := testUtils.TestCase{
+
+ Description: "Test acp, owner gives read access to all actors (gql), can read without an identity but can't update or delete",
+
+ SupportedMutationTypes: immutable.Some(
+ []testUtils.MutationType{
+ // GQL mutation will return no error when wrong identity is used (only for update requests),
+ // so test that separately.
+ testUtils.GQLRequestMutationType,
+ },
+ ),
+
+ Actions: []any{
+ testUtils.AddPolicy{
+
+ Identity: testUtils.ClientIdentity(1),
+
+ Policy: `
+ name: Test Policy
+
+ description: A Policy
+
+ actor:
+ name: actor
+
+ resources:
+ users:
+ permissions:
+ read:
+ expr: owner + reader + writer
+
+ write:
+ expr: owner + writer
+
+ nothing:
+ expr: dummy
+
+ relations:
+ owner:
+ types:
+ - actor
+
+ reader:
+ types:
+ - actor
+
+ writer:
+ types:
+ - actor
+
+ admin:
+ manages:
+ - reader
+ types:
+ - actor
+
+ dummy:
+ types:
+ - actor
+ `,
+
+ ExpectedPolicyID: expectedPolicyID,
+ },
+
+ testUtils.SchemaUpdate{
+ Schema: fmt.Sprintf(`
+ type Users @policy(
+ id: "%s",
+ resource: "users"
+ ) {
+ name: String
+ age: Int
+ }
+ `,
+ expectedPolicyID,
+ ),
+ },
+
+ testUtils.CreateDoc{
+ Identity: testUtils.ClientIdentity(1),
+
+ CollectionID: 0,
+
+ Doc: `
+ {
+ "name": "Shahzad",
+ "age": 28
+ }
+ `,
+ },
+
+ testUtils.Request{
+ Identity: testUtils.NoIdentity(), // Can not read without an identity.
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{}, // Can't see the documents yet
+ },
+ },
+
+ testUtils.DeleteDoc{ // Since can't read without identity, can't delete either.
+ CollectionID: 0,
+
+ Identity: testUtils.NoIdentity(),
+
+ DocID: 0,
+
+ ExpectedError: "document not found or not authorized to access",
+ },
+
+ testUtils.UpdateDoc{ // Since can't read without identity, can't update either.
+ CollectionID: 0,
+
+ Identity: testUtils.NoIdentity(),
+
+ DocID: 0,
+
+ Doc: `
+ {
+ "name": "Shahzad Lone"
+ }
+ `,
+
+ SkipLocalUpdateEvent: true,
+ },
+
+ testUtils.AddDocActorRelationship{
+ RequestorIdentity: testUtils.ClientIdentity(1),
+
+ TargetIdentity: testUtils.AllClientIdentities(),
+
+ CollectionID: 0,
+
+ DocID: 0,
+
+ Relation: "reader",
+
+ ExpectedExistence: false,
+ },
+
+ testUtils.Request{
+ Identity: testUtils.NoIdentity(), // Now any identity can read, even if there is no identity
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b",
+ "name": "Shahzad",
+ "age": int64(28),
+ },
+ },
+ },
+ },
+
+ testUtils.UpdateDoc{ // But doesn't mean they can update.
+ CollectionID: 0,
+
+ Identity: testUtils.NoIdentity(),
+
+ DocID: 0,
+
+ Doc: `
+ {
+ "name": "Shahzad Lone"
+ }
+ `,
+
+ ExpectedError: "document not found or not authorized to access",
+ },
+
+ testUtils.DeleteDoc{ // But doesn't mean they can delete.
+ CollectionID: 0,
+
+ Identity: testUtils.NoIdentity(),
+
+ DocID: 0,
+
+ ExpectedError: "document not found or not authorized to access",
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_target_all_actors_test.go b/tests/integration/acp/relationship/doc_actor/add/with_target_all_actors_test.go
index 4ee858345b..ad587232f9 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_target_all_actors_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_target_all_actors_test.go
@@ -28,6 +28,7 @@ func TestACP_OwnerGivesOnlyReadAccessToAllActors_AllActorsCanReadButNotUpdateOrD
SupportedMutationTypes: immutable.Some(
[]testUtils.MutationType{
+ // GQL mutation will return no error when wrong identity is used with gql (only for update requests),
testUtils.CollectionNamedMutationType,
testUtils.CollectionSaveMutationType,
},
@@ -248,3 +249,210 @@ func TestACP_OwnerGivesOnlyReadAccessToAllActors_AllActorsCanReadButNotUpdateOrD
testUtils.ExecuteTestCase(t, test)
}
+
+func TestACP_OwnerGivesOnlyReadAccessToAllActors_CanReadEvenWithoutIdentityButNotUpdateOrDelete(t *testing.T) {
+ expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4"
+
+ test := testUtils.TestCase{
+
+ Description: "Test acp, owner gives read access to all actors, can read without an identity but can't update or delete",
+
+ SupportedMutationTypes: immutable.Some(
+ []testUtils.MutationType{
+ // GQL mutation will return no error when wrong identity is used with gql (only for update requests),
+ testUtils.CollectionNamedMutationType,
+ testUtils.CollectionSaveMutationType,
+ },
+ ),
+
+ Actions: []any{
+ testUtils.AddPolicy{
+
+ Identity: testUtils.ClientIdentity(1),
+
+ Policy: `
+ name: Test Policy
+
+ description: A Policy
+
+ actor:
+ name: actor
+
+ resources:
+ users:
+ permissions:
+ read:
+ expr: owner + reader + writer
+
+ write:
+ expr: owner + writer
+
+ nothing:
+ expr: dummy
+
+ relations:
+ owner:
+ types:
+ - actor
+
+ reader:
+ types:
+ - actor
+
+ writer:
+ types:
+ - actor
+
+ admin:
+ manages:
+ - reader
+ types:
+ - actor
+
+ dummy:
+ types:
+ - actor
+ `,
+
+ ExpectedPolicyID: expectedPolicyID,
+ },
+
+ testUtils.SchemaUpdate{
+ Schema: fmt.Sprintf(`
+ type Users @policy(
+ id: "%s",
+ resource: "users"
+ ) {
+ name: String
+ age: Int
+ }
+ `,
+ expectedPolicyID,
+ ),
+ },
+
+ testUtils.CreateDoc{
+ Identity: testUtils.ClientIdentity(1),
+
+ CollectionID: 0,
+
+ Doc: `
+ {
+ "name": "Shahzad",
+ "age": 28
+ }
+ `,
+ },
+
+ testUtils.Request{
+ Identity: testUtils.NoIdentity(), // Can not read without an identity.
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{}, // Can't see the documents yet
+ },
+ },
+
+ testUtils.DeleteDoc{ // Since can't read without identity, can't delete either.
+ CollectionID: 0,
+
+ Identity: testUtils.NoIdentity(),
+
+ DocID: 0,
+
+ ExpectedError: "document not found or not authorized to access",
+ },
+
+ testUtils.UpdateDoc{ // Since can't read without identity, can't update either.
+ CollectionID: 0,
+
+ Identity: testUtils.NoIdentity(),
+
+ DocID: 0,
+
+ Doc: `
+ {
+ "name": "Shahzad Lone"
+ }
+ `,
+
+ ExpectedError: "document not found or not authorized to access",
+ },
+
+ testUtils.AddDocActorRelationship{
+ RequestorIdentity: testUtils.ClientIdentity(1),
+
+ TargetIdentity: testUtils.AllClientIdentities(),
+
+ CollectionID: 0,
+
+ DocID: 0,
+
+ Relation: "reader",
+
+ ExpectedExistence: false,
+ },
+
+ testUtils.Request{
+ Identity: testUtils.NoIdentity(), // Now any identity can read, even if there is no identity
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b",
+ "name": "Shahzad",
+ "age": int64(28),
+ },
+ },
+ },
+ },
+
+ testUtils.UpdateDoc{ // But doesn't mean they can update.
+ CollectionID: 0,
+
+ Identity: testUtils.NoIdentity(),
+
+ DocID: 0,
+
+ Doc: `
+ {
+ "name": "Shahzad Lone"
+ }
+ `,
+
+ ExpectedError: "document not found or not authorized to access",
+ },
+
+ testUtils.DeleteDoc{ // But doesn't mean they can delete.
+ CollectionID: 0,
+
+ Identity: testUtils.NoIdentity(),
+
+ DocID: 0,
+
+ ExpectedError: "document not found or not authorized to access",
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_update_gql_test.go b/tests/integration/acp/relationship/doc_actor/add/with_update_gql_test.go
index eff2be0f7d..6c17ee792b 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_update_gql_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_update_gql_test.go
@@ -26,10 +26,11 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorTwice_GQL_ShowThatTheRelat
Description: "Test acp, owner gives write(update) access to another actor twice, no-op",
- SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
- // GQL mutation will return no error when wrong identity is used so test that separately.
- testUtils.GQLRequestMutationType,
- }),
+ SupportedMutationTypes: immutable.Some(
+ []testUtils.MutationType{
+ // GQL mutation will return no error when wrong identity is used so test that separately.
+ testUtils.GQLRequestMutationType,
+ }),
Actions: []any{
testUtils.AddPolicy{
@@ -184,10 +185,11 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_GQL_OtherActorCanUpdate(t
Description: "Test acp, owner gives write(update) access to another actor",
- SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
- // GQL mutation will return no error when wrong identity is used so test that separately.
- testUtils.GQLRequestMutationType,
- }),
+ SupportedMutationTypes: immutable.Some(
+ []testUtils.MutationType{
+ // GQL mutation will return no error when wrong identity is used so test that separately.
+ testUtils.GQLRequestMutationType,
+ }),
Actions: []any{
testUtils.AddPolicy{
diff --git a/tests/integration/acp/relationship/doc_actor/add/with_update_test.go b/tests/integration/acp/relationship/doc_actor/add/with_update_test.go
index f6bf553356..582f42af01 100644
--- a/tests/integration/acp/relationship/doc_actor/add/with_update_test.go
+++ b/tests/integration/acp/relationship/doc_actor/add/with_update_test.go
@@ -26,10 +26,12 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorTwice_ShowThatTheRelations
Description: "Test acp, owner gives write(update) access to another actor twice, no-op",
- SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
- testUtils.CollectionNamedMutationType,
- testUtils.CollectionSaveMutationType,
- }),
+ SupportedMutationTypes: immutable.Some(
+ []testUtils.MutationType{
+ // GQL mutation will return no error when wrong identity is used with gql (only for update requests),
+ testUtils.CollectionNamedMutationType,
+ testUtils.CollectionSaveMutationType,
+ }),
Actions: []any{
testUtils.AddPolicy{
@@ -184,10 +186,12 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_OtherActorCanUpdate(t *te
Description: "Test acp, owner gives write(update) access to another actor",
- SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
- testUtils.CollectionNamedMutationType,
- testUtils.CollectionSaveMutationType,
- }),
+ SupportedMutationTypes: immutable.Some(
+ []testUtils.MutationType{
+ // GQL mutation will return no error when wrong identity is used with gql (only for update requests),
+ testUtils.CollectionNamedMutationType,
+ testUtils.CollectionSaveMutationType,
+ }),
Actions: []any{
testUtils.AddPolicy{
diff --git a/tests/integration/acp/relationship/doc_actor/delete/with_target_all_actors_test.go b/tests/integration/acp/relationship/doc_actor/delete/with_target_all_actors_test.go
index 14c0121a41..9db120ce91 100644
--- a/tests/integration/acp/relationship/doc_actor/delete/with_target_all_actors_test.go
+++ b/tests/integration/acp/relationship/doc_actor/delete/with_target_all_actors_test.go
@@ -165,7 +165,7 @@ func TestACP_OwnerRevokesAccessFromAllNonExplicitActors_ActorsCanNotReadAnymore(
},
},
- testUtils.DeleteDocActorRelationship{ // Revoke access from all actors, not explictly allowed.
+ testUtils.DeleteDocActorRelationship{ // Revoke access from all actors, (ones given access through * implicitly).
RequestorIdentity: testUtils.ClientIdentity(1),
TargetIdentity: testUtils.AllClientIdentities(),
@@ -444,7 +444,7 @@ func TestACP_OwnerRevokesAccessFromAllNonExplicitActors_ExplicitActorsCanStillRe
},
},
- testUtils.DeleteDocActorRelationship{ // Revoke access from all actors, not explictly allowed.
+ testUtils.DeleteDocActorRelationship{ // Revoke access from all actors, (ones given access through * implicitly).
RequestorIdentity: testUtils.ClientIdentity(1),
TargetIdentity: testUtils.AllClientIdentities(),
@@ -546,3 +546,164 @@ func TestACP_OwnerRevokesAccessFromAllNonExplicitActors_ExplicitActorsCanStillRe
testUtils.ExecuteTestCase(t, test)
}
+
+func TestACP_OwnerRevokesAccessFromAllNonExplicitActors_NonIdentityRequestsCanNotReadAnymore(t *testing.T) {
+ expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4"
+
+ test := testUtils.TestCase{
+
+ Description: "Test acp, owner revokes read access from actors that were given read access implicitly, non-identity actors can't read anymore",
+
+ Actions: []any{
+ testUtils.AddPolicy{
+
+ Identity: testUtils.ClientIdentity(1),
+
+ Policy: `
+ name: Test Policy
+
+ description: A Policy
+
+ actor:
+ name: actor
+
+ resources:
+ users:
+ permissions:
+ read:
+ expr: owner + reader + writer
+
+ write:
+ expr: owner + writer
+
+ nothing:
+ expr: dummy
+
+ relations:
+ owner:
+ types:
+ - actor
+
+ reader:
+ types:
+ - actor
+
+ writer:
+ types:
+ - actor
+
+ admin:
+ manages:
+ - reader
+ types:
+ - actor
+
+ dummy:
+ types:
+ - actor
+ `,
+
+ ExpectedPolicyID: expectedPolicyID,
+ },
+
+ testUtils.SchemaUpdate{
+ Schema: fmt.Sprintf(`
+ type Users @policy(
+ id: "%s",
+ resource: "users"
+ ) {
+ name: String
+ age: Int
+ }
+ `,
+ expectedPolicyID,
+ ),
+ },
+
+ testUtils.CreateDoc{
+ Identity: testUtils.ClientIdentity(1),
+
+ CollectionID: 0,
+
+ Doc: `
+ {
+ "name": "Shahzad",
+ "age": 28
+ }
+ `,
+ },
+
+ testUtils.AddDocActorRelationship{
+ RequestorIdentity: testUtils.ClientIdentity(1),
+
+ TargetIdentity: testUtils.AllClientIdentities(), // Give implicit access to all identities.
+
+ CollectionID: 0,
+
+ DocID: 0,
+
+ Relation: "reader",
+
+ ExpectedExistence: false,
+ },
+
+ testUtils.Request{
+ Identity: testUtils.NoIdentity(), // Can read even without identity
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{
+ {
+ "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b",
+ "name": "Shahzad",
+ "age": int64(28),
+ },
+ },
+ },
+ },
+
+ testUtils.DeleteDocActorRelationship{ // Revoke access from all actors, (ones given access through * implicitly).
+ RequestorIdentity: testUtils.ClientIdentity(1),
+
+ TargetIdentity: testUtils.AllClientIdentities(),
+
+ CollectionID: 0,
+
+ DocID: 0,
+
+ Relation: "reader",
+
+ ExpectedRecordFound: true,
+ },
+
+ testUtils.Request{
+ Identity: testUtils.NoIdentity(), // Can not read anymore
+
+ Request: `
+ query {
+ Users {
+ _docID
+ name
+ age
+ }
+ }
+ `,
+
+ Results: map[string]any{
+ "Users": []map[string]any{}, // Can't see the documents now
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
From 1ad9fa95abe0e9b373f41680c9c0dce5f39f692a Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
<41898282+github-actions[bot]@users.noreply.github.com>
Date: Sun, 8 Dec 2024 04:23:55 -0500
Subject: [PATCH 41/47] bot: Update dependencies (bulk dependabot PRs)
08-12-2024 (#3296)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
✅ This PR was created by combining the following PRs:
#3290 bot: Bump eslint-plugin-react-refresh from 0.4.15 to 0.4.16 in
/playground
#3289 bot: Bump github.com/quic-go/quic-go from 0.48.1 to 0.48.2
---------
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: Shahzad Lone
---
go.mod | 2 +-
go.sum | 4 ++--
playground/package-lock.json | 8 ++++----
playground/package.json | 2 +-
4 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/go.mod b/go.mod
index e8d263bb76..fafd58f04a 100644
--- a/go.mod
+++ b/go.mod
@@ -315,7 +315,7 @@ require (
github.com/prometheus/common v0.60.0 // indirect
github.com/prometheus/procfs v0.15.1 // indirect
github.com/quic-go/qpack v0.5.1 // indirect
- github.com/quic-go/quic-go v0.48.1 // indirect
+ github.com/quic-go/quic-go v0.48.2 // indirect
github.com/quic-go/webtransport-go v0.8.1-0.20241018022711-4ac2c9250e66 // indirect
github.com/raulk/go-watchdog v1.3.0 // indirect
github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect
diff --git a/go.sum b/go.sum
index db0e7bc21a..efc32274fc 100644
--- a/go.sum
+++ b/go.sum
@@ -1309,8 +1309,8 @@ github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0leargg
github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk=
github.com/quic-go/qpack v0.5.1 h1:giqksBPnT/HDtZ6VhtFKgoLOWmlyo9Ei6u9PqzIMbhI=
github.com/quic-go/qpack v0.5.1/go.mod h1:+PC4XFrEskIVkcLzpEkbLqq1uCoxPhQuvK5rH1ZgaEg=
-github.com/quic-go/quic-go v0.48.1 h1:y/8xmfWI9qmGTc+lBr4jKRUWLGSlSigv847ULJ4hYXA=
-github.com/quic-go/quic-go v0.48.1/go.mod h1:yBgs3rWBOADpga7F+jJsb6Ybg1LSYiQvwWlLX+/6HMs=
+github.com/quic-go/quic-go v0.48.2 h1:wsKXZPeGWpMpCGSWqOcqpW2wZYic/8T3aqiOID0/KWE=
+github.com/quic-go/quic-go v0.48.2/go.mod h1:yBgs3rWBOADpga7F+jJsb6Ybg1LSYiQvwWlLX+/6HMs=
github.com/quic-go/webtransport-go v0.8.1-0.20241018022711-4ac2c9250e66 h1:4WFk6u3sOT6pLa1kQ50ZVdm8BQFgJNA117cepZxtLIg=
github.com/quic-go/webtransport-go v0.8.1-0.20241018022711-4ac2c9250e66/go.mod h1:Vp72IJajgeOL6ddqrAhmp7IM9zbTcgkQxD/YdxrVwMw=
github.com/raulk/go-watchdog v1.3.0 h1:oUmdlHxdkXRJlwfG0O9omj8ukerm8MEQavSiDTEtBsk=
diff --git a/playground/package-lock.json b/playground/package-lock.json
index 42c3faa7aa..24249dc43e 100644
--- a/playground/package-lock.json
+++ b/playground/package-lock.json
@@ -23,7 +23,7 @@
"@vitejs/plugin-react-swc": "^3.7.2",
"eslint": "^9.16.0",
"eslint-plugin-react-hooks": "^5.0.0",
- "eslint-plugin-react-refresh": "^0.4.15",
+ "eslint-plugin-react-refresh": "^0.4.16",
"typescript": "^5.7.2",
"vite": "^6.0.2"
}
@@ -3529,9 +3529,9 @@
}
},
"node_modules/eslint-plugin-react-refresh": {
- "version": "0.4.15",
- "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.15.tgz",
- "integrity": "sha512-poU5qfmwLS5WO69drZnB9J1vXv+NQkE0p+oIY4B85Z9IuvpaIdHa+9IE/sFrN79QW49QcHQIP6c7NHpDMQ9TvA==",
+ "version": "0.4.16",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.16.tgz",
+ "integrity": "sha512-slterMlxAhov/DZO8NScf6mEeMBBXodFUolijDvrtTxyezyLoTQaa73FyYus/VbTdftd8wBgBxPMRk3poleXNQ==",
"dev": true,
"peerDependencies": {
"eslint": ">=8.40"
diff --git a/playground/package.json b/playground/package.json
index cea10661a4..70abd4a314 100644
--- a/playground/package.json
+++ b/playground/package.json
@@ -25,7 +25,7 @@
"@vitejs/plugin-react-swc": "^3.7.2",
"eslint": "^9.16.0",
"eslint-plugin-react-hooks": "^5.0.0",
- "eslint-plugin-react-refresh": "^0.4.15",
+ "eslint-plugin-react-refresh": "^0.4.16",
"typescript": "^5.7.2",
"vite": "^6.0.2"
}
From edd0ce7e76c732740697f93c4805c73faee01f33 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
<41898282+github-actions[bot]@users.noreply.github.com>
Date: Mon, 9 Dec 2024 14:22:21 -0500
Subject: [PATCH 42/47] bot: Update dependencies (bulk dependabot PRs)
09-12-2024 (#3307)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
✅ This PR was created by combining the following PRs:
#3305 bot: Bump eslint-plugin-react-hooks from 5.0.0 to 5.1.0 in
/playground
#3304 bot: Bump vite from 6.0.2 to 6.0.3 in /playground
#3303 bot: Bump @typescript-eslint/parser from 8.16.0 to 8.17.0 in
/playground
#3301 bot: Bump github.com/bits-and-blooms/bitset from 1.17.0 to 1.18.0
#3300 bot: Bump golang.org/x/crypto from 0.29.0 to 0.30.0
#3297 bot: Bump google.golang.org/grpc from 1.67.1 to 1.68.1
⚠️ The following PRs were resolved manually due to merge conflicts:
#3302 bot: Bump @typescript-eslint/eslint-plugin from 8.16.0 to 8.17.0
in /playground
---------
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: Shahzad Lone
---
go.mod | 14 +-
go.sum | 28 +--
playground/package-lock.json | 378 +++++++++++++++++++++++++++++++----
playground/package.json | 8 +-
4 files changed, 363 insertions(+), 65 deletions(-)
diff --git a/go.mod b/go.mod
index fafd58f04a..4ca0018dfb 100644
--- a/go.mod
+++ b/go.mod
@@ -5,7 +5,7 @@ go 1.22.0
toolchain go1.22.7
require (
- github.com/bits-and-blooms/bitset v1.17.0
+ github.com/bits-and-blooms/bitset v1.18.0
github.com/bxcodec/faker v2.0.1+incompatible
github.com/cosmos/cosmos-sdk v0.50.10
github.com/cosmos/gogoproto v1.7.0
@@ -62,9 +62,9 @@ require (
go.opentelemetry.io/otel/metric v1.32.0
go.opentelemetry.io/otel/sdk/metric v1.32.0
go.uber.org/zap v1.27.0
- golang.org/x/crypto v0.29.0
+ golang.org/x/crypto v0.30.0
golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c
- google.golang.org/grpc v1.67.1
+ google.golang.org/grpc v1.68.1
)
require (
@@ -362,10 +362,10 @@ require (
golang.org/x/mod v0.21.0 // indirect
golang.org/x/net v0.30.0 // indirect
golang.org/x/oauth2 v0.23.0 // indirect
- golang.org/x/sync v0.9.0 // indirect
- golang.org/x/sys v0.27.0 // indirect
- golang.org/x/term v0.26.0 // indirect
- golang.org/x/text v0.20.0 // indirect
+ golang.org/x/sync v0.10.0 // indirect
+ golang.org/x/sys v0.28.0 // indirect
+ golang.org/x/term v0.27.0 // indirect
+ golang.org/x/text v0.21.0 // indirect
golang.org/x/time v0.5.0 // indirect
golang.org/x/tools v0.26.0 // indirect
gonum.org/v1/gonum v0.15.0 // indirect
diff --git a/go.sum b/go.sum
index efc32274fc..388265eb1d 100644
--- a/go.sum
+++ b/go.sum
@@ -294,8 +294,8 @@ github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d/go.mod h1:6QX/PXZ
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816 h1:41iFGWnSlI2gVpmOtVTJZNodLdLQLn/KsJqFvXwnd/s=
github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
-github.com/bits-and-blooms/bitset v1.17.0 h1:1X2TS7aHz1ELcC0yU1y2stUs/0ig5oMU6STFZGrhvHI=
-github.com/bits-and-blooms/bitset v1.17.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
+github.com/bits-and-blooms/bitset v1.18.0 h1:eCa5BU3k9TtC6GP0r+bER6Si5ow7O0S+EXahjgwpLLs=
+github.com/bits-and-blooms/bitset v1.18.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM=
github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ=
github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g=
@@ -1602,8 +1602,8 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y
golang.org/x/crypto v0.8.0/go.mod h1:mRqEX+O9/h5TFCrQhkgjo2yKi0yYA+9ecGkdQoHrywE=
golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw=
golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
-golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ=
-golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg=
+golang.org/x/crypto v0.30.0 h1:RwoQn3GkWiMkzlX562cLB7OxWvjH1L8xutO2WoJcRoY=
+golang.org/x/crypto v0.30.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -1761,8 +1761,8 @@ golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ=
-golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
+golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180810173357-98c5dad5d1a0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -1871,8 +1871,8 @@ golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s=
-golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
+golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@@ -1881,8 +1881,8 @@ golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU=
golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY=
-golang.org/x/term v0.26.0 h1:WEQa6V3Gja/BhNxg540hBip/kkaYtRg3cxg4oXSw4AU=
-golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E=
+golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q=
+golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -1897,8 +1897,8 @@ golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
-golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug=
-golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4=
+golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
+golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -2205,8 +2205,8 @@ google.golang.org/grpc v1.48.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACu
google.golang.org/grpc v1.49.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI=
google.golang.org/grpc v1.50.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI=
google.golang.org/grpc v1.50.1/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI=
-google.golang.org/grpc v1.67.1 h1:zWnc1Vrcno+lHZCOofnIMvycFcc0QRGIzm9dhnDX68E=
-google.golang.org/grpc v1.67.1/go.mod h1:1gLDyUQU7CTLJI90u3nXZ9ekeghjeM7pTDZlqFNg2AA=
+google.golang.org/grpc v1.68.1 h1:oI5oTa11+ng8r8XMMN7jAOmWfPZWbYpCFaMUTACxkM0=
+google.golang.org/grpc v1.68.1/go.mod h1:+q1XYFJjShcqn0QZHvCyeR4CXPA+llXIeUIfIe00waw=
google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
diff --git a/playground/package-lock.json b/playground/package-lock.json
index 24249dc43e..9207c015d5 100644
--- a/playground/package-lock.json
+++ b/playground/package-lock.json
@@ -18,14 +18,14 @@
"@types/react": "^18.3.12",
"@types/react-dom": "^18.3.1",
"@types/swagger-ui-react": "^4.18.3",
- "@typescript-eslint/eslint-plugin": "^8.15.0",
- "@typescript-eslint/parser": "^8.15.0",
+ "@typescript-eslint/eslint-plugin": "^8.17.0",
+ "@typescript-eslint/parser": "^8.17.0",
"@vitejs/plugin-react-swc": "^3.7.2",
"eslint": "^9.16.0",
- "eslint-plugin-react-hooks": "^5.0.0",
+ "eslint-plugin-react-hooks": "^5.1.0",
"eslint-plugin-react-refresh": "^0.4.16",
"typescript": "^5.7.2",
- "vite": "^6.0.2"
+ "vite": "^6.0.3"
}
},
"node_modules/@babel/runtime": {
@@ -2723,17 +2723,16 @@
"license": "MIT"
},
"node_modules/@typescript-eslint/eslint-plugin": {
- "version": "8.16.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.16.0.tgz",
- "integrity": "sha512-5YTHKV8MYlyMI6BaEG7crQ9BhSc8RxzshOReKwZwRWN0+XvvTOm+L/UYLCYxFpfwYuAAqhxiq4yae0CMFwbL7Q==",
+ "version": "8.17.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.17.0.tgz",
+ "integrity": "sha512-HU1KAdW3Tt8zQkdvNoIijfWDMvdSweFYm4hWh+KwhPstv+sCmWb89hCIP8msFm9N1R/ooh9honpSuvqKWlYy3w==",
"dev": true,
- "license": "MIT",
"dependencies": {
"@eslint-community/regexpp": "^4.10.0",
- "@typescript-eslint/scope-manager": "8.16.0",
- "@typescript-eslint/type-utils": "8.16.0",
- "@typescript-eslint/utils": "8.16.0",
- "@typescript-eslint/visitor-keys": "8.16.0",
+ "@typescript-eslint/scope-manager": "8.17.0",
+ "@typescript-eslint/type-utils": "8.17.0",
+ "@typescript-eslint/utils": "8.17.0",
+ "@typescript-eslint/visitor-keys": "8.17.0",
"graphemer": "^1.4.0",
"ignore": "^5.3.1",
"natural-compare": "^1.4.0",
@@ -2756,17 +2755,75 @@
}
}
},
+ "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": {
+ "version": "8.17.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.17.0.tgz",
+ "integrity": "sha512-/ewp4XjvnxaREtqsZjF4Mfn078RD/9GmiEAtTeLQ7yFdKnqwTOgRMSvFz4et9U5RiJQ15WTGXPLj89zGusvxBg==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.17.0",
+ "@typescript-eslint/visitor-keys": "8.17.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": {
+ "version": "8.17.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.17.0.tgz",
+ "integrity": "sha512-gY2TVzeve3z6crqh2Ic7Cr+CAv6pfb0Egee7J5UAVWCpVvDI/F71wNfolIim4FE6hT15EbpZFVUj9j5i38jYXA==",
+ "dev": true,
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": {
+ "version": "8.17.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.17.0.tgz",
+ "integrity": "sha512-1Hm7THLpO6ww5QU6H/Qp+AusUUl+z/CAm3cNZZ0jQvon9yicgO7Rwd+/WWRpMKLYV6p2UvdbR27c86rzCPpreg==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.17.0",
+ "eslint-visitor-keys": "^4.2.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/eslint-plugin/node_modules/eslint-visitor-keys": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz",
+ "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==",
+ "dev": true,
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint"
+ }
+ },
"node_modules/@typescript-eslint/parser": {
- "version": "8.16.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.16.0.tgz",
- "integrity": "sha512-D7DbgGFtsqIPIFMPJwCad9Gfi/hC0PWErRRHFnaCWoEDYi5tQUDiJCTmGUbBiLzjqAck4KcXt9Ayj0CNlIrF+w==",
+ "version": "8.17.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.17.0.tgz",
+ "integrity": "sha512-Drp39TXuUlD49F7ilHHCG7TTg8IkA+hxCuULdmzWYICxGXvDXmDmWEjJYZQYgf6l/TFfYNE167m7isnc3xlIEg==",
"dev": true,
- "license": "BSD-2-Clause",
"dependencies": {
- "@typescript-eslint/scope-manager": "8.16.0",
- "@typescript-eslint/types": "8.16.0",
- "@typescript-eslint/typescript-estree": "8.16.0",
- "@typescript-eslint/visitor-keys": "8.16.0",
+ "@typescript-eslint/scope-manager": "8.17.0",
+ "@typescript-eslint/types": "8.17.0",
+ "@typescript-eslint/typescript-estree": "8.17.0",
+ "@typescript-eslint/visitor-keys": "8.17.0",
"debug": "^4.3.4"
},
"engines": {
@@ -2785,6 +2842,93 @@
}
}
},
+ "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": {
+ "version": "8.17.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.17.0.tgz",
+ "integrity": "sha512-/ewp4XjvnxaREtqsZjF4Mfn078RD/9GmiEAtTeLQ7yFdKnqwTOgRMSvFz4et9U5RiJQ15WTGXPLj89zGusvxBg==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.17.0",
+ "@typescript-eslint/visitor-keys": "8.17.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": {
+ "version": "8.17.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.17.0.tgz",
+ "integrity": "sha512-gY2TVzeve3z6crqh2Ic7Cr+CAv6pfb0Egee7J5UAVWCpVvDI/F71wNfolIim4FE6hT15EbpZFVUj9j5i38jYXA==",
+ "dev": true,
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": {
+ "version": "8.17.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.17.0.tgz",
+ "integrity": "sha512-JqkOopc1nRKZpX+opvKqnM3XUlM7LpFMD0lYxTqOTKQfCWAmxw45e3qlOCsEqEB2yuacujivudOFpCnqkBDNMw==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.17.0",
+ "@typescript-eslint/visitor-keys": "8.17.0",
+ "debug": "^4.3.4",
+ "fast-glob": "^3.3.2",
+ "is-glob": "^4.0.3",
+ "minimatch": "^9.0.4",
+ "semver": "^7.6.0",
+ "ts-api-utils": "^1.3.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependenciesMeta": {
+ "typescript": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": {
+ "version": "8.17.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.17.0.tgz",
+ "integrity": "sha512-1Hm7THLpO6ww5QU6H/Qp+AusUUl+z/CAm3cNZZ0jQvon9yicgO7Rwd+/WWRpMKLYV6p2UvdbR27c86rzCPpreg==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.17.0",
+ "eslint-visitor-keys": "^4.2.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/parser/node_modules/eslint-visitor-keys": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz",
+ "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==",
+ "dev": true,
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint"
+ }
+ },
"node_modules/@typescript-eslint/scope-manager": {
"version": "8.16.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.16.0.tgz",
@@ -2804,14 +2948,13 @@
}
},
"node_modules/@typescript-eslint/type-utils": {
- "version": "8.16.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.16.0.tgz",
- "integrity": "sha512-IqZHGG+g1XCWX9NyqnI/0CX5LL8/18awQqmkZSl2ynn8F76j579dByc0jhfVSnSnhf7zv76mKBQv9HQFKvDCgg==",
+ "version": "8.17.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.17.0.tgz",
+ "integrity": "sha512-q38llWJYPd63rRnJ6wY/ZQqIzPrBCkPdpIsaCfkR3Q4t3p6sb422zougfad4TFW9+ElIFLVDzWGiGAfbb/v2qw==",
"dev": true,
- "license": "MIT",
"dependencies": {
- "@typescript-eslint/typescript-estree": "8.16.0",
- "@typescript-eslint/utils": "8.16.0",
+ "@typescript-eslint/typescript-estree": "8.17.0",
+ "@typescript-eslint/utils": "8.17.0",
"debug": "^4.3.4",
"ts-api-utils": "^1.3.0"
},
@@ -2831,6 +2974,76 @@
}
}
},
+ "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": {
+ "version": "8.17.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.17.0.tgz",
+ "integrity": "sha512-gY2TVzeve3z6crqh2Ic7Cr+CAv6pfb0Egee7J5UAVWCpVvDI/F71wNfolIim4FE6hT15EbpZFVUj9j5i38jYXA==",
+ "dev": true,
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": {
+ "version": "8.17.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.17.0.tgz",
+ "integrity": "sha512-JqkOopc1nRKZpX+opvKqnM3XUlM7LpFMD0lYxTqOTKQfCWAmxw45e3qlOCsEqEB2yuacujivudOFpCnqkBDNMw==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.17.0",
+ "@typescript-eslint/visitor-keys": "8.17.0",
+ "debug": "^4.3.4",
+ "fast-glob": "^3.3.2",
+ "is-glob": "^4.0.3",
+ "minimatch": "^9.0.4",
+ "semver": "^7.6.0",
+ "ts-api-utils": "^1.3.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependenciesMeta": {
+ "typescript": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": {
+ "version": "8.17.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.17.0.tgz",
+ "integrity": "sha512-1Hm7THLpO6ww5QU6H/Qp+AusUUl+z/CAm3cNZZ0jQvon9yicgO7Rwd+/WWRpMKLYV6p2UvdbR27c86rzCPpreg==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.17.0",
+ "eslint-visitor-keys": "^4.2.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/type-utils/node_modules/eslint-visitor-keys": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz",
+ "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==",
+ "dev": true,
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint"
+ }
+ },
"node_modules/@typescript-eslint/types": {
"version": "8.16.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.16.0.tgz",
@@ -2875,16 +3088,15 @@
}
},
"node_modules/@typescript-eslint/utils": {
- "version": "8.16.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.16.0.tgz",
- "integrity": "sha512-C1zRy/mOL8Pj157GiX4kaw7iyRLKfJXBR3L82hk5kS/GyHcOFmy4YUq/zfZti72I9wnuQtA/+xzft4wCC8PJdA==",
+ "version": "8.17.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.17.0.tgz",
+ "integrity": "sha512-bQC8BnEkxqG8HBGKwG9wXlZqg37RKSMY7v/X8VEWD8JG2JuTHuNK0VFvMPMUKQcbk6B+tf05k+4AShAEtCtJ/w==",
"dev": true,
- "license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.4.0",
- "@typescript-eslint/scope-manager": "8.16.0",
- "@typescript-eslint/types": "8.16.0",
- "@typescript-eslint/typescript-estree": "8.16.0"
+ "@typescript-eslint/scope-manager": "8.17.0",
+ "@typescript-eslint/types": "8.17.0",
+ "@typescript-eslint/typescript-estree": "8.17.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2902,6 +3114,93 @@
}
}
},
+ "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": {
+ "version": "8.17.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.17.0.tgz",
+ "integrity": "sha512-/ewp4XjvnxaREtqsZjF4Mfn078RD/9GmiEAtTeLQ7yFdKnqwTOgRMSvFz4et9U5RiJQ15WTGXPLj89zGusvxBg==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.17.0",
+ "@typescript-eslint/visitor-keys": "8.17.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": {
+ "version": "8.17.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.17.0.tgz",
+ "integrity": "sha512-gY2TVzeve3z6crqh2Ic7Cr+CAv6pfb0Egee7J5UAVWCpVvDI/F71wNfolIim4FE6hT15EbpZFVUj9j5i38jYXA==",
+ "dev": true,
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": {
+ "version": "8.17.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.17.0.tgz",
+ "integrity": "sha512-JqkOopc1nRKZpX+opvKqnM3XUlM7LpFMD0lYxTqOTKQfCWAmxw45e3qlOCsEqEB2yuacujivudOFpCnqkBDNMw==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.17.0",
+ "@typescript-eslint/visitor-keys": "8.17.0",
+ "debug": "^4.3.4",
+ "fast-glob": "^3.3.2",
+ "is-glob": "^4.0.3",
+ "minimatch": "^9.0.4",
+ "semver": "^7.6.0",
+ "ts-api-utils": "^1.3.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependenciesMeta": {
+ "typescript": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": {
+ "version": "8.17.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.17.0.tgz",
+ "integrity": "sha512-1Hm7THLpO6ww5QU6H/Qp+AusUUl+z/CAm3cNZZ0jQvon9yicgO7Rwd+/WWRpMKLYV6p2UvdbR27c86rzCPpreg==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "8.17.0",
+ "eslint-visitor-keys": "^4.2.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/utils/node_modules/eslint-visitor-keys": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz",
+ "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==",
+ "dev": true,
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint"
+ }
+ },
"node_modules/@typescript-eslint/visitor-keys": {
"version": "8.16.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.16.0.tgz",
@@ -3516,11 +3815,10 @@
}
},
"node_modules/eslint-plugin-react-hooks": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-5.0.0.tgz",
- "integrity": "sha512-hIOwI+5hYGpJEc4uPRmz2ulCjAGD/N13Lukkh8cLV0i2IRk/bdZDYjgLVHj+U9Z704kLIdIO6iueGvxNur0sgw==",
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-5.1.0.tgz",
+ "integrity": "sha512-mpJRtPgHN2tNAvZ35AMfqeB3Xqeo273QxrHJsbBEPWODRM4r0yB6jfoROqKEYrOn27UtRPpcpHc2UqyBSuUNTw==",
"dev": true,
- "license": "MIT",
"engines": {
"node": ">=10"
},
@@ -5828,9 +6126,9 @@
}
},
"node_modules/vite": {
- "version": "6.0.2",
- "resolved": "https://registry.npmjs.org/vite/-/vite-6.0.2.tgz",
- "integrity": "sha512-XdQ+VsY2tJpBsKGs0wf3U/+azx8BBpYRHFAyKm5VeEZNOJZRB63q7Sc8Iup3k0TrN3KO6QgyzFf+opSbfY1y0g==",
+ "version": "6.0.3",
+ "resolved": "https://registry.npmjs.org/vite/-/vite-6.0.3.tgz",
+ "integrity": "sha512-Cmuo5P0ENTN6HxLSo6IHsjCLn/81Vgrp81oaiFFMRa8gGDj5xEjIcEpf2ZymZtZR8oU0P2JX5WuUp/rlXcHkAw==",
"dev": true,
"dependencies": {
"esbuild": "^0.24.0",
diff --git a/playground/package.json b/playground/package.json
index 70abd4a314..89b6ab6d08 100644
--- a/playground/package.json
+++ b/playground/package.json
@@ -20,13 +20,13 @@
"@types/react": "^18.3.12",
"@types/react-dom": "^18.3.1",
"@types/swagger-ui-react": "^4.18.3",
- "@typescript-eslint/eslint-plugin": "^8.15.0",
- "@typescript-eslint/parser": "^8.15.0",
+ "@typescript-eslint/eslint-plugin": "^8.17.0",
+ "@typescript-eslint/parser": "^8.17.0",
"@vitejs/plugin-react-swc": "^3.7.2",
"eslint": "^9.16.0",
- "eslint-plugin-react-hooks": "^5.0.0",
+ "eslint-plugin-react-hooks": "^5.1.0",
"eslint-plugin-react-refresh": "^0.4.16",
"typescript": "^5.7.2",
- "vite": "^6.0.2"
+ "vite": "^6.0.3"
}
}
From be3f381573cee339e80831e1324c13a75d81b515 Mon Sep 17 00:00:00 2001
From: Chris Quigley
Date: Mon, 9 Dec 2024 15:51:26 -0500
Subject: [PATCH 43/47] fix: Adjust OpenAPI index POST example request body
(#3268)
## Relevant issue(s)
Resolves #2459
## Description
On the OpenAPI tool of the playground, the Index POST tab contained an
example body that was incorrect. It listed ID as a field which could be
set, which was untrue (this value is automatically generated, and trying
to pass one in throws an error.) To adjust this, I have created a new
description structure called `IndexCreateRequestDescription` which is
meant to be used for this type of HTTP request. To go alongside this
change, a new schema type, `indexCreateRequestSchema` was also created.
## Tasks
- [x] I made sure the code is well commented, particularly
hard-to-understand areas.
- [x] I made sure the repository-held documentation is changed
accordingly.
- [x] I made sure the pull request title adheres to the conventional
commit style (the subset used in the project can be found in
[tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)).
## How has this been tested?
The platform(s) on which this was tested:
- Windows
---
cli/index_create.go | 6 +-
client/collection.go | 2 +-
client/index.go | 12 ++
client/mocks/collection.go | 16 +--
docs/website/references/http/openapi.json | 27 ++++-
http/client_collection.go | 2 +-
http/handler_collection.go | 12 +-
http/openapi.go | 1 +
internal/db/collection_define.go | 7 +-
internal/db/collection_index.go | 25 ++--
internal/db/index_test.go | 133 ++++++++++++----------
internal/db/indexed_docs_test.go | 16 ++-
tests/clients/cli/wrapper_collection.go | 2 +-
tests/integration/utils.go | 2 +-
14 files changed, 167 insertions(+), 96 deletions(-)
diff --git a/cli/index_create.go b/cli/index_create.go
index e9f4350fa0..d3d9162037 100644
--- a/cli/index_create.go
+++ b/cli/index_create.go
@@ -69,7 +69,7 @@ Example: create a unique index for 'Users' collection on 'name' in ascending ord
})
}
- desc := client.IndexDescription{
+ desc := client.IndexDescriptionCreateRequest{
Name: nameArg,
Fields: fields,
Unique: uniqueArg,
@@ -79,11 +79,11 @@ Example: create a unique index for 'Users' collection on 'name' in ascending ord
return err
}
- desc, err = col.CreateIndex(cmd.Context(), desc)
+ descWithID, err := col.CreateIndex(cmd.Context(), desc)
if err != nil {
return err
}
- return writeJSON(cmd, desc)
+ return writeJSON(cmd, descWithID)
},
}
cmd.Flags().StringVarP(&collectionArg, "collection", "c", "", "Collection name")
diff --git a/client/collection.go b/client/collection.go
index b557e2e335..b65da2c0a6 100644
--- a/client/collection.go
+++ b/client/collection.go
@@ -115,7 +115,7 @@ type Collection interface {
// only contain letters, numbers, and underscores.
// If the name of the index is not provided, it will be generated.
// WARNING: This method can not create index for a collection that has a policy.
- CreateIndex(context.Context, IndexDescription) (IndexDescription, error)
+ CreateIndex(context.Context, IndexDescriptionCreateRequest) (IndexDescription, error)
// DropIndex drops an index from the collection.
DropIndex(ctx context.Context, indexName string) error
diff --git a/client/index.go b/client/index.go
index b09b258224..fe245d115d 100644
--- a/client/index.go
+++ b/client/index.go
@@ -36,6 +36,18 @@ type IndexDescription struct {
Unique bool
}
+// IndexDescriptionCreateRequest describes an index creation request.
+// It does not contain the ID, as it is not a valid field for the request body.
+// Instead it should be automatically generated.
+type IndexDescriptionCreateRequest struct {
+ // Name contains the name of the index.
+ Name string
+ // Fields contains the fields that are being indexed.
+ Fields []IndexedFieldDescription
+ // Unique indicates whether the index is unique.
+ Unique bool
+}
+
// CollectionIndex is an interface for indexing documents in a collection.
type CollectionIndex interface {
// Save indexes a document by storing indexed field values.
diff --git a/client/mocks/collection.go b/client/mocks/collection.go
index 3b80849661..a34140f144 100644
--- a/client/mocks/collection.go
+++ b/client/mocks/collection.go
@@ -73,7 +73,7 @@ func (_c *Collection_Create_Call) RunAndReturn(run func(context.Context, *client
}
// CreateIndex provides a mock function with given fields: _a0, _a1
-func (_m *Collection) CreateIndex(_a0 context.Context, _a1 client.IndexDescription) (client.IndexDescription, error) {
+func (_m *Collection) CreateIndex(_a0 context.Context, _a1 client.IndexDescriptionCreateRequest) (client.IndexDescription, error) {
ret := _m.Called(_a0, _a1)
if len(ret) == 0 {
@@ -82,16 +82,16 @@ func (_m *Collection) CreateIndex(_a0 context.Context, _a1 client.IndexDescripti
var r0 client.IndexDescription
var r1 error
- if rf, ok := ret.Get(0).(func(context.Context, client.IndexDescription) (client.IndexDescription, error)); ok {
+ if rf, ok := ret.Get(0).(func(context.Context, client.IndexDescriptionCreateRequest) (client.IndexDescription, error)); ok {
return rf(_a0, _a1)
}
- if rf, ok := ret.Get(0).(func(context.Context, client.IndexDescription) client.IndexDescription); ok {
+ if rf, ok := ret.Get(0).(func(context.Context, client.IndexDescriptionCreateRequest) client.IndexDescription); ok {
r0 = rf(_a0, _a1)
} else {
r0 = ret.Get(0).(client.IndexDescription)
}
- if rf, ok := ret.Get(1).(func(context.Context, client.IndexDescription) error); ok {
+ if rf, ok := ret.Get(1).(func(context.Context, client.IndexDescriptionCreateRequest) error); ok {
r1 = rf(_a0, _a1)
} else {
r1 = ret.Error(1)
@@ -107,14 +107,14 @@ type Collection_CreateIndex_Call struct {
// CreateIndex is a helper method to define mock.On call
// - _a0 context.Context
-// - _a1 client.IndexDescription
+// - _a1 client.IndexDescriptionCreateRequest
func (_e *Collection_Expecter) CreateIndex(_a0 interface{}, _a1 interface{}) *Collection_CreateIndex_Call {
return &Collection_CreateIndex_Call{Call: _e.mock.On("CreateIndex", _a0, _a1)}
}
-func (_c *Collection_CreateIndex_Call) Run(run func(_a0 context.Context, _a1 client.IndexDescription)) *Collection_CreateIndex_Call {
+func (_c *Collection_CreateIndex_Call) Run(run func(_a0 context.Context, _a1 client.IndexDescriptionCreateRequest)) *Collection_CreateIndex_Call {
_c.Call.Run(func(args mock.Arguments) {
- run(args[0].(context.Context), args[1].(client.IndexDescription))
+ run(args[0].(context.Context), args[1].(client.IndexDescriptionCreateRequest))
})
return _c
}
@@ -124,7 +124,7 @@ func (_c *Collection_CreateIndex_Call) Return(_a0 client.IndexDescription, _a1 e
return _c
}
-func (_c *Collection_CreateIndex_Call) RunAndReturn(run func(context.Context, client.IndexDescription) (client.IndexDescription, error)) *Collection_CreateIndex_Call {
+func (_c *Collection_CreateIndex_Call) RunAndReturn(run func(context.Context, client.IndexDescriptionCreateRequest) (client.IndexDescription, error)) *Collection_CreateIndex_Call {
_c.Call.Return(run)
return _c
}
diff --git a/docs/website/references/http/openapi.json b/docs/website/references/http/openapi.json
index 1f28b84a92..c5c9fbc7b2 100644
--- a/docs/website/references/http/openapi.json
+++ b/docs/website/references/http/openapi.json
@@ -442,6 +442,31 @@
},
"type": "object"
},
+ "index_create_request": {
+ "properties": {
+ "Fields": {
+ "items": {
+ "properties": {
+ "Descending": {
+ "type": "boolean"
+ },
+ "Name": {
+ "type": "string"
+ }
+ },
+ "type": "object"
+ },
+ "type": "array"
+ },
+ "Name": {
+ "type": "string"
+ },
+ "Unique": {
+ "type": "boolean"
+ }
+ },
+ "type": "object"
+ },
"lens_config": {
"properties": {
"DestinationSchemaVersionID": {
@@ -1245,7 +1270,7 @@
"content": {
"application/json": {
"schema": {
- "$ref": "#/components/schemas/index"
+ "$ref": "#/components/schemas/index_create_request"
}
}
},
diff --git a/http/client_collection.go b/http/client_collection.go
index 3abfa61002..1a56ab50e1 100644
--- a/http/client_collection.go
+++ b/http/client_collection.go
@@ -379,7 +379,7 @@ func (c *Collection) GetAllDocIDs(
func (c *Collection) CreateIndex(
ctx context.Context,
- indexDesc client.IndexDescription,
+ indexDesc client.IndexDescriptionCreateRequest,
) (client.IndexDescription, error) {
if !c.Description().Name.HasValue() {
return client.IndexDescription{}, client.ErrOperationNotPermittedOnNamelessCols
diff --git a/http/handler_collection.go b/http/handler_collection.go
index ddade699e3..1757fc7d00 100644
--- a/http/handler_collection.go
+++ b/http/handler_collection.go
@@ -259,7 +259,12 @@ func (s *collectionHandler) CreateIndex(rw http.ResponseWriter, req *http.Reques
responseJSON(rw, http.StatusBadRequest, errorResponse{err})
return
}
- index, err := col.CreateIndex(req.Context(), indexDesc)
+ descWithoutID := client.IndexDescriptionCreateRequest{
+ Name: indexDesc.Name,
+ Fields: indexDesc.Fields,
+ Unique: indexDesc.Unique,
+ }
+ index, err := col.CreateIndex(req.Context(), descWithoutID)
if err != nil {
responseJSON(rw, http.StatusBadRequest, errorResponse{err})
return
@@ -318,6 +323,9 @@ func (h *collectionHandler) bindRoutes(router *Router) {
indexSchema := &openapi3.SchemaRef{
Ref: "#/components/schemas/index",
}
+ indexCreateRequestSchema := &openapi3.SchemaRef{
+ Ref: "#/components/schemas/index_create_request",
+ }
collectionNamePathParam := openapi3.NewPathParameter("name").
WithDescription("Collection name").
@@ -389,7 +397,7 @@ func (h *collectionHandler) bindRoutes(router *Router) {
createIndexRequest := openapi3.NewRequestBody().
WithRequired(true).
- WithContent(openapi3.NewContentWithJSONSchemaRef(indexSchema))
+ WithContent(openapi3.NewContentWithJSONSchemaRef(indexCreateRequestSchema))
createIndexResponse := openapi3.NewResponse().
WithDescription("Index description").
WithJSONSchemaRef(indexSchema)
diff --git a/http/openapi.go b/http/openapi.go
index b217036182..6e906ecd77 100644
--- a/http/openapi.go
+++ b/http/openapi.go
@@ -32,6 +32,7 @@ var openApiSchemas = map[string]any{
"schema": &client.SchemaDescription{},
"collection_definition": &client.CollectionDefinition{},
"index": &client.IndexDescription{},
+ "index_create_request": &client.IndexDescriptionCreateRequest{},
"delete_result": &client.DeleteResult{},
"update_result": &client.UpdateResult{},
"lens_config": &client.LensConfig{},
diff --git a/internal/db/collection_define.go b/internal/db/collection_define.go
index 6597a54f1c..8f49b9a970 100644
--- a/internal/db/collection_define.go
+++ b/internal/db/collection_define.go
@@ -91,7 +91,12 @@ func (db *db) createCollections(
col := db.newCollection(desc, def.Schema)
for _, index := range desc.Indexes {
- if _, err := col.createIndex(ctx, index); err != nil {
+ descWithoutID := client.IndexDescriptionCreateRequest{
+ Name: index.Name,
+ Fields: index.Fields,
+ Unique: index.Unique,
+ }
+ if _, err := col.createIndex(ctx, descWithoutID); err != nil {
return nil, err
}
}
diff --git a/internal/db/collection_index.go b/internal/db/collection_index.go
index 3c5da18c58..515a5047aa 100644
--- a/internal/db/collection_index.go
+++ b/internal/db/collection_index.go
@@ -35,7 +35,7 @@ import (
func (db *db) createCollectionIndex(
ctx context.Context,
collectionName string,
- desc client.IndexDescription,
+ desc client.IndexDescriptionCreateRequest,
) (client.IndexDescription, error) {
col, err := db.getCollectionByName(ctx, collectionName)
if err != nil {
@@ -218,7 +218,7 @@ func (c *collection) deleteIndexedDocWithID(
// the documents will be indexed by the new index.
func (c *collection) CreateIndex(
ctx context.Context,
- desc client.IndexDescription,
+ desc client.IndexDescriptionCreateRequest,
) (client.IndexDescription, error) {
ctx, txn, err := ensureContextTxn(ctx, c.db, false)
if err != nil {
@@ -235,7 +235,7 @@ func (c *collection) CreateIndex(
func (c *collection) createIndex(
ctx context.Context,
- desc client.IndexDescription,
+ desc client.IndexDescriptionCreateRequest,
) (CollectionIndex, error) {
if desc.Name != "" && !schema.IsValidIndexName(desc.Name) {
return nil, schema.NewErrIndexWithInvalidName("!")
@@ -266,9 +266,15 @@ func (c *collection) createIndex(
if err != nil {
return nil, err
}
- desc.ID = uint32(colID)
- buf, err := json.Marshal(desc)
+ descWithID := client.IndexDescription{
+ Name: desc.Name,
+ ID: uint32(colID),
+ Fields: desc.Fields,
+ Unique: desc.Unique,
+ }
+
+ buf, err := json.Marshal(descWithID)
if err != nil {
return nil, err
}
@@ -278,7 +284,7 @@ func (c *collection) createIndex(
if err != nil {
return nil, err
}
- colIndex, err := NewCollectionIndex(c, desc)
+ colIndex, err := NewCollectionIndex(c, descWithID)
if err != nil {
return nil, err
}
@@ -491,7 +497,7 @@ func (c *collection) checkExistingFieldsAndAdjustRelFieldNames(
func (c *collection) generateIndexNameIfNeededAndCreateKey(
ctx context.Context,
- desc *client.IndexDescription,
+ desc *client.IndexDescriptionCreateRequest,
) (keys.CollectionIndexKey, error) {
// callers of this function must set a context transaction
txn := mustGetContextTxn(ctx)
@@ -524,10 +530,7 @@ func (c *collection) generateIndexNameIfNeededAndCreateKey(
return indexKey, nil
}
-func validateIndexDescription(desc client.IndexDescription) error {
- if desc.ID != 0 {
- return NewErrNonZeroIndexIDProvided(desc.ID)
- }
+func validateIndexDescription(desc client.IndexDescriptionCreateRequest) error {
if len(desc.Fields) == 0 {
return ErrIndexMissingFields
}
diff --git a/internal/db/index_test.go b/internal/db/index_test.go
index 950f41c47f..1582cdfcfe 100644
--- a/internal/db/index_test.go
+++ b/internal/db/index_test.go
@@ -145,13 +145,13 @@ func newIndexTestFixture(t *testing.T) *indexTestFixture {
}
func (f *indexTestFixture) createCollectionIndex(
- desc client.IndexDescription,
+ desc client.IndexDescriptionCreateRequest,
) (client.IndexDescription, error) {
return f.createCollectionIndexFor(f.users.Name().Value(), desc)
}
-func getUsersIndexDescOnName() client.IndexDescription {
- return client.IndexDescription{
+func getUsersIndexDescOnName() client.IndexDescriptionCreateRequest {
+ return client.IndexDescriptionCreateRequest{
Name: testUsersColIndexName,
Fields: []client.IndexedFieldDescription{
{Name: usersNameFieldName},
@@ -159,8 +159,8 @@ func getUsersIndexDescOnName() client.IndexDescription {
}
}
-func getUsersIndexDescOnAge() client.IndexDescription {
- return client.IndexDescription{
+func getUsersIndexDescOnAge() client.IndexDescriptionCreateRequest {
+ return client.IndexDescriptionCreateRequest{
Name: testUsersColIndexAge,
Fields: []client.IndexedFieldDescription{
{Name: usersAgeFieldName},
@@ -168,8 +168,8 @@ func getUsersIndexDescOnAge() client.IndexDescription {
}
}
-func getUsersIndexDescOnWeight() client.IndexDescription {
- return client.IndexDescription{
+func getUsersIndexDescOnWeight() client.IndexDescriptionCreateRequest {
+ return client.IndexDescriptionCreateRequest{
Name: testUsersColIndexWeight,
Fields: []client.IndexedFieldDescription{
{Name: usersWeightFieldName},
@@ -177,8 +177,8 @@ func getUsersIndexDescOnWeight() client.IndexDescription {
}
}
-func getProductsIndexDescOnCategory() client.IndexDescription {
- return client.IndexDescription{
+func getProductsIndexDescOnCategory() client.IndexDescriptionCreateRequest {
+ return client.IndexDescriptionCreateRequest{
Name: testUsersColIndexAge,
Fields: []client.IndexedFieldDescription{
{Name: productsCategoryFieldName},
@@ -193,7 +193,7 @@ func (f *indexTestFixture) createUserCollectionIndexOnName() client.IndexDescrip
}
func (f *indexTestFixture) createUserCollectionIndexOnNumbers() client.IndexDescription {
- indexDesc := client.IndexDescription{
+ indexDesc := client.IndexDescriptionCreateRequest{
Name: "users_numbers_index",
Fields: []client.IndexedFieldDescription{
{Name: usersNumbersFieldName},
@@ -206,7 +206,7 @@ func (f *indexTestFixture) createUserCollectionIndexOnNumbers() client.IndexDesc
return newDesc
}
-func makeUnique(indexDesc client.IndexDescription) client.IndexDescription {
+func makeUnique(indexDesc client.IndexDescriptionCreateRequest) client.IndexDescriptionCreateRequest {
indexDesc.Unique = true
return indexDesc
}
@@ -218,7 +218,7 @@ func (f *indexTestFixture) createUserCollectionUniqueIndexOnName() client.IndexD
return newDesc
}
-func addFieldToIndex(indexDesc client.IndexDescription, fieldName string) client.IndexDescription {
+func addFieldToIndex(indexDesc client.IndexDescriptionCreateRequest, fieldName string) client.IndexDescriptionCreateRequest {
indexDesc.Fields = append(indexDesc.Fields, client.IndexedFieldDescription{
Name: fieldName,
})
@@ -274,7 +274,7 @@ func (f *indexTestFixture) commitTxn() {
func (f *indexTestFixture) createCollectionIndexFor(
collectionName string,
- desc client.IndexDescription,
+ desc client.IndexDescriptionCreateRequest,
) (client.IndexDescription, error) {
ctx := SetContextTxn(f.ctx, f.txn)
index, err := f.db.createCollectionIndex(ctx, collectionName, desc)
@@ -298,34 +298,17 @@ func TestCreateIndex_IfFieldsIsEmpty_ReturnError(t *testing.T) {
f := newIndexTestFixture(t)
defer f.db.Close()
- _, err := f.createCollectionIndex(client.IndexDescription{
+ _, err := f.createCollectionIndex(client.IndexDescriptionCreateRequest{
Name: "some_index_name",
})
assert.EqualError(t, err, errIndexMissingFields)
}
-func TestCreateIndex_IfIndexDescriptionIDIsNotZero_ReturnError(t *testing.T) {
- f := newIndexTestFixture(t)
- defer f.db.Close()
-
- for _, id := range []uint32{1, 20, 999} {
- desc := client.IndexDescription{
- Name: "some_index_name",
- ID: id,
- Fields: []client.IndexedFieldDescription{
- {Name: usersNameFieldName},
- },
- }
- _, err := f.createCollectionIndex(desc)
- assert.ErrorIs(t, err, NewErrNonZeroIndexIDProvided(0))
- }
-}
-
func TestCreateIndex_IfValidInput_CreateIndex(t *testing.T) {
f := newIndexTestFixture(t)
defer f.db.Close()
- desc := client.IndexDescription{
+ desc := client.IndexDescriptionCreateRequest{
Name: "some_index_name",
Fields: []client.IndexedFieldDescription{
{Name: usersNameFieldName},
@@ -342,7 +325,7 @@ func TestCreateIndex_IfFieldNameIsEmpty_ReturnError(t *testing.T) {
f := newIndexTestFixture(t)
defer f.db.Close()
- desc := client.IndexDescription{
+ desc := client.IndexDescriptionCreateRequest{
Name: "some_index_name",
Fields: []client.IndexedFieldDescription{
{Name: ""},
@@ -356,7 +339,7 @@ func TestCreateIndex_IfFieldHasNoDirection_DefaultToAsc(t *testing.T) {
f := newIndexTestFixture(t)
defer f.db.Close()
- desc := client.IndexDescription{
+ desc := client.IndexDescriptionCreateRequest{
Name: "some_index_name",
Fields: []client.IndexedFieldDescription{{Name: usersNameFieldName}},
}
@@ -370,11 +353,11 @@ func TestCreateIndex_IfIndexWithNameAlreadyExists_ReturnError(t *testing.T) {
defer f.db.Close()
name := "some_index_name"
- desc1 := client.IndexDescription{
+ desc1 := client.IndexDescriptionCreateRequest{
Name: name,
Fields: []client.IndexedFieldDescription{{Name: usersNameFieldName}},
}
- desc2 := client.IndexDescription{
+ desc2 := client.IndexDescriptionCreateRequest{
Name: name,
Fields: []client.IndexedFieldDescription{{Name: usersAgeFieldName}},
}
@@ -389,15 +372,15 @@ func TestCreateIndex_IfGeneratedNameMatchesExisting_AddIncrement(t *testing.T) {
defer f.db.Close()
name := usersColName + "_" + usersAgeFieldName + "_ASC"
- desc1 := client.IndexDescription{
+ desc1 := client.IndexDescriptionCreateRequest{
Name: name,
Fields: []client.IndexedFieldDescription{{Name: usersNameFieldName}},
}
- desc2 := client.IndexDescription{
+ desc2 := client.IndexDescriptionCreateRequest{
Name: name + "_2",
Fields: []client.IndexedFieldDescription{{Name: usersWeightFieldName}},
}
- desc3 := client.IndexDescription{
+ desc3 := client.IndexDescriptionCreateRequest{
Name: "",
Fields: []client.IndexedFieldDescription{{Name: usersAgeFieldName}},
}
@@ -415,7 +398,7 @@ func TestCreateIndex_ShouldSaveToSystemStorage(t *testing.T) {
defer f.db.Close()
name := "users_age_ASC"
- desc := client.IndexDescription{
+ desc := client.IndexDescriptionCreateRequest{
Name: name,
Fields: []client.IndexedFieldDescription{{Name: usersNameFieldName}},
}
@@ -428,15 +411,21 @@ func TestCreateIndex_ShouldSaveToSystemStorage(t *testing.T) {
var deserialized client.IndexDescription
err = json.Unmarshal(data, &deserialized)
assert.NoError(t, err)
- desc.ID = 1
- assert.Equal(t, desc, deserialized)
+
+ descWithID := client.IndexDescription{
+ Name: desc.Name,
+ ID: 1,
+ Fields: desc.Fields,
+ Unique: desc.Unique,
+ }
+ assert.Equal(t, descWithID, deserialized)
}
func TestCreateIndex_IfCollectionDoesntExist_ReturnError(t *testing.T) {
f := newIndexTestFixture(t)
defer f.db.Close()
- desc := client.IndexDescription{
+ desc := client.IndexDescriptionCreateRequest{
Fields: []client.IndexedFieldDescription{{Name: productsPriceFieldName}},
}
@@ -449,7 +438,7 @@ func TestCreateIndex_IfPropertyDoesntExist_ReturnError(t *testing.T) {
defer f.db.Close()
const field = "non_existing_field"
- desc := client.IndexDescription{
+ desc := client.IndexDescriptionCreateRequest{
Fields: []client.IndexedFieldDescription{{Name: field}},
}
@@ -462,8 +451,8 @@ func TestCreateIndex_WithMultipleCollectionsAndIndexes_AssignIncrementedIDPerCol
users := f.addUsersCollection()
products := f.getProductsCollectionDesc()
- makeIndex := func(fieldName string) client.IndexDescription {
- return client.IndexDescription{
+ makeIndex := func(fieldName string) client.IndexDescriptionCreateRequest {
+ return client.IndexDescriptionCreateRequest{
Fields: []client.IndexedFieldDescription{
{Name: fieldName},
},
@@ -533,7 +522,7 @@ func TestGetIndexes_ShouldReturnListOfAllExistingIndexes(t *testing.T) {
f := newIndexTestFixture(t)
defer f.db.Close()
- usersIndexDesc := client.IndexDescription{
+ usersIndexDesc := client.IndexDescriptionCreateRequest{
Name: "users_name_index",
Fields: []client.IndexedFieldDescription{{Name: usersNameFieldName}},
}
@@ -541,7 +530,7 @@ func TestGetIndexes_ShouldReturnListOfAllExistingIndexes(t *testing.T) {
assert.NoError(t, err)
f.getProductsCollectionDesc()
- productsIndexDesc := client.IndexDescription{
+ productsIndexDesc := client.IndexDescriptionCreateRequest{
Name: "products_description_index",
Fields: []client.IndexedFieldDescription{{Name: productsPriceFieldName}},
}
@@ -657,7 +646,7 @@ func TestGetCollectionIndexes_ShouldReturnListOfCollectionIndexes(t *testing.T)
f := newIndexTestFixture(t)
defer f.db.Close()
- usersIndexDesc := client.IndexDescription{
+ usersIndexDesc := client.IndexDescriptionCreateRequest{
Name: "users_name_index",
Fields: []client.IndexedFieldDescription{{Name: usersNameFieldName}},
}
@@ -665,7 +654,7 @@ func TestGetCollectionIndexes_ShouldReturnListOfCollectionIndexes(t *testing.T)
assert.NoError(t, err)
products := f.getProductsCollectionDesc()
- productsIndexDesc := client.IndexDescription{
+ productsIndexDesc := client.IndexDescriptionCreateRequest{
Name: "products_description_index",
Fields: []client.IndexedFieldDescription{{Name: productsPriceFieldName}},
}
@@ -679,14 +668,26 @@ func TestGetCollectionIndexes_ShouldReturnListOfCollectionIndexes(t *testing.T)
userIndexes, err := f.getCollectionIndexes(f.users.ID())
assert.NoError(t, err)
require.Equal(t, 1, len(userIndexes))
- usersIndexDesc.ID = 1
- assert.Equal(t, usersIndexDesc, userIndexes[0])
+
+ descWithID := client.IndexDescription{
+ Name: usersIndexDesc.Name,
+ ID: 1,
+ Fields: usersIndexDesc.Fields,
+ Unique: usersIndexDesc.Unique,
+ }
+ assert.Equal(t, descWithID, userIndexes[0])
productIndexes, err := f.getCollectionIndexes(products.ID())
assert.NoError(t, err)
require.Equal(t, 1, len(productIndexes))
- productsIndexDesc.ID = 1
- assert.Equal(t, productsIndexDesc, productIndexes[0])
+
+ productsIndexDescWithID := client.IndexDescription{
+ Name: productsIndexDesc.Name,
+ ID: 1,
+ Fields: productsIndexDesc.Fields,
+ Unique: productsIndexDesc.Unique,
+ }
+ assert.Equal(t, productsIndexDescWithID, productIndexes[0])
}
func TestGetCollectionIndexes_IfSystemStoreFails_ReturnError(t *testing.T) {
@@ -970,7 +971,7 @@ func TestCollectionGetIndexes_ShouldReturnIndexesInOrderedByName(t *testing.T) {
require.NoError(f.t, err)
for i := 1; i <= num; i++ {
iStr := toSuffix(i)
- indexDesc := client.IndexDescription{
+ indexDesc := client.IndexDescriptionCreateRequest{
Name: indexNamePrefix + iStr,
Fields: []client.IndexedFieldDescription{
{Name: fieldNamePrefix + iStr},
@@ -1114,14 +1115,14 @@ func TestDropIndex_IfSystemStoreFails_ReturnError(t *testing.T) {
func TestDropAllIndexes_ShouldDeleteAllIndexes(t *testing.T) {
f := newIndexTestFixture(t)
defer f.db.Close()
- _, err := f.createCollectionIndexFor(usersColName, client.IndexDescription{
+ _, err := f.createCollectionIndexFor(usersColName, client.IndexDescriptionCreateRequest{
Fields: []client.IndexedFieldDescription{
{Name: usersNameFieldName},
},
})
assert.NoError(f.t, err)
- _, err = f.createCollectionIndexFor(usersColName, client.IndexDescription{
+ _, err = f.createCollectionIndexFor(usersColName, client.IndexDescriptionCreateRequest{
Fields: []client.IndexedFieldDescription{
{Name: usersAgeFieldName},
},
@@ -1231,8 +1232,14 @@ func TestNewCollectionIndex_IfDescriptionHasNoFields_ReturnError(t *testing.T) {
defer f.db.Close()
desc := getUsersIndexDescOnName()
desc.Fields = nil
- _, err := NewCollectionIndex(f.users, desc)
- require.ErrorIs(t, err, NewErrIndexDescHasNoFields(desc))
+ descWithID := client.IndexDescription{
+ Name: desc.Name,
+ ID: 1,
+ Fields: desc.Fields,
+ Unique: desc.Unique,
+ }
+ _, err := NewCollectionIndex(f.users, descWithID)
+ require.ErrorIs(t, err, NewErrIndexDescHasNoFields(descWithID))
}
func TestNewCollectionIndex_IfDescriptionHasNonExistingField_ReturnError(t *testing.T) {
@@ -1240,6 +1247,12 @@ func TestNewCollectionIndex_IfDescriptionHasNonExistingField_ReturnError(t *test
defer f.db.Close()
desc := getUsersIndexDescOnName()
desc.Fields[0].Name = "non_existing_field"
- _, err := NewCollectionIndex(f.users, desc)
+ descWithID := client.IndexDescription{
+ Name: desc.Name,
+ ID: 1,
+ Fields: desc.Fields,
+ Unique: desc.Unique,
+ }
+ _, err := NewCollectionIndex(f.users, descWithID)
require.ErrorIs(t, err, client.NewErrFieldNotExist(desc.Fields[0].Name))
}
diff --git a/internal/db/indexed_docs_test.go b/internal/db/indexed_docs_test.go
index 2c6ce0af53..49b8795021 100644
--- a/internal/db/indexed_docs_test.go
+++ b/internal/db/indexed_docs_test.go
@@ -284,8 +284,13 @@ func (f *indexTestFixture) stubSystemStore(systemStoreOn *mocks.DSReaderWriter_E
f.users = f.addUsersCollection()
}
desc := getUsersIndexDescOnName()
- desc.ID = 1
- indexOnNameDescData, err := json.Marshal(desc)
+ descWithID := client.IndexDescription{
+ Name: desc.Name,
+ ID: 1,
+ Fields: desc.Fields,
+ Unique: desc.Unique,
+ }
+ indexOnNameDescData, err := json.Marshal(descWithID)
require.NoError(f.t, err)
colIndexKey := keys.NewCollectionIndexKey(immutable.Some(f.users.ID()), "")
@@ -1513,13 +1518,12 @@ func TestArrayIndex_With2ArrayFieldsIfDocIsDeleted_ShouldRemoveIndex(t *testing.
f := newIndexTestFixture(t)
defer f.db.Close()
- indexDesc := client.IndexDescription{
+ indexDesc := client.IndexDescriptionCreateRequest{
Fields: []client.IndexedFieldDescription{
{Name: usersNumbersFieldName},
{Name: usersHobbiesFieldName},
},
}
-
_, err := f.createCollectionIndexFor(f.users.Name().Value(), indexDesc)
require.NoError(f.t, err)
@@ -1541,7 +1545,7 @@ func TestArrayIndex_With2ArrayFieldsIfDocIsDeletedButOneArrayElementHasNoIndexRe
f := newIndexTestFixture(t)
defer f.db.Close()
- indexDesc := client.IndexDescription{
+ indexDesc := client.IndexDescriptionCreateRequest{
Fields: []client.IndexedFieldDescription{
{Name: usersNumbersFieldName},
{Name: usersHobbiesFieldName},
@@ -1572,7 +1576,7 @@ func TestArrayIndex_WithUniqueIndexIfDocIsDeleted_ShouldRemoveIndex(t *testing.T
f := newIndexTestFixture(t)
defer f.db.Close()
- indexDesc := client.IndexDescription{
+ indexDesc := client.IndexDescriptionCreateRequest{
Unique: true,
Fields: []client.IndexedFieldDescription{
{Name: usersNumbersFieldName},
diff --git a/tests/clients/cli/wrapper_collection.go b/tests/clients/cli/wrapper_collection.go
index eb9c5f5466..6fbd739879 100644
--- a/tests/clients/cli/wrapper_collection.go
+++ b/tests/clients/cli/wrapper_collection.go
@@ -332,7 +332,7 @@ func (c *Collection) GetAllDocIDs(
func (c *Collection) CreateIndex(
ctx context.Context,
- indexDesc client.IndexDescription,
+ indexDesc client.IndexDescriptionCreateRequest,
) (index client.IndexDescription, err error) {
if !c.Description().Name.HasValue() {
return client.IndexDescription{}, client.ErrOperationNotPermittedOnNamelessCols
diff --git a/tests/integration/utils.go b/tests/integration/utils.go
index bf3daffff3..6ab621728e 100644
--- a/tests/integration/utils.go
+++ b/tests/integration/utils.go
@@ -1565,7 +1565,7 @@ func createIndex(
for index, node := range nodes {
nodeID := nodeIDs[index]
collection := s.nodes[nodeID].collections[action.CollectionID]
- indexDesc := client.IndexDescription{
+ indexDesc := client.IndexDescriptionCreateRequest{
Name: action.IndexName,
}
if action.FieldName != "" {
From d6900b7505e3bbf34fb38b09a478e36d11ecbb05 Mon Sep 17 00:00:00 2001
From: Chris Quigley
Date: Mon, 9 Dec 2024 17:20:24 -0500
Subject: [PATCH 44/47] feat: Error if purge request made with dev mode
disabled (#3295)
## Relevant issue(s)
Resolves #3140
## Description
The issue was that if a purge request was made when the database was not
in development mode, it would fail, and an error would be output on the
node side of things. However, on the client side nothing indicated that
the process had failed. Whether the purge was successful, or it failed,
there would be no output.
I have created a new variable in the http package called `IsDevMode`,
which is checked in the `http/handler_extras.go/Purge` function. If dev
mode is enabled then `http.StatusOK` is written to the response header.
If it is not enabled, then `http.StatusBadRequest` is written to the
header instead, with a message indicating what happened.
## Tasks
- [x] I made sure the code is well commented, particularly
hard-to-understand areas.
- [x] I made sure the repository-held documentation is changed
accordingly.
- [x] I made sure the pull request title adheres to the conventional
commit style (the subset used in the project can be found in
[tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)).
- [x] I made sure to discuss its limitations such as threats to
validity, vulnerability to mistake and misuse, robustness to
invalidation of assumptions, resource requirements, ...
## How has this been tested?
The platform(s) on which this was tested:
- Windows
---
cli/start.go | 1 +
http/errors.go | 7 ++++---
http/handler.go | 4 ++++
http/handler_extras.go | 9 ++++++++-
http/handler_extras_test.go | 29 ++++++++++++++++++++++++++++-
5 files changed, 45 insertions(+), 5 deletions(-)
diff --git a/cli/start.go b/cli/start.go
index 0bd1510008..ad2f79c928 100644
--- a/cli/start.go
+++ b/cli/start.go
@@ -135,6 +135,7 @@ func MakeStartCommand() *cobra.Command {
}
isDevMode := cfg.GetBool("development")
+ http.IsDevMode = isDevMode
if isDevMode {
cmd.Printf(devModeBanner)
if cfg.GetBool("keyring.disabled") {
diff --git a/http/errors.go b/http/errors.go
index f1e03d5882..0b6cc78d3d 100644
--- a/http/errors.go
+++ b/http/errors.go
@@ -19,9 +19,10 @@ import (
)
const (
- errFailedToLoadKeys string = "failed to load given keys"
- errMethodIsNotImplemented string = "the method is not implemented"
- errFailedToGetContext string = "failed to get context"
+ errFailedToLoadKeys string = "failed to load given keys"
+ errMethodIsNotImplemented string = "the method is not implemented"
+ errFailedToGetContext string = "failed to get context"
+ errPurgeRequestNonDeveloperMode string = "cannot purge database when development mode is disabled"
)
// Errors returnable from this package.
diff --git a/http/handler.go b/http/handler.go
index 336dfc54d3..7d24245ea4 100644
--- a/http/handler.go
+++ b/http/handler.go
@@ -21,6 +21,10 @@ import (
"github.com/go-chi/chi/v5"
)
+// Global variable for the development mode flag
+// This is checked by the http/handler_extras.go/Purge function to determine which response to send
+var IsDevMode bool = false
+
// Version is the identifier for the current API version.
var Version string = "v0"
diff --git a/http/handler_extras.go b/http/handler_extras.go
index 1f14cc40a7..67cba8d0ef 100644
--- a/http/handler_extras.go
+++ b/http/handler_extras.go
@@ -23,7 +23,14 @@ type extrasHandler struct{}
func (s *extrasHandler) Purge(rw http.ResponseWriter, req *http.Request) {
db := mustGetContextClientDB(req)
- rw.WriteHeader(http.StatusOK) // write the response before we restart to purge
+
+ // Send either 200 or 400 response based on whether the server is in dev mode
+ if IsDevMode {
+ rw.WriteHeader(http.StatusOK)
+ } else {
+ responseJSON(rw, http.StatusBadRequest, errPurgeRequestNonDeveloperMode)
+ }
+
db.Events().Publish(event.NewMessage(event.PurgeName, nil))
}
diff --git a/http/handler_extras_test.go b/http/handler_extras_test.go
index d7d1398e90..870cad02d3 100644
--- a/http/handler_extras_test.go
+++ b/http/handler_extras_test.go
@@ -20,8 +20,11 @@ import (
"github.com/stretchr/testify/require"
)
-func TestPurge(t *testing.T) {
+func TestPurgeDevModeTrue(t *testing.T) {
cdb := setupDatabase(t)
+
+ IsDevMode = true
+
url := "http://localhost:9181/api/v0/purge"
req := httptest.NewRequest(http.MethodPost, url, nil)
@@ -40,3 +43,27 @@ func TestPurge(t *testing.T) {
// test will timeout if purge never received
<-purgeSub.Message()
}
+
+func TestPurgeDevModeFalse(t *testing.T) {
+ cdb := setupDatabase(t)
+
+ IsDevMode = false
+
+ url := "http://localhost:9181/api/v0/purge"
+
+ req := httptest.NewRequest(http.MethodPost, url, nil)
+ rec := httptest.NewRecorder()
+
+ purgeSub, err := cdb.Events().Subscribe(event.PurgeName)
+ require.NoError(t, err)
+
+ handler, err := NewHandler(cdb)
+ require.NoError(t, err)
+ handler.ServeHTTP(rec, req)
+
+ res := rec.Result()
+ require.Equal(t, 400, res.StatusCode)
+
+ // test will timeout if purge never received
+ <-purgeSub.Message()
+}
From 648924e98f11310857ab56c148ac26d87865fc41 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
<41898282+github-actions[bot]@users.noreply.github.com>
Date: Mon, 9 Dec 2024 18:11:42 -0500
Subject: [PATCH 45/47] bot: Update dependencies (bulk dependabot PRs)
10-12-2024 (#3312)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
✅ This PR was created by combining the following PRs:
#3310 bot: Bump @typescript-eslint/eslint-plugin from 8.17.0 to 8.18.0
in /playground
⚠️ The following PR was resolved manually due to merge conflicts:
#3309 bot: Bump @typescript-eslint/parser from 8.17.0 to 8.18.0 in
/playground
This PR was reverted and removed from this merge:
- #3311 bot: Bump github.com/bits-and-blooms/bitset from 1.18.0 to
1.19.0
---------
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Shahzad Lone
---
playground/package-lock.json | 509 ++++-------------------------------
playground/package.json | 4 +-
2 files changed, 60 insertions(+), 453 deletions(-)
diff --git a/playground/package-lock.json b/playground/package-lock.json
index 9207c015d5..2c26c42209 100644
--- a/playground/package-lock.json
+++ b/playground/package-lock.json
@@ -18,8 +18,8 @@
"@types/react": "^18.3.12",
"@types/react-dom": "^18.3.1",
"@types/swagger-ui-react": "^4.18.3",
- "@typescript-eslint/eslint-plugin": "^8.17.0",
- "@typescript-eslint/parser": "^8.17.0",
+ "@typescript-eslint/eslint-plugin": "^8.18.0",
+ "@typescript-eslint/parser": "^8.18.0",
"@vitejs/plugin-react-swc": "^3.7.2",
"eslint": "^9.16.0",
"eslint-plugin-react-hooks": "^5.1.0",
@@ -59,37 +59,6 @@
"integrity": "sha512-hPYRrKFoI+nuckPgDJfyYAkybFvheo4usS0Vw0HNAe+fmGBQA5Az37b/yStO284atBoqqdOUhKJ3d9Zw3PQkcQ==",
"license": "MIT"
},
- "node_modules/@codemirror/language": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.0.0.tgz",
- "integrity": "sha512-rtjk5ifyMzOna1c7PBu7J1VCt0PvA5wy3o8eMVnxMKb7z8KA7JFecvD04dSn14vj/bBaAbqRsGed5OjtofEnLA==",
- "peer": true,
- "dependencies": {
- "@codemirror/state": "^6.0.0",
- "@codemirror/view": "^6.0.0",
- "@lezer/common": "^1.0.0",
- "@lezer/highlight": "^1.0.0",
- "@lezer/lr": "^1.0.0",
- "style-mod": "^4.0.0"
- }
- },
- "node_modules/@codemirror/state": {
- "version": "6.4.1",
- "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.4.1.tgz",
- "integrity": "sha512-QkEyUiLhsJoZkbumGZlswmAhA7CBU02Wrz7zvH4SrcifbsqwlXShVXg65f3v/ts57W3dqyamEriMhij1Z3Zz4A==",
- "peer": true
- },
- "node_modules/@codemirror/view": {
- "version": "6.35.0",
- "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.35.0.tgz",
- "integrity": "sha512-I0tYy63q5XkaWsJ8QRv5h6ves7kvtrBWjBcnf/bzohFJQc5c14a1AQRdE8QpPF9eMp5Mq2FMm59TCj1gDfE7kw==",
- "peer": true,
- "dependencies": {
- "@codemirror/state": "^6.4.0",
- "style-mod": "^4.1.0",
- "w3c-keyname": "^2.2.4"
- }
- },
"node_modules/@emotion/is-prop-valid": {
"version": "0.8.8",
"resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz",
@@ -818,30 +787,6 @@
"url": "https://github.com/sponsors/nzakas"
}
},
- "node_modules/@lezer/common": {
- "version": "1.2.3",
- "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.3.tgz",
- "integrity": "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA==",
- "peer": true
- },
- "node_modules/@lezer/highlight": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.2.1.tgz",
- "integrity": "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA==",
- "peer": true,
- "dependencies": {
- "@lezer/common": "^1.0.0"
- }
- },
- "node_modules/@lezer/lr": {
- "version": "1.4.2",
- "resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.2.tgz",
- "integrity": "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA==",
- "peer": true,
- "dependencies": {
- "@lezer/common": "^1.0.0"
- }
- },
"node_modules/@motionone/animation": {
"version": "10.18.0",
"resolved": "https://registry.npmjs.org/@motionone/animation/-/animation-10.18.0.tgz",
@@ -2658,7 +2603,7 @@
"version": "15.7.13",
"resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.13.tgz",
"integrity": "sha512-hCZTSvwbzWGvhqxp/RqVqwU999pBf2vp7hzIjiYOsl8wqOmUxkQ6ddw1cV3l8811+kdUFus/q4d1Y3E3SyEifA==",
- "devOptional": true,
+ "dev": true,
"license": "MIT"
},
"node_modules/@types/ramda": {
@@ -2674,7 +2619,7 @@
"version": "18.3.12",
"resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.12.tgz",
"integrity": "sha512-D2wOSq/d6Agt28q7rSI3jhU7G6aiuzljDGZ2hTZHIkrTLUI+AF3WMeKkEZ9nN2fkBAlcktT6vcZjDFiIhMYEQw==",
- "devOptional": true,
+ "dev": true,
"license": "MIT",
"dependencies": {
"@types/prop-types": "*",
@@ -2685,7 +2630,7 @@
"version": "18.3.1",
"resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.1.tgz",
"integrity": "sha512-qW1Mfv8taImTthu4KoXgDfLuk4bydU6Q/TkADnDWWHwi4NX4BR+LWfTp2sVmTqRrsHvyDDTelgelxJ+SsejKKQ==",
- "devOptional": true,
+ "dev": true,
"license": "MIT",
"dependencies": {
"@types/react": "*"
@@ -2723,16 +2668,16 @@
"license": "MIT"
},
"node_modules/@typescript-eslint/eslint-plugin": {
- "version": "8.17.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.17.0.tgz",
- "integrity": "sha512-HU1KAdW3Tt8zQkdvNoIijfWDMvdSweFYm4hWh+KwhPstv+sCmWb89hCIP8msFm9N1R/ooh9honpSuvqKWlYy3w==",
+ "version": "8.18.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.18.0.tgz",
+ "integrity": "sha512-NR2yS7qUqCL7AIxdJUQf2MKKNDVNaig/dEB0GBLU7D+ZdHgK1NoH/3wsgO3OnPVipn51tG3MAwaODEGil70WEw==",
"dev": true,
"dependencies": {
"@eslint-community/regexpp": "^4.10.0",
- "@typescript-eslint/scope-manager": "8.17.0",
- "@typescript-eslint/type-utils": "8.17.0",
- "@typescript-eslint/utils": "8.17.0",
- "@typescript-eslint/visitor-keys": "8.17.0",
+ "@typescript-eslint/scope-manager": "8.18.0",
+ "@typescript-eslint/type-utils": "8.18.0",
+ "@typescript-eslint/utils": "8.18.0",
+ "@typescript-eslint/visitor-keys": "8.18.0",
"graphemer": "^1.4.0",
"ignore": "^5.3.1",
"natural-compare": "^1.4.0",
@@ -2747,83 +2692,20 @@
},
"peerDependencies": {
"@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0",
- "eslint": "^8.57.0 || ^9.0.0"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": {
- "version": "8.17.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.17.0.tgz",
- "integrity": "sha512-/ewp4XjvnxaREtqsZjF4Mfn078RD/9GmiEAtTeLQ7yFdKnqwTOgRMSvFz4et9U5RiJQ15WTGXPLj89zGusvxBg==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.17.0",
- "@typescript-eslint/visitor-keys": "8.17.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": {
- "version": "8.17.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.17.0.tgz",
- "integrity": "sha512-gY2TVzeve3z6crqh2Ic7Cr+CAv6pfb0Egee7J5UAVWCpVvDI/F71wNfolIim4FE6hT15EbpZFVUj9j5i38jYXA==",
- "dev": true,
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": {
- "version": "8.17.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.17.0.tgz",
- "integrity": "sha512-1Hm7THLpO6ww5QU6H/Qp+AusUUl+z/CAm3cNZZ0jQvon9yicgO7Rwd+/WWRpMKLYV6p2UvdbR27c86rzCPpreg==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.17.0",
- "eslint-visitor-keys": "^4.2.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/eslint-plugin/node_modules/eslint-visitor-keys": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz",
- "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==",
- "dev": true,
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
+ "eslint": "^8.57.0 || ^9.0.0",
+ "typescript": ">=4.8.4 <5.8.0"
}
},
"node_modules/@typescript-eslint/parser": {
- "version": "8.17.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.17.0.tgz",
- "integrity": "sha512-Drp39TXuUlD49F7ilHHCG7TTg8IkA+hxCuULdmzWYICxGXvDXmDmWEjJYZQYgf6l/TFfYNE167m7isnc3xlIEg==",
+ "version": "8.18.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.18.0.tgz",
+ "integrity": "sha512-hgUZ3kTEpVzKaK3uNibExUYm6SKKOmTU2BOxBSvOYwtJEPdVQ70kZJpPjstlnhCHcuc2WGfSbpKlb/69ttyN5Q==",
"dev": true,
"dependencies": {
- "@typescript-eslint/scope-manager": "8.17.0",
- "@typescript-eslint/types": "8.17.0",
- "@typescript-eslint/typescript-estree": "8.17.0",
- "@typescript-eslint/visitor-keys": "8.17.0",
+ "@typescript-eslint/scope-manager": "8.18.0",
+ "@typescript-eslint/types": "8.18.0",
+ "@typescript-eslint/typescript-estree": "8.18.0",
+ "@typescript-eslint/visitor-keys": "8.18.0",
"debug": "^4.3.4"
},
"engines": {
@@ -2834,110 +2716,18 @@
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
- "eslint": "^8.57.0 || ^9.0.0"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": {
- "version": "8.17.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.17.0.tgz",
- "integrity": "sha512-/ewp4XjvnxaREtqsZjF4Mfn078RD/9GmiEAtTeLQ7yFdKnqwTOgRMSvFz4et9U5RiJQ15WTGXPLj89zGusvxBg==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.17.0",
- "@typescript-eslint/visitor-keys": "8.17.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": {
- "version": "8.17.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.17.0.tgz",
- "integrity": "sha512-gY2TVzeve3z6crqh2Ic7Cr+CAv6pfb0Egee7J5UAVWCpVvDI/F71wNfolIim4FE6hT15EbpZFVUj9j5i38jYXA==",
- "dev": true,
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": {
- "version": "8.17.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.17.0.tgz",
- "integrity": "sha512-JqkOopc1nRKZpX+opvKqnM3XUlM7LpFMD0lYxTqOTKQfCWAmxw45e3qlOCsEqEB2yuacujivudOFpCnqkBDNMw==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.17.0",
- "@typescript-eslint/visitor-keys": "8.17.0",
- "debug": "^4.3.4",
- "fast-glob": "^3.3.2",
- "is-glob": "^4.0.3",
- "minimatch": "^9.0.4",
- "semver": "^7.6.0",
- "ts-api-utils": "^1.3.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": {
- "version": "8.17.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.17.0.tgz",
- "integrity": "sha512-1Hm7THLpO6ww5QU6H/Qp+AusUUl+z/CAm3cNZZ0jQvon9yicgO7Rwd+/WWRpMKLYV6p2UvdbR27c86rzCPpreg==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.17.0",
- "eslint-visitor-keys": "^4.2.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/parser/node_modules/eslint-visitor-keys": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz",
- "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==",
- "dev": true,
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
+ "eslint": "^8.57.0 || ^9.0.0",
+ "typescript": ">=4.8.4 <5.8.0"
}
},
"node_modules/@typescript-eslint/scope-manager": {
- "version": "8.16.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.16.0.tgz",
- "integrity": "sha512-mwsZWubQvBki2t5565uxF0EYvG+FwdFb8bMtDuGQLdCCnGPrDEDvm1gtfynuKlnpzeBRqdFCkMf9jg1fnAK8sg==",
+ "version": "8.18.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.18.0.tgz",
+ "integrity": "sha512-PNGcHop0jkK2WVYGotk/hxj+UFLhXtGPiGtiaWgVBVP1jhMoMCHlTyJA+hEj4rszoSdLTK3fN4oOatrL0Cp+Xw==",
"dev": true,
- "license": "MIT",
"dependencies": {
- "@typescript-eslint/types": "8.16.0",
- "@typescript-eslint/visitor-keys": "8.16.0"
+ "@typescript-eslint/types": "8.18.0",
+ "@typescript-eslint/visitor-keys": "8.18.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2948,13 +2738,13 @@
}
},
"node_modules/@typescript-eslint/type-utils": {
- "version": "8.17.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.17.0.tgz",
- "integrity": "sha512-q38llWJYPd63rRnJ6wY/ZQqIzPrBCkPdpIsaCfkR3Q4t3p6sb422zougfad4TFW9+ElIFLVDzWGiGAfbb/v2qw==",
+ "version": "8.18.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.18.0.tgz",
+ "integrity": "sha512-er224jRepVAVLnMF2Q7MZJCq5CsdH2oqjP4dT7K6ij09Kyd+R21r7UVJrF0buMVdZS5QRhDzpvzAxHxabQadow==",
"dev": true,
"dependencies": {
- "@typescript-eslint/typescript-estree": "8.17.0",
- "@typescript-eslint/utils": "8.17.0",
+ "@typescript-eslint/typescript-estree": "8.18.0",
+ "@typescript-eslint/utils": "8.18.0",
"debug": "^4.3.4",
"ts-api-utils": "^1.3.0"
},
@@ -2966,90 +2756,15 @@
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
- "eslint": "^8.57.0 || ^9.0.0"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": {
- "version": "8.17.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.17.0.tgz",
- "integrity": "sha512-gY2TVzeve3z6crqh2Ic7Cr+CAv6pfb0Egee7J5UAVWCpVvDI/F71wNfolIim4FE6hT15EbpZFVUj9j5i38jYXA==",
- "dev": true,
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": {
- "version": "8.17.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.17.0.tgz",
- "integrity": "sha512-JqkOopc1nRKZpX+opvKqnM3XUlM7LpFMD0lYxTqOTKQfCWAmxw45e3qlOCsEqEB2yuacujivudOFpCnqkBDNMw==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.17.0",
- "@typescript-eslint/visitor-keys": "8.17.0",
- "debug": "^4.3.4",
- "fast-glob": "^3.3.2",
- "is-glob": "^4.0.3",
- "minimatch": "^9.0.4",
- "semver": "^7.6.0",
- "ts-api-utils": "^1.3.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": {
- "version": "8.17.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.17.0.tgz",
- "integrity": "sha512-1Hm7THLpO6ww5QU6H/Qp+AusUUl+z/CAm3cNZZ0jQvon9yicgO7Rwd+/WWRpMKLYV6p2UvdbR27c86rzCPpreg==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.17.0",
- "eslint-visitor-keys": "^4.2.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/type-utils/node_modules/eslint-visitor-keys": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz",
- "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==",
- "dev": true,
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
+ "eslint": "^8.57.0 || ^9.0.0",
+ "typescript": ">=4.8.4 <5.8.0"
}
},
"node_modules/@typescript-eslint/types": {
- "version": "8.16.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.16.0.tgz",
- "integrity": "sha512-NzrHj6thBAOSE4d9bsuRNMvk+BvaQvmY4dDglgkgGC0EW/tB3Kelnp3tAKH87GEwzoxgeQn9fNGRyFJM/xd+GQ==",
+ "version": "8.18.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.18.0.tgz",
+ "integrity": "sha512-FNYxgyTCAnFwTrzpBGq+zrnoTO4x0c1CKYY5MuUTzpScqmY5fmsh2o3+57lqdI3NZucBDCzDgdEbIaNfAjAHQA==",
"dev": true,
- "license": "MIT",
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
@@ -3059,14 +2774,13 @@
}
},
"node_modules/@typescript-eslint/typescript-estree": {
- "version": "8.16.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.16.0.tgz",
- "integrity": "sha512-E2+9IzzXMc1iaBy9zmo+UYvluE3TW7bCGWSF41hVWUE01o8nzr1rvOQYSxelxr6StUvRcTMe633eY8mXASMaNw==",
+ "version": "8.18.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.18.0.tgz",
+ "integrity": "sha512-rqQgFRu6yPkauz+ms3nQpohwejS8bvgbPyIDq13cgEDbkXt4LH4OkDMT0/fN1RUtzG8e8AKJyDBoocuQh8qNeg==",
"dev": true,
- "license": "BSD-2-Clause",
"dependencies": {
- "@typescript-eslint/types": "8.16.0",
- "@typescript-eslint/visitor-keys": "8.16.0",
+ "@typescript-eslint/types": "8.18.0",
+ "@typescript-eslint/visitor-keys": "8.18.0",
"debug": "^4.3.4",
"fast-glob": "^3.3.2",
"is-glob": "^4.0.3",
@@ -3081,22 +2795,20 @@
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
},
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
+ "peerDependencies": {
+ "typescript": ">=4.8.4 <5.8.0"
}
},
"node_modules/@typescript-eslint/utils": {
- "version": "8.17.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.17.0.tgz",
- "integrity": "sha512-bQC8BnEkxqG8HBGKwG9wXlZqg37RKSMY7v/X8VEWD8JG2JuTHuNK0VFvMPMUKQcbk6B+tf05k+4AShAEtCtJ/w==",
+ "version": "8.18.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.18.0.tgz",
+ "integrity": "sha512-p6GLdY383i7h5b0Qrfbix3Vc3+J2k6QWw6UMUeY5JGfm3C5LbZ4QIZzJNoNOfgyRe0uuYKjvVOsO/jD4SJO+xg==",
"dev": true,
"dependencies": {
"@eslint-community/eslint-utils": "^4.4.0",
- "@typescript-eslint/scope-manager": "8.17.0",
- "@typescript-eslint/types": "8.17.0",
- "@typescript-eslint/typescript-estree": "8.17.0"
+ "@typescript-eslint/scope-manager": "8.18.0",
+ "@typescript-eslint/types": "8.18.0",
+ "@typescript-eslint/typescript-estree": "8.18.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -3106,109 +2818,17 @@
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
- "eslint": "^8.57.0 || ^9.0.0"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": {
- "version": "8.17.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.17.0.tgz",
- "integrity": "sha512-/ewp4XjvnxaREtqsZjF4Mfn078RD/9GmiEAtTeLQ7yFdKnqwTOgRMSvFz4et9U5RiJQ15WTGXPLj89zGusvxBg==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.17.0",
- "@typescript-eslint/visitor-keys": "8.17.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": {
- "version": "8.17.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.17.0.tgz",
- "integrity": "sha512-gY2TVzeve3z6crqh2Ic7Cr+CAv6pfb0Egee7J5UAVWCpVvDI/F71wNfolIim4FE6hT15EbpZFVUj9j5i38jYXA==",
- "dev": true,
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": {
- "version": "8.17.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.17.0.tgz",
- "integrity": "sha512-JqkOopc1nRKZpX+opvKqnM3XUlM7LpFMD0lYxTqOTKQfCWAmxw45e3qlOCsEqEB2yuacujivudOFpCnqkBDNMw==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.17.0",
- "@typescript-eslint/visitor-keys": "8.17.0",
- "debug": "^4.3.4",
- "fast-glob": "^3.3.2",
- "is-glob": "^4.0.3",
- "minimatch": "^9.0.4",
- "semver": "^7.6.0",
- "ts-api-utils": "^1.3.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": {
- "version": "8.17.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.17.0.tgz",
- "integrity": "sha512-1Hm7THLpO6ww5QU6H/Qp+AusUUl+z/CAm3cNZZ0jQvon9yicgO7Rwd+/WWRpMKLYV6p2UvdbR27c86rzCPpreg==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "8.17.0",
- "eslint-visitor-keys": "^4.2.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/utils/node_modules/eslint-visitor-keys": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz",
- "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==",
- "dev": true,
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
+ "eslint": "^8.57.0 || ^9.0.0",
+ "typescript": ">=4.8.4 <5.8.0"
}
},
"node_modules/@typescript-eslint/visitor-keys": {
- "version": "8.16.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.16.0.tgz",
- "integrity": "sha512-pq19gbaMOmFE3CbL0ZB8J8BFCo2ckfHBfaIsaOZgBIF4EoISJIdLX5xRhd0FGB0LlHReNRuzoJoMGpTjq8F2CQ==",
+ "version": "8.18.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.18.0.tgz",
+ "integrity": "sha512-pCh/qEA8Lb1wVIqNvBke8UaRjJ6wrAWkJO5yyIbs8Yx6TNGYyfNjOo61tLv+WwLvoLPp4BQ8B7AHKijl8NGUfw==",
"dev": true,
- "license": "MIT",
"dependencies": {
- "@typescript-eslint/types": "8.16.0",
+ "@typescript-eslint/types": "8.18.0",
"eslint-visitor-keys": "^4.2.0"
},
"engines": {
@@ -3224,7 +2844,6 @@
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz",
"integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==",
"dev": true,
- "license": "Apache-2.0",
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
@@ -3609,7 +3228,7 @@
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz",
"integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==",
- "devOptional": true,
+ "dev": true,
"license": "MIT"
},
"node_modules/debounce-promise": {
@@ -5810,12 +5429,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
- "node_modules/style-mod": {
- "version": "4.1.2",
- "resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.2.tgz",
- "integrity": "sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw==",
- "peer": true
- },
"node_modules/style-value-types": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/style-value-types/-/style-value-types-5.0.0.tgz",
@@ -6202,12 +5815,6 @@
"integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==",
"license": "MIT"
},
- "node_modules/w3c-keyname": {
- "version": "2.2.8",
- "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz",
- "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==",
- "peer": true
- },
"node_modules/web-streams-polyfill": {
"version": "3.3.3",
"resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz",
diff --git a/playground/package.json b/playground/package.json
index 89b6ab6d08..d5c542cb28 100644
--- a/playground/package.json
+++ b/playground/package.json
@@ -20,8 +20,8 @@
"@types/react": "^18.3.12",
"@types/react-dom": "^18.3.1",
"@types/swagger-ui-react": "^4.18.3",
- "@typescript-eslint/eslint-plugin": "^8.17.0",
- "@typescript-eslint/parser": "^8.17.0",
+ "@typescript-eslint/eslint-plugin": "^8.18.0",
+ "@typescript-eslint/parser": "^8.18.0",
"@vitejs/plugin-react-swc": "^3.7.2",
"eslint": "^9.16.0",
"eslint-plugin-react-hooks": "^5.1.0",
From e1502c5fcb630486023e881de029eb2331530cf3 Mon Sep 17 00:00:00 2001
From: Keenan Nemetz
Date: Wed, 11 Dec 2024 10:06:27 -0800
Subject: [PATCH 46/47] fix(i): Aggregate filter returns one result (#3316)
## Relevant issue(s)
Resolves #3313
## Description
This PR fixes an issue where aggregate filters would only return one
result when the first result did not match.
## Tasks
- [x] I made sure the code is well commented, particularly
hard-to-understand areas.
- [x] I made sure the repository-held documentation is changed
accordingly.
- [x] I made sure the pull request title adheres to the conventional
commit style (the subset used in the project can be found in
[tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)).
- [x] I made sure to discuss its limitations such as threats to
validity, vulnerability to mistake and misuse, robustness to
invalidation of assumptions, resource requirements, ...
## How has this been tested?
Added integration tests.
Specify the platform(s) on which this was tested:
- MacOS
---
internal/planner/average.go | 61 +++--
internal/planner/count.go | 115 +++++----
internal/planner/max.go | 242 +++++++++---------
internal/planner/min.go | 242 +++++++++---------
internal/planner/sum.go | 183 ++++++-------
.../query/one_to_many/with_average_test.go | 153 +++++++++++
.../query/one_to_many/with_count_test.go | 66 +++++
.../query/one_to_many/with_max_test.go | 153 +++++++++++
.../query/one_to_many/with_min_test.go | 153 +++++++++++
.../query/one_to_many/with_sum_test.go | 153 +++++++++++
10 files changed, 1123 insertions(+), 398 deletions(-)
create mode 100644 tests/integration/query/one_to_many/with_average_test.go
create mode 100644 tests/integration/query/one_to_many/with_max_test.go
create mode 100644 tests/integration/query/one_to_many/with_min_test.go
create mode 100644 tests/integration/query/one_to_many/with_sum_test.go
diff --git a/internal/planner/average.go b/internal/planner/average.go
index 76bbfc107d..022ff014b6 100644
--- a/internal/planner/average.go
+++ b/internal/planner/average.go
@@ -75,38 +75,47 @@ func (n *averageNode) Close() error { return n.plan.Close()
func (n *averageNode) Source() planNode { return n.plan }
func (n *averageNode) Next() (bool, error) {
- n.execInfo.iterations++
+ for {
+ n.execInfo.iterations++
- hasNext, err := n.plan.Next()
- if err != nil || !hasNext {
- return hasNext, err
- }
+ hasNext, err := n.plan.Next()
+ if err != nil || !hasNext {
+ return hasNext, err
+ }
- n.currentValue = n.plan.Value()
+ n.currentValue = n.plan.Value()
- countProp := n.currentValue.Fields[n.countFieldIndex]
- typedCount, isInt := countProp.(int)
- if !isInt {
- return false, client.NewErrUnexpectedType[int]("count", countProp)
- }
- count := typedCount
+ countProp := n.currentValue.Fields[n.countFieldIndex]
+ typedCount, isInt := countProp.(int)
+ if !isInt {
+ return false, client.NewErrUnexpectedType[int]("count", countProp)
+ }
+ count := typedCount
- if count == 0 {
- n.currentValue.Fields[n.virtualFieldIndex] = float64(0)
- return true, nil
- }
+ if count == 0 {
+ n.currentValue.Fields[n.virtualFieldIndex] = float64(0)
+ return true, nil
+ }
- sumProp := n.currentValue.Fields[n.sumFieldIndex]
- switch sum := sumProp.(type) {
- case float64:
- n.currentValue.Fields[n.virtualFieldIndex] = sum / float64(count)
- case int64:
- n.currentValue.Fields[n.virtualFieldIndex] = float64(sum) / float64(count)
- default:
- return false, client.NewErrUnhandledType("sum", sumProp)
- }
+ sumProp := n.currentValue.Fields[n.sumFieldIndex]
+ switch sum := sumProp.(type) {
+ case float64:
+ n.currentValue.Fields[n.virtualFieldIndex] = sum / float64(count)
+ case int64:
+ n.currentValue.Fields[n.virtualFieldIndex] = float64(sum) / float64(count)
+ default:
+ return false, client.NewErrUnhandledType("sum", sumProp)
+ }
- return mapper.RunFilter(n.currentValue, n.aggregateFilter)
+ passes, err := mapper.RunFilter(n.currentValue, n.aggregateFilter)
+ if err != nil {
+ return false, err
+ }
+ if !passes {
+ continue
+ }
+ return true, nil
+ }
}
func (n *averageNode) SetPlan(p planNode) { n.plan = p }
diff --git a/internal/planner/count.go b/internal/planner/count.go
index 1b58109749..a7b243bb8d 100644
--- a/internal/planner/count.go
+++ b/internal/planner/count.go
@@ -125,65 +125,74 @@ func (n *countNode) Explain(explainType request.ExplainType) (map[string]any, er
}
func (n *countNode) Next() (bool, error) {
- n.execInfo.iterations++
+ for {
+ n.execInfo.iterations++
- hasValue, err := n.plan.Next()
- if err != nil || !hasValue {
- return hasValue, err
- }
-
- n.currentValue = n.plan.Value()
- // Can just scan for now, can be replaced later by something fancier if needed
- var count int
- for _, source := range n.aggregateMapping {
- property := n.currentValue.Fields[source.Index]
- v := reflect.ValueOf(property)
- switch v.Kind() {
- // v.Len will panic if v is not one of these types, we don't want it to panic
- case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice, reflect.String:
- if source.Filter == nil && source.Limit == nil {
- count = count + v.Len()
- } else {
- var arrayCount int
- var err error
- switch array := property.(type) {
- case []core.Doc:
- arrayCount = countDocs(array)
-
- case []bool:
- arrayCount, err = countItems(array, source.Filter, source.Limit)
-
- case []immutable.Option[bool]:
- arrayCount, err = countItems(array, source.Filter, source.Limit)
-
- case []int64:
- arrayCount, err = countItems(array, source.Filter, source.Limit)
-
- case []immutable.Option[int64]:
- arrayCount, err = countItems(array, source.Filter, source.Limit)
-
- case []float64:
- arrayCount, err = countItems(array, source.Filter, source.Limit)
-
- case []immutable.Option[float64]:
- arrayCount, err = countItems(array, source.Filter, source.Limit)
-
- case []string:
- arrayCount, err = countItems(array, source.Filter, source.Limit)
+ hasValue, err := n.plan.Next()
+ if err != nil || !hasValue {
+ return hasValue, err
+ }
- case []immutable.Option[string]:
- arrayCount, err = countItems(array, source.Filter, source.Limit)
- }
- if err != nil {
- return false, err
+ n.currentValue = n.plan.Value()
+ // Can just scan for now, can be replaced later by something fancier if needed
+ var count int
+ for _, source := range n.aggregateMapping {
+ property := n.currentValue.Fields[source.Index]
+ v := reflect.ValueOf(property)
+ switch v.Kind() {
+ // v.Len will panic if v is not one of these types, we don't want it to panic
+ case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice, reflect.String:
+ if source.Filter == nil && source.Limit == nil {
+ count = count + v.Len()
+ } else {
+ var arrayCount int
+ var err error
+ switch array := property.(type) {
+ case []core.Doc:
+ arrayCount = countDocs(array)
+
+ case []bool:
+ arrayCount, err = countItems(array, source.Filter, source.Limit)
+
+ case []immutable.Option[bool]:
+ arrayCount, err = countItems(array, source.Filter, source.Limit)
+
+ case []int64:
+ arrayCount, err = countItems(array, source.Filter, source.Limit)
+
+ case []immutable.Option[int64]:
+ arrayCount, err = countItems(array, source.Filter, source.Limit)
+
+ case []float64:
+ arrayCount, err = countItems(array, source.Filter, source.Limit)
+
+ case []immutable.Option[float64]:
+ arrayCount, err = countItems(array, source.Filter, source.Limit)
+
+ case []string:
+ arrayCount, err = countItems(array, source.Filter, source.Limit)
+
+ case []immutable.Option[string]:
+ arrayCount, err = countItems(array, source.Filter, source.Limit)
+ }
+ if err != nil {
+ return false, err
+ }
+ count += arrayCount
}
- count += arrayCount
}
}
- }
+ n.currentValue.Fields[n.virtualFieldIndex] = count
- n.currentValue.Fields[n.virtualFieldIndex] = count
- return mapper.RunFilter(n.currentValue, n.aggregateFilter)
+ passes, err := mapper.RunFilter(n.currentValue, n.aggregateFilter)
+ if err != nil {
+ return false, err
+ }
+ if !passes {
+ continue
+ }
+ return true, nil
+ }
}
// countDocs counts the number of documents in a slice, skipping over hidden items
diff --git a/internal/planner/max.go b/internal/planner/max.go
index 530e60e25e..502b401d8f 100644
--- a/internal/planner/max.go
+++ b/internal/planner/max.go
@@ -124,136 +124,146 @@ func (n *maxNode) Explain(explainType request.ExplainType) (map[string]any, erro
}
func (n *maxNode) Next() (bool, error) {
- n.execInfo.iterations++
+ for {
+ n.execInfo.iterations++
- hasNext, err := n.plan.Next()
- if err != nil || !hasNext {
- return hasNext, err
- }
- n.currentValue = n.plan.Value()
+ hasNext, err := n.plan.Next()
+ if err != nil || !hasNext {
+ return hasNext, err
+ }
+ n.currentValue = n.plan.Value()
- var max *big.Float
- isFloat := false
+ var max *big.Float
+ isFloat := false
- for _, source := range n.aggregateMapping {
- child := n.currentValue.Fields[source.Index]
- var collectionMax *big.Float
- var err error
- switch childCollection := child.(type) {
- case []core.Doc:
- collectionMax = reduceDocs(
- childCollection,
- nil,
- func(childItem core.Doc, value *big.Float) *big.Float {
- childProperty := childItem.Fields[source.ChildTarget.Index]
- res := &big.Float{}
- switch v := childProperty.(type) {
- case int:
- res = res.SetInt64(int64(v))
- case int64:
- res = res.SetInt64(v)
- case uint64:
- res = res.SetUint64(v)
- case float64:
- res = res.SetFloat64(v)
- default:
- return nil
- }
- if value == nil || res.Cmp(value) > 0 {
- return res
- }
- return value
- },
- )
+ for _, source := range n.aggregateMapping {
+ child := n.currentValue.Fields[source.Index]
+ var collectionMax *big.Float
+ var err error
+ switch childCollection := child.(type) {
+ case []core.Doc:
+ collectionMax = reduceDocs(
+ childCollection,
+ nil,
+ func(childItem core.Doc, value *big.Float) *big.Float {
+ childProperty := childItem.Fields[source.ChildTarget.Index]
+ res := &big.Float{}
+ switch v := childProperty.(type) {
+ case int:
+ res = res.SetInt64(int64(v))
+ case int64:
+ res = res.SetInt64(v)
+ case uint64:
+ res = res.SetUint64(v)
+ case float64:
+ res = res.SetFloat64(v)
+ default:
+ return nil
+ }
+ if value == nil || res.Cmp(value) > 0 {
+ return res
+ }
+ return value
+ },
+ )
- case []int64:
- collectionMax, err = reduceItems(
- childCollection,
- &source,
- lessN[int64],
- nil,
- func(childItem int64, value *big.Float) *big.Float {
- res := (&big.Float{}).SetInt64(childItem)
- if value == nil || res.Cmp(value) > 0 {
- return res
- }
- return value
- },
- )
+ case []int64:
+ collectionMax, err = reduceItems(
+ childCollection,
+ &source,
+ lessN[int64],
+ nil,
+ func(childItem int64, value *big.Float) *big.Float {
+ res := (&big.Float{}).SetInt64(childItem)
+ if value == nil || res.Cmp(value) > 0 {
+ return res
+ }
+ return value
+ },
+ )
- case []immutable.Option[int64]:
- collectionMax, err = reduceItems(
- childCollection,
- &source,
- lessO[int64],
- nil,
- func(childItem immutable.Option[int64], value *big.Float) *big.Float {
- if !childItem.HasValue() {
+ case []immutable.Option[int64]:
+ collectionMax, err = reduceItems(
+ childCollection,
+ &source,
+ lessO[int64],
+ nil,
+ func(childItem immutable.Option[int64], value *big.Float) *big.Float {
+ if !childItem.HasValue() {
+ return value
+ }
+ res := (&big.Float{}).SetInt64(childItem.Value())
+ if value == nil || res.Cmp(value) > 0 {
+ return res
+ }
return value
- }
- res := (&big.Float{}).SetInt64(childItem.Value())
- if value == nil || res.Cmp(value) > 0 {
- return res
- }
- return value
- },
- )
+ },
+ )
- case []float64:
- collectionMax, err = reduceItems(
- childCollection,
- &source,
- lessN[float64],
- nil,
- func(childItem float64, value *big.Float) *big.Float {
- res := big.NewFloat(childItem)
- if value == nil || res.Cmp(value) > 0 {
- return res
- }
- return value
- },
- )
+ case []float64:
+ collectionMax, err = reduceItems(
+ childCollection,
+ &source,
+ lessN[float64],
+ nil,
+ func(childItem float64, value *big.Float) *big.Float {
+ res := big.NewFloat(childItem)
+ if value == nil || res.Cmp(value) > 0 {
+ return res
+ }
+ return value
+ },
+ )
- case []immutable.Option[float64]:
- collectionMax, err = reduceItems(
- childCollection,
- &source,
- lessO[float64],
- nil,
- func(childItem immutable.Option[float64], value *big.Float) *big.Float {
- if !childItem.HasValue() {
+ case []immutable.Option[float64]:
+ collectionMax, err = reduceItems(
+ childCollection,
+ &source,
+ lessO[float64],
+ nil,
+ func(childItem immutable.Option[float64], value *big.Float) *big.Float {
+ if !childItem.HasValue() {
+ return value
+ }
+ res := big.NewFloat(childItem.Value())
+ if value == nil || res.Cmp(value) > 0 {
+ return res
+ }
return value
- }
- res := big.NewFloat(childItem.Value())
- if value == nil || res.Cmp(value) > 0 {
- return res
- }
- return value
- },
- )
+ },
+ )
+ }
+ if err != nil {
+ return false, err
+ }
+ if collectionMax == nil || (max != nil && collectionMax.Cmp(max) <= 0) {
+ continue
+ }
+ isTargetFloat, err := n.p.isValueFloat(n.parent, &source)
+ if err != nil {
+ return false, err
+ }
+ isFloat = isTargetFloat
+ max = collectionMax
}
+
+ if max == nil {
+ n.currentValue.Fields[n.virtualFieldIndex] = nil
+ } else if isFloat {
+ res, _ := max.Float64()
+ n.currentValue.Fields[n.virtualFieldIndex] = res
+ } else {
+ res, _ := max.Int64()
+ n.currentValue.Fields[n.virtualFieldIndex] = res
+ }
+
+ passes, err := mapper.RunFilter(n.currentValue, n.aggregateFilter)
if err != nil {
return false, err
}
- if collectionMax == nil || (max != nil && collectionMax.Cmp(max) <= 0) {
+ if !passes {
continue
}
- isTargetFloat, err := n.p.isValueFloat(n.parent, &source)
- if err != nil {
- return false, err
- }
- isFloat = isTargetFloat
- max = collectionMax
- }
-
- if max == nil {
- n.currentValue.Fields[n.virtualFieldIndex] = nil
- } else if isFloat {
- res, _ := max.Float64()
- n.currentValue.Fields[n.virtualFieldIndex] = res
- } else {
- res, _ := max.Int64()
- n.currentValue.Fields[n.virtualFieldIndex] = res
+ return true, nil
}
- return mapper.RunFilter(n.currentValue, n.aggregateFilter)
}
diff --git a/internal/planner/min.go b/internal/planner/min.go
index be70a8ccb9..ca67d8d553 100644
--- a/internal/planner/min.go
+++ b/internal/planner/min.go
@@ -124,136 +124,146 @@ func (n *minNode) Explain(explainType request.ExplainType) (map[string]any, erro
}
func (n *minNode) Next() (bool, error) {
- n.execInfo.iterations++
+ for {
+ n.execInfo.iterations++
- hasNext, err := n.plan.Next()
- if err != nil || !hasNext {
- return hasNext, err
- }
- n.currentValue = n.plan.Value()
+ hasNext, err := n.plan.Next()
+ if err != nil || !hasNext {
+ return hasNext, err
+ }
+ n.currentValue = n.plan.Value()
- var min *big.Float
- isFloat := false
+ var min *big.Float
+ isFloat := false
- for _, source := range n.aggregateMapping {
- child := n.currentValue.Fields[source.Index]
- var collectionMin *big.Float
- var err error
- switch childCollection := child.(type) {
- case []core.Doc:
- collectionMin = reduceDocs(
- childCollection,
- nil,
- func(childItem core.Doc, value *big.Float) *big.Float {
- childProperty := childItem.Fields[source.ChildTarget.Index]
- res := &big.Float{}
- switch v := childProperty.(type) {
- case int:
- res = res.SetInt64(int64(v))
- case int64:
- res = res.SetInt64(v)
- case uint64:
- res = res.SetUint64(v)
- case float64:
- res = res.SetFloat64(v)
- default:
- return nil
- }
- if value == nil || res.Cmp(value) < 0 {
- return res
- }
- return value
- },
- )
+ for _, source := range n.aggregateMapping {
+ child := n.currentValue.Fields[source.Index]
+ var collectionMin *big.Float
+ var err error
+ switch childCollection := child.(type) {
+ case []core.Doc:
+ collectionMin = reduceDocs(
+ childCollection,
+ nil,
+ func(childItem core.Doc, value *big.Float) *big.Float {
+ childProperty := childItem.Fields[source.ChildTarget.Index]
+ res := &big.Float{}
+ switch v := childProperty.(type) {
+ case int:
+ res = res.SetInt64(int64(v))
+ case int64:
+ res = res.SetInt64(v)
+ case uint64:
+ res = res.SetUint64(v)
+ case float64:
+ res = res.SetFloat64(v)
+ default:
+ return nil
+ }
+ if value == nil || res.Cmp(value) < 0 {
+ return res
+ }
+ return value
+ },
+ )
- case []int64:
- collectionMin, err = reduceItems(
- childCollection,
- &source,
- lessN[int64],
- nil,
- func(childItem int64, value *big.Float) *big.Float {
- res := (&big.Float{}).SetInt64(childItem)
- if value == nil || res.Cmp(value) < 0 {
- return res
- }
- return value
- },
- )
+ case []int64:
+ collectionMin, err = reduceItems(
+ childCollection,
+ &source,
+ lessN[int64],
+ nil,
+ func(childItem int64, value *big.Float) *big.Float {
+ res := (&big.Float{}).SetInt64(childItem)
+ if value == nil || res.Cmp(value) < 0 {
+ return res
+ }
+ return value
+ },
+ )
- case []immutable.Option[int64]:
- collectionMin, err = reduceItems(
- childCollection,
- &source,
- lessO[int64],
- nil,
- func(childItem immutable.Option[int64], value *big.Float) *big.Float {
- if !childItem.HasValue() {
+ case []immutable.Option[int64]:
+ collectionMin, err = reduceItems(
+ childCollection,
+ &source,
+ lessO[int64],
+ nil,
+ func(childItem immutable.Option[int64], value *big.Float) *big.Float {
+ if !childItem.HasValue() {
+ return value
+ }
+ res := (&big.Float{}).SetInt64(childItem.Value())
+ if value == nil || res.Cmp(value) < 0 {
+ return res
+ }
return value
- }
- res := (&big.Float{}).SetInt64(childItem.Value())
- if value == nil || res.Cmp(value) < 0 {
- return res
- }
- return value
- },
- )
+ },
+ )
- case []float64:
- collectionMin, err = reduceItems(
- childCollection,
- &source,
- lessN[float64],
- nil,
- func(childItem float64, value *big.Float) *big.Float {
- res := big.NewFloat(childItem)
- if value == nil || res.Cmp(value) < 0 {
- return res
- }
- return value
- },
- )
+ case []float64:
+ collectionMin, err = reduceItems(
+ childCollection,
+ &source,
+ lessN[float64],
+ nil,
+ func(childItem float64, value *big.Float) *big.Float {
+ res := big.NewFloat(childItem)
+ if value == nil || res.Cmp(value) < 0 {
+ return res
+ }
+ return value
+ },
+ )
- case []immutable.Option[float64]:
- collectionMin, err = reduceItems(
- childCollection,
- &source,
- lessO[float64],
- nil,
- func(childItem immutable.Option[float64], value *big.Float) *big.Float {
- if !childItem.HasValue() {
+ case []immutable.Option[float64]:
+ collectionMin, err = reduceItems(
+ childCollection,
+ &source,
+ lessO[float64],
+ nil,
+ func(childItem immutable.Option[float64], value *big.Float) *big.Float {
+ if !childItem.HasValue() {
+ return value
+ }
+ res := big.NewFloat(childItem.Value())
+ if value == nil || res.Cmp(value) < 0 {
+ return res
+ }
return value
- }
- res := big.NewFloat(childItem.Value())
- if value == nil || res.Cmp(value) < 0 {
- return res
- }
- return value
- },
- )
+ },
+ )
+ }
+ if err != nil {
+ return false, err
+ }
+ if collectionMin == nil || (min != nil && collectionMin.Cmp(min) >= 0) {
+ continue
+ }
+ isTargetFloat, err := n.p.isValueFloat(n.parent, &source)
+ if err != nil {
+ return false, err
+ }
+ isFloat = isTargetFloat
+ min = collectionMin
}
+
+ if min == nil {
+ n.currentValue.Fields[n.virtualFieldIndex] = nil
+ } else if isFloat {
+ res, _ := min.Float64()
+ n.currentValue.Fields[n.virtualFieldIndex] = res
+ } else {
+ res, _ := min.Int64()
+ n.currentValue.Fields[n.virtualFieldIndex] = res
+ }
+
+ passes, err := mapper.RunFilter(n.currentValue, n.aggregateFilter)
if err != nil {
return false, err
}
- if collectionMin == nil || (min != nil && collectionMin.Cmp(min) >= 0) {
+ if !passes {
continue
}
- isTargetFloat, err := n.p.isValueFloat(n.parent, &source)
- if err != nil {
- return false, err
- }
- isFloat = isTargetFloat
- min = collectionMin
- }
-
- if min == nil {
- n.currentValue.Fields[n.virtualFieldIndex] = nil
- } else if isFloat {
- res, _ := min.Float64()
- n.currentValue.Fields[n.virtualFieldIndex] = res
- } else {
- res, _ := min.Int64()
- n.currentValue.Fields[n.virtualFieldIndex] = res
+ return true, nil
}
- return mapper.RunFilter(n.currentValue, n.aggregateFilter)
}
diff --git a/internal/planner/sum.go b/internal/planner/sum.go
index a77e56da3d..26f49d9ab3 100644
--- a/internal/planner/sum.go
+++ b/internal/planner/sum.go
@@ -217,103 +217,112 @@ func (n *sumNode) Explain(explainType request.ExplainType) (map[string]any, erro
}
func (n *sumNode) Next() (bool, error) {
- n.execInfo.iterations++
+ for {
+ n.execInfo.iterations++
- hasNext, err := n.plan.Next()
- if err != nil || !hasNext {
- return hasNext, err
- }
-
- n.currentValue = n.plan.Value()
-
- sum := float64(0)
-
- for _, source := range n.aggregateMapping {
- child := n.currentValue.Fields[source.Index]
- var collectionSum float64
- var err error
- switch childCollection := child.(type) {
- case []core.Doc:
- collectionSum = reduceDocs(childCollection, 0, func(childItem core.Doc, value float64) float64 {
- childProperty := childItem.Fields[source.ChildTarget.Index]
- switch v := childProperty.(type) {
- case int:
- return value + float64(v)
- case int64:
- return value + float64(v)
- case uint64:
- return value + float64(v)
- case float64:
- return value + v
- default:
- // return nothing, cannot be summed
- return value + 0
- }
- })
- case []int64:
- collectionSum, err = reduceItems(
- childCollection,
- &source,
- lessN[int64],
- 0,
- func(childItem int64, value float64) float64 {
- return value + float64(childItem)
- },
- )
+ hasNext, err := n.plan.Next()
+ if err != nil || !hasNext {
+ return hasNext, err
+ }
- case []immutable.Option[int64]:
- collectionSum, err = reduceItems(
- childCollection,
- &source,
- lessO[int64],
- 0,
- func(childItem immutable.Option[int64], value float64) float64 {
- if !childItem.HasValue() {
+ n.currentValue = n.plan.Value()
+
+ sum := float64(0)
+
+ for _, source := range n.aggregateMapping {
+ child := n.currentValue.Fields[source.Index]
+ var collectionSum float64
+ var err error
+ switch childCollection := child.(type) {
+ case []core.Doc:
+ collectionSum = reduceDocs(childCollection, 0, func(childItem core.Doc, value float64) float64 {
+ childProperty := childItem.Fields[source.ChildTarget.Index]
+ switch v := childProperty.(type) {
+ case int:
+ return value + float64(v)
+ case int64:
+ return value + float64(v)
+ case uint64:
+ return value + float64(v)
+ case float64:
+ return value + v
+ default:
+ // return nothing, cannot be summed
return value + 0
}
- return value + float64(childItem.Value())
- },
- )
-
- case []float64:
- collectionSum, err = reduceItems(
- childCollection,
- &source,
- lessN[float64],
- 0,
- func(childItem float64, value float64) float64 {
- return value + childItem
- },
- )
+ })
+ case []int64:
+ collectionSum, err = reduceItems(
+ childCollection,
+ &source,
+ lessN[int64],
+ 0,
+ func(childItem int64, value float64) float64 {
+ return value + float64(childItem)
+ },
+ )
+
+ case []immutable.Option[int64]:
+ collectionSum, err = reduceItems(
+ childCollection,
+ &source,
+ lessO[int64],
+ 0,
+ func(childItem immutable.Option[int64], value float64) float64 {
+ if !childItem.HasValue() {
+ return value + 0
+ }
+ return value + float64(childItem.Value())
+ },
+ )
+
+ case []float64:
+ collectionSum, err = reduceItems(
+ childCollection,
+ &source,
+ lessN[float64],
+ 0,
+ func(childItem float64, value float64) float64 {
+ return value + childItem
+ },
+ )
+
+ case []immutable.Option[float64]:
+ collectionSum, err = reduceItems(
+ childCollection,
+ &source,
+ lessO[float64],
+ 0,
+ func(childItem immutable.Option[float64], value float64) float64 {
+ if !childItem.HasValue() {
+ return value + 0
+ }
+ return value + childItem.Value()
+ },
+ )
+ }
+ if err != nil {
+ return false, err
+ }
+ sum += collectionSum
+ }
- case []immutable.Option[float64]:
- collectionSum, err = reduceItems(
- childCollection,
- &source,
- lessO[float64],
- 0,
- func(childItem immutable.Option[float64], value float64) float64 {
- if !childItem.HasValue() {
- return value + 0
- }
- return value + childItem.Value()
- },
- )
+ var typedSum any
+ if n.isFloat {
+ typedSum = sum
+ } else {
+ typedSum = int64(sum)
}
+ n.currentValue.Fields[n.virtualFieldIndex] = typedSum
+ passes, err := mapper.RunFilter(n.currentValue, n.aggregateFilter)
if err != nil {
return false, err
}
- sum += collectionSum
- }
-
- var typedSum any
- if n.isFloat {
- typedSum = sum
- } else {
- typedSum = int64(sum)
+ if !passes {
+ continue
+ }
+ return true, nil
}
- n.currentValue.Fields[n.virtualFieldIndex] = typedSum
- return mapper.RunFilter(n.currentValue, n.aggregateFilter)
}
func (n *sumNode) SetPlan(p planNode) { n.plan = p }
diff --git a/tests/integration/query/one_to_many/with_average_test.go b/tests/integration/query/one_to_many/with_average_test.go
new file mode 100644
index 0000000000..40306a33b1
--- /dev/null
+++ b/tests/integration/query/one_to_many/with_average_test.go
@@ -0,0 +1,153 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package one_to_many
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQueryOneToMany_WithAverageAliasFilter_ShouldMatchAll(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "One-to-many relation query from many side with average alias",
+ Actions: []any{
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "John Grisham",
+ "age": 65,
+ "verified": true
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "Cornelia Funke",
+ "age": 62,
+ "verified": false
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Painted House",
+ "rating": 4.9,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "A Time for Mercy",
+ "rating": 4.5,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Theif Lord",
+ "rating": 4.8,
+ "author_id": testUtils.NewDocIndex(1, 1),
+ },
+ },
+ testUtils.Request{
+ Request: `query {
+ Author(filter: {_alias: {averageRating: {_gt: 0}}}) {
+ name
+ averageRating: _avg(published: {field: rating})
+ }
+ }`,
+ Results: map[string]any{
+ "Author": []map[string]any{
+ {
+ "name": "Cornelia Funke",
+ "averageRating": 4.8,
+ },
+ {
+ "name": "John Grisham",
+ "averageRating": 4.7,
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQueryOneToMany_WithAverageAliasFilter_ShouldMatchOne(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "One-to-many relation query from many side with average alias",
+ Actions: []any{
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "John Grisham",
+ "age": 65,
+ "verified": true
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "Cornelia Funke",
+ "age": 62,
+ "verified": false
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Painted House",
+ "rating": 4.9,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "A Time for Mercy",
+ "rating": 4.5,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Theif Lord",
+ "rating": 4.8,
+ "author_id": testUtils.NewDocIndex(1, 1),
+ },
+ },
+ testUtils.Request{
+ Request: `query {
+ Author(filter: {_alias: {averageRating: {_lt: 4.8}}}) {
+ name
+ averageRating: _avg(published: {field: rating})
+ }
+ }`,
+ Results: map[string]any{
+ "Author": []map[string]any{
+ {
+ "name": "John Grisham",
+ "averageRating": 4.7,
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
diff --git a/tests/integration/query/one_to_many/with_count_test.go b/tests/integration/query/one_to_many/with_count_test.go
index 77905ed748..567642a067 100644
--- a/tests/integration/query/one_to_many/with_count_test.go
+++ b/tests/integration/query/one_to_many/with_count_test.go
@@ -188,3 +188,69 @@ func TestQueryOneToMany_WithCountAliasFilter_ShouldMatchAll(t *testing.T) {
executeTestCase(t, test)
}
+
+func TestQueryOneToMany_WithCountAliasFilter_ShouldMatchOne(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "One-to-many relation query from many side with count alias",
+ Actions: []any{
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "John Grisham",
+ "age": 65,
+ "verified": true
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "Cornelia Funke",
+ "age": 62,
+ "verified": false
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Painted House",
+ "rating": 4.9,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "A Time for Mercy",
+ "rating": 4.5,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Theif Lord",
+ "rating": 4.8,
+ "author_id": testUtils.NewDocIndex(1, 1),
+ },
+ },
+ testUtils.Request{
+ Request: `query {
+ Author(filter: {_alias: {publishedCount: {_gt: 1}}}) {
+ name
+ publishedCount: _count(published: {})
+ }
+ }`,
+ Results: map[string]any{
+ "Author": []map[string]any{
+ {
+ "name": "John Grisham",
+ "publishedCount": 2,
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
diff --git a/tests/integration/query/one_to_many/with_max_test.go b/tests/integration/query/one_to_many/with_max_test.go
new file mode 100644
index 0000000000..2cc3031ea4
--- /dev/null
+++ b/tests/integration/query/one_to_many/with_max_test.go
@@ -0,0 +1,153 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package one_to_many
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQueryOneToMany_WithMaxAliasFilter_ShouldMatchAll(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "One-to-many relation query from many side with max alias",
+ Actions: []any{
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "John Grisham",
+ "age": 65,
+ "verified": true
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "Cornelia Funke",
+ "age": 62,
+ "verified": false
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Painted House",
+ "rating": 4.9,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "A Time for Mercy",
+ "rating": 4.5,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Theif Lord",
+ "rating": 4.8,
+ "author_id": testUtils.NewDocIndex(1, 1),
+ },
+ },
+ testUtils.Request{
+ Request: `query {
+ Author(filter: {_alias: {maxRating: {_gt: 0}}}) {
+ name
+ maxRating: _max(published: {field: rating})
+ }
+ }`,
+ Results: map[string]any{
+ "Author": []map[string]any{
+ {
+ "name": "Cornelia Funke",
+ "maxRating": 4.8,
+ },
+ {
+ "name": "John Grisham",
+ "maxRating": 4.9,
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQueryOneToMany_WithMaxAliasFilter_ShouldMatchOne(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "One-to-many relation query from many side with max alias",
+ Actions: []any{
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "John Grisham",
+ "age": 65,
+ "verified": true
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "Cornelia Funke",
+ "age": 62,
+ "verified": false
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Painted House",
+ "rating": 4.9,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "A Time for Mercy",
+ "rating": 4.5,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Theif Lord",
+ "rating": 4.8,
+ "author_id": testUtils.NewDocIndex(1, 1),
+ },
+ },
+ testUtils.Request{
+ Request: `query {
+ Author(filter: {_alias: {maxRating: {_gt: 4.8}}}) {
+ name
+ maxRating: _max(published: {field: rating})
+ }
+ }`,
+ Results: map[string]any{
+ "Author": []map[string]any{
+ {
+ "name": "John Grisham",
+ "maxRating": 4.9,
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
diff --git a/tests/integration/query/one_to_many/with_min_test.go b/tests/integration/query/one_to_many/with_min_test.go
new file mode 100644
index 0000000000..3325ccbe5a
--- /dev/null
+++ b/tests/integration/query/one_to_many/with_min_test.go
@@ -0,0 +1,153 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package one_to_many
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQueryOneToMany_WithMinAliasFilter_ShouldMatchAll(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "One-to-many relation query from many side with min alias",
+ Actions: []any{
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "John Grisham",
+ "age": 65,
+ "verified": true
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "Cornelia Funke",
+ "age": 62,
+ "verified": false
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Painted House",
+ "rating": 4.9,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "A Time for Mercy",
+ "rating": 4.5,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Theif Lord",
+ "rating": 4.8,
+ "author_id": testUtils.NewDocIndex(1, 1),
+ },
+ },
+ testUtils.Request{
+ Request: `query {
+ Author(filter: {_alias: {minRating: {_gt: 0}}}) {
+ name
+ minRating: _min(published: {field: rating})
+ }
+ }`,
+ Results: map[string]any{
+ "Author": []map[string]any{
+ {
+ "name": "Cornelia Funke",
+ "minRating": 4.8,
+ },
+ {
+ "name": "John Grisham",
+ "minRating": 4.5,
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQueryOneToMany_WithMinAliasFilter_ShouldMatchOne(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "One-to-many relation query from many side with min alias",
+ Actions: []any{
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "John Grisham",
+ "age": 65,
+ "verified": true
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "Cornelia Funke",
+ "age": 62,
+ "verified": false
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Painted House",
+ "rating": 4.9,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "A Time for Mercy",
+ "rating": 4.5,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Theif Lord",
+ "rating": 4.8,
+ "author_id": testUtils.NewDocIndex(1, 1),
+ },
+ },
+ testUtils.Request{
+ Request: `query {
+ Author(filter: {_alias: {minRating: {_lt: 4.7}}}) {
+ name
+ minRating: _min(published: {field: rating})
+ }
+ }`,
+ Results: map[string]any{
+ "Author": []map[string]any{
+ {
+ "name": "John Grisham",
+ "minRating": 4.5,
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
diff --git a/tests/integration/query/one_to_many/with_sum_test.go b/tests/integration/query/one_to_many/with_sum_test.go
new file mode 100644
index 0000000000..fb383d9a2f
--- /dev/null
+++ b/tests/integration/query/one_to_many/with_sum_test.go
@@ -0,0 +1,153 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package one_to_many
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQueryOneToMany_WithSumAliasFilter_ShouldMatchAll(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "One-to-many relation query from many side with sum alias",
+ Actions: []any{
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "John Grisham",
+ "age": 65,
+ "verified": true
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "Cornelia Funke",
+ "age": 62,
+ "verified": false
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Painted House",
+ "rating": 4.9,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "A Time for Mercy",
+ "rating": 4.5,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Theif Lord",
+ "rating": 4.8,
+ "author_id": testUtils.NewDocIndex(1, 1),
+ },
+ },
+ testUtils.Request{
+ Request: `query {
+ Author(filter: {_alias: {totalRating: {_gt: 0}}}) {
+ name
+ totalRating: _sum(published: {field: rating})
+ }
+ }`,
+ Results: map[string]any{
+ "Author": []map[string]any{
+ {
+ "name": "Cornelia Funke",
+ "totalRating": 4.8,
+ },
+ {
+ "name": "John Grisham",
+ "totalRating": 9.4,
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQueryOneToMany_WithSumAliasFilter_ShouldMatchOne(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "One-to-many relation query from many side with sum alias",
+ Actions: []any{
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "John Grisham",
+ "age": 65,
+ "verified": true
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 1,
+ Doc: `{
+ "name": "Cornelia Funke",
+ "age": 62,
+ "verified": false
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Painted House",
+ "rating": 4.9,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "A Time for Mercy",
+ "rating": 4.5,
+ "author_id": testUtils.NewDocIndex(1, 0),
+ },
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ DocMap: map[string]any{
+ "name": "Theif Lord",
+ "rating": 4.8,
+ "author_id": testUtils.NewDocIndex(1, 1),
+ },
+ },
+ testUtils.Request{
+ Request: `query {
+ Author(filter: {_alias: {totalRating: {_gt: 5}}}) {
+ name
+ totalRating: _sum(published: {field: rating})
+ }
+ }`,
+ Results: map[string]any{
+ "Author": []map[string]any{
+ {
+ "name": "John Grisham",
+ "totalRating": 9.4,
+ },
+ },
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
From fecc50f1b4b0397ca8dc73d34e0e938552d060c9 Mon Sep 17 00:00:00 2001
From: Fred Carle
Date: Fri, 13 Dec 2024 16:33:14 -0500
Subject: [PATCH 47/47] Release v0.15.0
---
CHANGELOG.md | 67 ++++++++++++++++++++++++++++++++++++++++++++++++
licenses/BSL.txt | 4 +--
2 files changed, 69 insertions(+), 2 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b7c252a22e..d26600af14 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,70 @@
+
+## [v0.15.0](https://github.com/sourcenetwork/defradb/compare/v0.14.0...v0.15.0)
+
+> 2024-12-13
+
+DefraDB v0.15 is a major pre-production release. Until the stable version 1.0 is reached, the SemVer minor version number will denote notable releases, giving the project freedom to experiment and explore potentially breaking changes.
+
+To get a full outline of the changes, we invite you to review the official changelog below. This release includes a breaking change to existing v0.14.x databases. If you need help migrating an existing deployment, reach out at [hello@source.network](mailto:hello@source.network) or join our Discord at https://discord.gg/w7jYQVJ/.
+
+### Features
+
+* Add support for branchable collections ([#3216](https://github.com/sourcenetwork/defradb/issues/3216))
+* Aggregate filter alias targeting ([#3252](https://github.com/sourcenetwork/defradb/issues/3252))
+* Aggregate order alias targeting ([#3293](https://github.com/sourcenetwork/defradb/issues/3293))
+* Add ability to add/delete relationship for all actors ([#3254](https://github.com/sourcenetwork/defradb/issues/3254))
+* Add support for branchable collection time-traveling ([#3260](https://github.com/sourcenetwork/defradb/issues/3260))
+* Add support for cid-only time travel queries ([#3256](https://github.com/sourcenetwork/defradb/issues/3256))
+* Support for descending fields CLI index creation ([#3237](https://github.com/sourcenetwork/defradb/issues/3237))
+* Order alias target ([#3217](https://github.com/sourcenetwork/defradb/issues/3217))
+* Error if purge request made with dev mode disabled ([#3295](https://github.com/sourcenetwork/defradb/issues/3295))
+* Add ACP to pubsub KMS ([#3206](https://github.com/sourcenetwork/defradb/issues/3206))
+* Filter alias target ([#3201](https://github.com/sourcenetwork/defradb/issues/3201))
+* Add node identity ([#3125](https://github.com/sourcenetwork/defradb/issues/3125))
+
+### Fixes
+
+* Adjust OpenAPI index POST example request body ([#3268](https://github.com/sourcenetwork/defradb/issues/3268))
+* Make requests with no identity work with "*" target ([#3278](https://github.com/sourcenetwork/defradb/issues/3278))
+* Add support for operationName and variables in HTTP GET ([#3292](https://github.com/sourcenetwork/defradb/issues/3292))
+* Resolve CORS errors in OpenAPI tab of Playground ([#3263](https://github.com/sourcenetwork/defradb/issues/3263))
+* Prevent over span ([#3258](https://github.com/sourcenetwork/defradb/issues/3258))
+* Add Authorization header to CORS allowed headers ([#3178](https://github.com/sourcenetwork/defradb/issues/3178))
+
+### Documentation
+
+* Update discord link ([#3231](https://github.com/sourcenetwork/defradb/issues/3231))
+
+### Refactoring
+
+* Add unified JSON interface ([#3265](https://github.com/sourcenetwork/defradb/issues/3265))
+* Consolidate node-related fields into a struct ([#3232](https://github.com/sourcenetwork/defradb/issues/3232))
+* Rework core.Spans ([#3210](https://github.com/sourcenetwork/defradb/issues/3210))
+* Simplify merkle/crdt code ([#3200](https://github.com/sourcenetwork/defradb/issues/3200))
+* Breakup core/keys.go file ([#3198](https://github.com/sourcenetwork/defradb/issues/3198))
+* Remove indirection from crdt packages ([#3192](https://github.com/sourcenetwork/defradb/issues/3192))
+
+### Testing
+
+* Allow soft-referencing of Cids in tests ([#3176](https://github.com/sourcenetwork/defradb/issues/3176))
+
+### Continuous integration
+
+* Fix the gql mutation running in all tests ([#3267](https://github.com/sourcenetwork/defradb/issues/3267))
+* Freeze goreleaser version and fix amd64 path ([#3170](https://github.com/sourcenetwork/defradb/issues/3170))
+
+### Bot
+
+* Update dependencies (bulk dependabot PRs) 10-12-2024 ([#3312](https://github.com/sourcenetwork/defradb/issues/3312))
+* Update dependencies (bulk dependabot PRs) 09-12-2024 ([#3307](https://github.com/sourcenetwork/defradb/issues/3307))
+* Update dependencies (bulk dependabot PRs) 08-12-2024 ([#3296](https://github.com/sourcenetwork/defradb/issues/3296))
+* Update dependencies (bulk dependabot PRs) 03-12-2024 ([#3288](https://github.com/sourcenetwork/defradb/issues/3288))
+* Update dependencies (bulk dependabot PRs) 25-11-2024 ([#3273](https://github.com/sourcenetwork/defradb/issues/3273))
+* Update dependencies (bulk dependabot PRs) 11-11-2024 ([#3235](https://github.com/sourcenetwork/defradb/issues/3235))
+* Update dependencies (bulk dependabot PRs) 04-11-2024 ([#3207](https://github.com/sourcenetwork/defradb/issues/3207))
+* Update dependencies (bulk dependabot PRs) 2024-10-28 ([#3188](https://github.com/sourcenetwork/defradb/issues/3188))
+* Update dependencies (bulk dependabot PRs) 21-10-2024 ([#3168](https://github.com/sourcenetwork/defradb/issues/3168))
+
## [v0.14.0](https://github.com/sourcenetwork/defradb/compare/v0.13.0...v0.14.0)
diff --git a/licenses/BSL.txt b/licenses/BSL.txt
index 773ce8dc09..180453a428 100644
--- a/licenses/BSL.txt
+++ b/licenses/BSL.txt
@@ -7,7 +7,7 @@ Parameters
Licensor: Democratized Data (D2) Foundation
-Licensed Work: DefraDB v0.14.0
+Licensed Work: DefraDB v0.15.0
The Licensed Work is (c) 2023 D2 Foundation.
@@ -28,7 +28,7 @@ Additional Use Grant: You may only use the Licensed Work for the
-Change Date: 2028-10-18
+Change Date: 2028-12-13
Change License: Apache License, Version 2.0