diff --git a/client/index.go b/client/index.go
index 6f87626c98..b09b258224 100644
--- a/client/index.go
+++ b/client/index.go
@@ -38,9 +38,11 @@ type IndexDescription struct {
 // CollectionIndex is an interface for indexing documents in a collection.
 type CollectionIndex interface {
-	// Save indexes a document by storing it
+	// Save indexes a document by storing the values of its indexed fields.
+	// It does not remove previously stored values; use [Update] for that.
 	Save(context.Context, datastore.Txn, *Document) error
-	// Update updates an existing document in the index
+	// Update updates an existing document in the index.
+	// It removes the previously indexed field values and stores the new ones.
 	Update(context.Context, datastore.Txn, *Document, *Document) error
 	// Delete deletes an existing document from the index
 	Delete(context.Context, datastore.Txn, *Document) error
diff --git a/client/normal_array.go b/client/normal_array.go
index 00133a0f74..b560a4bd9a 100644
--- a/client/normal_array.go
+++ b/client/normal_array.go
@@ -41,6 +41,10 @@ func (v normalBoolArray) BoolArray() ([]bool, bool) {
 	return v.val, true
 }
 
+func (v normalBoolArray) Equal(other NormalValue) bool {
+	return areNormalArraysEqual(v.val, other.BoolArray)
+}
+
 type normalIntArray struct {
 	baseArrayNormalValue[[]int64]
 }
@@ -49,6 +53,10 @@ func (v normalIntArray) IntArray() ([]int64, bool) {
 	return v.val, true
 }
 
+func (v normalIntArray) Equal(other NormalValue) bool {
+	return areNormalArraysEqual(v.val, other.IntArray)
+}
+
 type normalFloatArray struct {
 	baseArrayNormalValue[[]float64]
 }
@@ -57,6 +65,10 @@ func (v normalFloatArray) FloatArray() ([]float64, bool) {
 	return v.val, true
 }
 
+func (v normalFloatArray) Equal(other NormalValue) bool {
+	return areNormalArraysEqual(v.val, other.FloatArray)
+}
+
 type normalStringArray struct {
 	baseArrayNormalValue[[]string]
 }
@@ -65,6 +77,10 @@ func (v normalStringArray) StringArray() ([]string, bool) {
 	return v.val, true
 }
 
+func (v normalStringArray) Equal(other NormalValue) bool {
+	return areNormalArraysEqual(v.val, other.StringArray)
+}
+
 type normalBytesArray struct {
 	baseArrayNormalValue[[][]byte]
 }
@@ -73,6 +89,13 @@ func (v normalBytesArray) BytesArray() ([][]byte, bool) {
 	return v.val, true
 }
 
+func (v normalBytesArray) Equal(other NormalValue) bool {
+	if otherVal, ok := other.BytesArray(); ok {
+		return are2DArraysEqual(v.val, otherVal)
+	}
+	return false
+}
+
 type normalTimeArray struct {
 	baseArrayNormalValue[[]time.Time]
 }
@@ -81,6 +104,10 @@ func (v normalTimeArray) TimeArray() ([]time.Time, bool) {
 	return v.val, true
 }
 
+func (v normalTimeArray) Equal(other NormalValue) bool {
+	return areNormalArraysEqual(v.val, other.TimeArray)
+}
+
 type normalDocumentArray struct {
 	baseArrayNormalValue[[]*Document]
 }
@@ -89,6 +116,10 @@ func (v normalDocumentArray) DocumentArray() ([]*Document, bool) {
 	return v.val, true
 }
 
+func (v normalDocumentArray) Equal(other NormalValue) bool {
+	return areNormalArraysEqual(v.val, other.DocumentArray)
+}
+
 // NewNormalBoolArray creates a new NormalValue that represents a `[]bool` value.
 func NewNormalBoolArray(val []bool) NormalValue {
 	return normalBoolArray{newBaseArrayNormalValue(val)}
@@ -147,3 +178,34 @@ func normalizeCharsArr[R string | []byte, T string | []byte](val []T) []R {
 	}
 	return arr
 }
+
+func areArraysEqual[T comparable](arr1, arr2 []T) bool {
+	if len(arr1) != len(arr2) {
+		return false
+	}
+	for i, v := range arr1 {
+		if v != arr2[i] {
+			return false
+		}
+	}
+	return true
+}
+
+func areNormalArraysEqual[T comparable](val []T, f func() ([]T, bool)) bool {
+	if otherVal, ok := f(); ok {
+		return areArraysEqual(val, otherVal)
+	}
+	return false
+}
+
+func are2DArraysEqual[T comparable](arr1, arr2 [][]T) bool {
+	if len(arr1) != len(arr2) {
+		return false
+	}
+	for i, v := range arr1 {
+		if !areArraysEqual(v, arr2[i]) {
+			return false
+		}
+	}
+	return true
+}
diff --git a/client/normal_array_of_nillables.go b/client/normal_array_of_nillables.go
index 53461f6afa..35f3b50bcb 100644
--- a/client/normal_array_of_nillables.go
+++ b/client/normal_array_of_nillables.go
@@ -11,6 +11,7 @@ package client
 
 import (
+	"bytes"
 	"time"
 
 	"github.com/sourcenetwork/immutable"
@@ -25,6 +26,10 @@ func (v normalNillableBoolArray) NillableBoolArray() ([]immutable.Option[bool],
 	return v.val, true
 }
 
+func (v normalNillableBoolArray) Equal(other NormalValue) bool {
+	return areNormalArraysOfNillablesEqual(v.val, other.NillableBoolArray)
+}
+
 type normalNillableIntArray struct {
 	baseArrayNormalValue[[]immutable.Option[int64]]
 }
@@ -33,6 +38,10 @@ func (v normalNillableIntArray) NillableIntArray() ([]immutable.Option[int64], b
 	return v.val, true
 }
 
+func (v normalNillableIntArray) Equal(other NormalValue) bool {
+	return areNormalArraysOfNillablesEqual(v.val, other.NillableIntArray)
+}
+
 type normalNillableFloatArray struct {
 	baseArrayNormalValue[[]immutable.Option[float64]]
 }
@@ -41,6 +50,10 @@ func (v normalNillableFloatArray) NillableFloatArray() ([]immutable.Option[float
 	return v.val, true
 }
 
+func (v normalNillableFloatArray) Equal(other NormalValue) bool {
+	return areNormalArraysOfNillablesEqual(v.val, other.NillableFloatArray)
+}
+
 type normalNillableStringArray struct {
 	baseArrayNormalValue[[]immutable.Option[string]]
 }
@@ -49,6 +62,10 @@ func (v normalNillableStringArray) NillableStringArray() ([]immutable.Option[str
 	return v.val, true
 }
 
+func (v normalNillableStringArray) Equal(other NormalValue) bool {
+	return areNormalArraysOfNillablesEqual(v.val, other.NillableStringArray)
+}
+
 type normalNillableBytesArray struct {
 	baseArrayNormalValue[[]immutable.Option[[]byte]]
 }
@@ -57,6 +74,13 @@ func (v normalNillableBytesArray) NillableBytesArray() ([]immutable.Option[[]byt
 	return v.val, true
 }
 
+func (v normalNillableBytesArray) Equal(other NormalValue) bool {
+	if otherVal, ok := other.NillableBytesArray(); ok {
+		return areArraysOfNillableBytesEqual(v.val, otherVal)
+	}
+	return false
+}
+
 type normalNillableTimeArray struct {
 	baseArrayNormalValue[[]immutable.Option[time.Time]]
 }
@@ -65,6 +89,10 @@ func (v normalNillableTimeArray) NillableTimeArray() ([]immutable.Option[time.Ti
 	return v.val, true
 }
 
+func (v normalNillableTimeArray) Equal(other NormalValue) bool {
+	return areNormalArraysOfNillablesEqual(v.val, other.NillableTimeArray)
+}
+
 type normalNillableDocumentArray struct {
 	baseArrayNormalValue[[]immutable.Option[*Document]]
 }
@@ -73,6 +101,10 @@ func (v normalNillableDocumentArray) NillableDocumentArray() ([]immutable.Option
 	return v.val, true
 }
 
+func (v normalNillableDocumentArray) Equal(other NormalValue) bool {
+	return areNormalArraysOfNillablesEqual(v.val, other.NillableDocumentArray)
+}
+
 // NewNormalNillableBoolNillableArray creates a new NormalValue that represents a
 // `immutable.Option[[]immutable.Option[bool]]` value.
 func NewNormalNillableBoolArray(val []immutable.Option[bool]) NormalValue {
@@ -140,3 +172,41 @@ func normalizeNillableCharsArr[R string | []byte, T string | []byte](val []immut
 	}
 	return arr
 }
+
+func areNormalArraysOfNillablesEqual[T comparable](
+	val []immutable.Option[T],
+	f func() ([]immutable.Option[T], bool),
+) bool {
+	if otherVal, ok := f(); ok {
+		return areArraysOfNillablesEqual(val, otherVal)
+	}
+	return false
+}
+
+func areArraysOfNillablesEqual[T comparable](a, b []immutable.Option[T]) bool {
+	if len(a) != len(b) {
+		return false
+	}
+	for i, v := range a {
+		if v != b[i] {
+			return false
+		}
+	}
+	return true
+}
+
+func areArraysOfNillableBytesEqual(a, b []immutable.Option[[]byte]) bool {
+	if len(a) != len(b) {
+		return false
+	}
+	for i, v := range a {
+		if v.HasValue() && b[i].HasValue() {
+			if !bytes.Equal(v.Value(), b[i].Value()) {
+				return false
+			}
+		} else if v.HasValue() || b[i].HasValue() {
+			return false
+		}
+	}
+	return true
+}
diff --git a/client/normal_nillable_array.go b/client/normal_nillable_array.go
index fa6bdc4bbb..9b30c342e2 100644
--- a/client/normal_nillable_array.go
+++ b/client/normal_nillable_array.go
@@ -52,6 +52,10 @@ func (v normalBoolNillableArray) BoolNillableArray() (immutable.Option[[]bool],
 	return v.val, true
 }
 
+func (v normalBoolNillableArray) Equal(other NormalValue) bool {
+	return areOptionsArrEqual(v.val, other.BoolNillableArray)
+}
+
 type normalIntNillableArray struct {
 	baseNillableArrayNormalValue[[]int64]
 }
@@ -60,6 +64,10 @@ func (v normalIntNillableArray) IntNillableArray() (immutable.Option[[]int64], b
 	return v.val, true
 }
 
+func (v normalIntNillableArray) Equal(other NormalValue) bool {
+	return areOptionsArrEqual(v.val, other.IntNillableArray)
+}
+
 type normalFloatNillableArray struct {
 	baseNillableArrayNormalValue[[]float64]
 }
@@ -68,6 +76,10 @@ func (v normalFloatNillableArray) FloatNillableArray() (immutable.Option[[]float
 	return v.val, true
 }
 
+func (v normalFloatNillableArray) Equal(other NormalValue) bool {
+	return areOptionsArrEqual(v.val, other.FloatNillableArray)
+}
+
 type normalStringNillableArray struct {
 	baseNillableArrayNormalValue[[]string]
 }
@@ -76,6 +88,10 @@ func (v normalStringNillableArray) StringNillableArray() (immutable.Option[[]str
 	return v.val, true
 }
 
+func (v normalStringNillableArray) Equal(other NormalValue) bool {
+	return areOptionsArrEqual(v.val, other.StringNillableArray)
+}
+
 type normalBytesNillableArray struct {
 	baseNillableArrayNormalValue[[][]byte]
 }
@@ -84,6 +100,16 @@ func (v normalBytesNillableArray) BytesNillableArray() (immutable.Option[[][]byt
 	return v.val, true
 }
 
+func (v normalBytesNillableArray) Equal(other NormalValue) bool {
+	if otherVal, ok := other.BytesNillableArray(); ok {
+		if v.val.HasValue() && otherVal.HasValue() {
+			return are2DArraysEqual(v.val.Value(), otherVal.Value())
+		}
+		return !v.val.HasValue() && !otherVal.HasValue()
+	}
+	return false
+}
+
 type normalTimeNillableArray struct {
 	baseNillableArrayNormalValue[[]time.Time]
 }
@@ -92,6 +118,10 @@ func (v normalTimeNillableArray) TimeNillableArray() (immutable.Option[[]time.Ti
 	return v.val, true
 }
 
+func (v normalTimeNillableArray) Equal(other NormalValue) bool {
+	return areOptionsArrEqual(v.val, other.TimeNillableArray)
+}
+
 type normalDocumentNillableArray struct {
 	baseNillableArrayNormalValue[[]*Document]
 }
@@ -100,6 +130,10 @@ func (v normalDocumentNillableArray) DocumentNillableArray() (immutable.Option[[
 	return v.val, true
 }
 
+func (v normalDocumentNillableArray) Equal(other NormalValue) bool {
+	return areOptionsArrEqual(v.val, other.DocumentNillableArray)
+}
+
 // NewNormalNillableBoolArray creates a new NormalValue that represents a `immutable.Option[[]bool]` value.
 func NewNormalBoolNillableArray(val immutable.Option[[]bool]) NormalValue {
 	return normalBoolNillableArray{newBaseNillableArrayNormalValue(val)}
@@ -150,3 +184,13 @@ func normalizeCharsNillableArr[R string | []byte, T string | []byte](val immutab
 	}
 	return immutable.None[[]R]()
 }
+
+func areOptionsArrEqual[T comparable](val immutable.Option[[]T], f func() (immutable.Option[[]T], bool)) bool {
+	if otherVal, ok := f(); ok {
+		if val.HasValue() && otherVal.HasValue() {
+			return areArraysEqual(val.Value(), otherVal.Value())
+		}
+		return !val.HasValue() && !otherVal.HasValue()
+	}
+	return false
+}
diff --git a/client/normal_nillable_array_of_nillables.go b/client/normal_nillable_array_of_nillables.go
index 3594186ba2..47052176e2 100644
--- a/client/normal_nillable_array_of_nillables.go
+++ b/client/normal_nillable_array_of_nillables.go
@@ -27,6 +27,10 @@ func (v normalNillableBoolNillableArray) NillableBoolNillableArray() (
 	return v.val, true
 }
 
+func (v normalNillableBoolNillableArray) Equal(other NormalValue) bool {
+	return areNormalNillableArraysOfNillablesEqual(v.val, other.NillableBoolNillableArray)
+}
+
 type normalNillableIntNillableArray struct {
 	baseNillableArrayNormalValue[[]immutable.Option[int64]]
 }
@@ -37,6 +41,10 @@ func (v normalNillableIntNillableArray) NillableIntNillableArray() (
 	return v.val, true
 }
 
+func (v normalNillableIntNillableArray) Equal(other NormalValue) bool {
+	return areNormalNillableArraysOfNillablesEqual(v.val, other.NillableIntNillableArray)
+}
+
 type normalNillableFloatNillableArray struct {
 	baseNillableArrayNormalValue[[]immutable.Option[float64]]
 }
@@ -47,6 +55,10 @@ func (v normalNillableFloatNillableArray) NillableFloatNillableArray() (
 	return v.val, true
 }
 
+func (v normalNillableFloatNillableArray) Equal(other NormalValue) bool {
+	return areNormalNillableArraysOfNillablesEqual(v.val, other.NillableFloatNillableArray)
+}
+
 type normalNillableStringNillableArray struct {
 	baseNillableArrayNormalValue[[]immutable.Option[string]]
 }
@@ -57,6 +69,10 @@ func (v normalNillableStringNillableArray) NillableStringNillableArray() (
 	return v.val, true
 }
 
+func (v normalNillableStringNillableArray) Equal(other NormalValue) bool {
+	return areNormalNillableArraysOfNillablesEqual(v.val, other.NillableStringNillableArray)
+}
+
 type normalNillableBytesNillableArray struct {
 	baseNillableArrayNormalValue[[]immutable.Option[[]byte]]
 }
@@ -67,6 +83,16 @@ func (v normalNillableBytesNillableArray) NillableBytesNillableArray() (
 	return v.val, true
 }
 
+func (v normalNillableBytesNillableArray) Equal(other NormalValue) bool {
+	if otherVal, ok := other.NillableBytesNillableArray(); ok {
+		if v.val.HasValue() && otherVal.HasValue() {
+			return areArraysOfNillableBytesEqual(v.val.Value(), otherVal.Value())
+		}
+		return !v.val.HasValue() && !otherVal.HasValue()
+	}
+	return false
+}
+
 type normalNillableTimeNillableArray struct {
 	baseNillableArrayNormalValue[[]immutable.Option[time.Time]]
 }
@@ -77,6 +103,10 @@ func (v normalNillableTimeNillableArray) NillableTimeNillableArray() (
 	return v.val, true
 }
 
+func (v normalNillableTimeNillableArray) Equal(other NormalValue) bool {
+	return areNormalNillableArraysOfNillablesEqual(v.val, other.NillableTimeNillableArray)
+}
+
 type normalNillableDocumentNillableArray struct {
 	baseNillableArrayNormalValue[[]immutable.Option[*Document]]
 }
@@ -87,6 +117,10 @@ func (v normalNillableDocumentNillableArray) NillableDocumentNillableArray() (
 	return v.val, true
 }
 
+func (v normalNillableDocumentNillableArray) Equal(other NormalValue) bool {
+	return areNormalNillableArraysOfNillablesEqual(v.val, other.NillableDocumentNillableArray)
+}
+
 // NewNormalNillableBoolNillableArray creates a new NormalValue that represents a
 // `immutable.Option[[]immutable.Option[bool]]` value.
 func NewNormalNillableBoolNillableArray(val immutable.Option[[]immutable.Option[bool]]) NormalValue {
@@ -158,3 +192,20 @@ func normalizeNillableCharsNillableArr[R string | []byte, T string | []byte](
 	}
 	return immutable.None[[]immutable.Option[R]]()
 }
+
+func areNormalNillableArraysOfNillablesEqual[T comparable](
+	val immutable.Option[[]immutable.Option[T]],
+	f func() (immutable.Option[[]immutable.Option[T]], bool),
+) bool {
+	if otherVal, ok := f(); ok {
+		return areNillableArraysOfNillablesEqual(val, otherVal)
+	}
+	return false
+}
+
+func areNillableArraysOfNillablesEqual[T comparable](a, b immutable.Option[[]immutable.Option[T]]) bool {
+	if a.HasValue() && b.HasValue() {
+		return areArraysOfNillablesEqual(a.Value(), b.Value())
+	}
+	return !a.HasValue() && !b.HasValue()
+}
diff --git a/client/normal_nillable_scalar.go b/client/normal_nillable_scalar.go
index 88876c9d7e..86b4827bf7 100644
--- a/client/normal_nillable_scalar.go
+++ b/client/normal_nillable_scalar.go
@@ -48,6 +48,10 @@ func (v normalNillableBool) NillableBool() (immutable.Option[bool], bool) {
 	return v.val, true
 }
 
+func (v normalNillableBool) Equal(other NormalValue) bool {
+	return areNormalScalarsEqual(v.val, other.NillableBool)
+}
+
 type normalNillableInt struct {
 	baseNillableNormalValue[int64]
 }
@@ -56,6 +60,10 @@ func (v normalNillableInt) NillableInt() (immutable.Option[int64], bool) {
 	return v.val, true
 }
 
+func (v normalNillableInt) Equal(other NormalValue) bool {
+	return areNormalScalarsEqual(v.val, other.NillableInt)
+}
+
 type normalNillableFloat struct {
 	baseNillableNormalValue[float64]
 }
@@ -64,6 +72,10 @@ func (v normalNillableFloat) NillableFloat() (immutable.Option[float64], bool) {
 	return v.val, true
 }
 
+func (v normalNillableFloat) Equal(other NormalValue) bool {
+	return areNormalScalarsEqual(v.val, other.NillableFloat)
+}
+
 type normalNillableString struct {
 	baseNillableNormalValue[string]
 }
@@ -72,6 +84,10 @@ func (v normalNillableString) NillableString() (immutable.Option[string], bool)
 	return v.val, true
 }
 
+func (v normalNillableString) Equal(other NormalValue) bool {
+	return areNormalScalarsEqual(v.val, other.NillableString)
+}
+
 type normalNillableBytes struct {
 	baseNillableNormalValue[[]byte]
 }
@@ -80,6 +96,10 @@ func (v normalNillableBytes) NillableBytes() (immutable.Option[[]byte], bool) {
 	return v.val, true
 }
 
+func (v normalNillableBytes) Equal(other NormalValue) bool {
+	return areOptionsArrEqual(v.val, other.NillableBytes)
+}
+
 type normalNillableTime struct {
 	baseNillableNormalValue[time.Time]
 }
@@ -88,6 +108,10 @@ func (v normalNillableTime) NillableTime() (immutable.Option[time.Time], bool) {
 	return v.val, true
 }
 
+func (v normalNillableTime) Equal(other NormalValue) bool {
+	return areNormalScalarsEqual(v.val, other.NillableTime)
+}
+
 type normalNillableDocument struct {
 	baseNillableNormalValue[*Document]
 }
@@ -96,6 +120,10 @@ func (v normalNillableDocument) NillableDocument() (immutable.Option[*Document],
 	return v.val, true
 }
 
+func (v normalNillableDocument) Equal(other NormalValue) bool {
+	return areNormalScalarsEqual(v.val, other.NillableDocument)
+}
+
 // NewNormalNillableBool creates a new NormalValue that represents a `immutable.Option[bool]` value.
 func NewNormalNillableBool(val immutable.Option[bool]) NormalValue {
 	return normalNillableBool{newBaseNillableNormalValue(val)}
diff --git a/client/normal_scalar.go b/client/normal_scalar.go
index f4378f5474..f30eca78d7 100644
--- a/client/normal_scalar.go
+++ b/client/normal_scalar.go
@@ -11,6 +11,7 @@ package client
 
 import (
+	"bytes"
 	"time"
 
 	"golang.org/x/exp/constraints"
@@ -38,6 +39,10 @@ func (v normalBool) Bool() (bool, bool) {
 	return v.val, true
 }
 
+func (v normalBool) Equal(other NormalValue) bool {
+	return areNormalScalarsEqual(v.val, other.Bool)
+}
+
 type normalInt struct {
 	baseNormalValue[int64]
 }
@@ -46,6 +51,10 @@ func (v normalInt) Int() (int64, bool) {
 	return v.val, true
 }
 
+func (v normalInt) Equal(other NormalValue) bool {
+	return areNormalScalarsEqual(v.val, other.Int)
+}
+
 type normalFloat struct {
 	baseNormalValue[float64]
 }
@@ -54,6 +63,10 @@ func (v normalFloat) Float() (float64, bool) {
 	return v.val, true
 }
 
+func (v normalFloat) Equal(other NormalValue) bool {
+	return areNormalScalarsEqual(v.val, other.Float)
+}
+
 type normalString struct {
 	baseNormalValue[string]
 }
@@ -62,6 +75,10 @@ func (v normalString) String() (string, bool) {
 	return v.val, true
 }
 
+func (v normalString) Equal(other NormalValue) bool {
+	return areNormalScalarsEqual(v.val, other.String)
+}
+
 type normalBytes struct {
 	baseNormalValue[[]byte]
 }
@@ -70,6 +87,13 @@ func (v normalBytes) Bytes() ([]byte, bool) {
 	return v.val, true
 }
 
+func (v normalBytes) Equal(other NormalValue) bool {
+	if otherVal, ok := other.Bytes(); ok {
+		return bytes.Equal(v.val, otherVal)
+	}
+	return false
+}
+
 type normalTime struct {
 	baseNormalValue[time.Time]
 }
@@ -78,10 +102,18 @@ func (v normalTime) Time() (time.Time, bool) {
 	return v.val, true
 }
 
+func (v normalTime) Equal(other NormalValue) bool {
+	return areNormalScalarsEqual(v.val, other.Time)
+}
+
 type normalDocument struct {
 	baseNormalValue[*Document]
 }
 
+func (v normalDocument) Equal(other NormalValue) bool {
+	return areNormalScalarsEqual(v.val, other.Document)
+}
+
 func (v normalDocument) Document() (*Document, bool) {
 	return v.val, true
 }
@@ -128,3 +160,10 @@ func NewNormalTime(val time.Time) NormalValue {
 func NewNormalDocument(val *Document) NormalValue {
 	return normalDocument{baseNormalValue[*Document]{val: val}}
 }
+
+func areNormalScalarsEqual[T comparable](val T, f func() (T, bool)) bool {
+	if otherVal, ok := f(); ok {
+		return val == otherVal
+	}
+	return false
+}
diff --git a/client/normal_value.go b/client/normal_value.go
index 3f0681fbfc..18bdd74ff0 100644
--- a/client/normal_value.go
+++ b/client/normal_value.go
@@ -31,6 +31,9 @@ type NormalValue interface {
 	// if the option has value, otherwise it will return nil.
 	Unwrap() any
 
+	// Equal reports whether the value is equal to the given value.
+	Equal(NormalValue) bool
+
 	// IsNil returns if the value is nil. For not nillable values it will always return false.
 	IsNil() bool
 	// IsNillable returns if the value can be nil.
diff --git a/client/normal_value_test.go b/client/normal_value_test.go index ce454a55b4..c368a300e3 100644 --- a/client/normal_value_test.go +++ b/client/normal_value_test.go @@ -1647,3 +1647,1388 @@ func TestArrayValue_IsNillable(t *testing.T) { assert.True(t, v.IsNil()) } } + +func TestNormalValue_IsEqual(t *testing.T) { + now := time.Now() + later := now.Add(time.Hour) + doc1 := &Document{} + doc2 := &Document{} + doc3 := &Document{} + + tests := []struct { + name string + v1 NormalValue + v2 NormalValue + expected bool + }{ + // Values + { + name: "void", + v1: NormalVoid{}, + v2: NormalVoid{}, + expected: true, + }, + { + name: "void not equal", + v1: NormalVoid{}, + v2: NewNormalInt(1), + expected: false, + }, + { + name: "bool", + v1: NewNormalBool(true), + v2: NewNormalBool(true), + expected: true, + }, + { + name: "bool not equal", + v1: NewNormalBool(true), + v2: NewNormalBool(false), + expected: false, + }, + { + name: "bool different type", + v1: NewNormalBool(true), + v2: NewNormalInt(1), + expected: false, + }, + { + name: "int", + v1: NewNormalInt(1), + v2: NewNormalInt(1), + expected: true, + }, + { + name: "int not equal", + v1: NewNormalInt(1), + v2: NewNormalInt(2), + expected: false, + }, + { + name: "int different type", + v1: NewNormalInt(1), + v2: NewNormalFloat(1.0), + expected: false, + }, + { + name: "float", + v1: NewNormalFloat(1.0), + v2: NewNormalFloat(1.0), + expected: true, + }, + { + name: "float not equal", + v1: NewNormalFloat(1.0), + v2: NewNormalFloat(1.1), + expected: false, + }, + { + name: "float different type", + v1: NewNormalFloat(1.0), + v2: NewNormalString("1.0"), + expected: false, + }, + { + name: "string", + v1: NewNormalString("test"), + v2: NewNormalString("test"), + expected: true, + }, + { + name: "string not equal", + v1: NewNormalString("test"), + v2: NewNormalString("test2"), + expected: false, + }, + { + name: "string different type", + v1: NewNormalString("test"), + v2: NewNormalBytes([]byte("test")), + expected: false, + }, + { + name: "bytes", + v1: NewNormalBytes([]byte{1, 2, 3}), + v2: NewNormalBytes([]byte{1, 2, 3}), + expected: true, + }, + { + name: "bytes not equal", + v1: NewNormalBytes([]byte{1, 2, 3}), + v2: NewNormalBytes([]byte{1, 2, 4}), + expected: false, + }, + { + name: "bytes different length", + v1: NewNormalBytes([]byte{1, 2, 3}), + v2: NewNormalBytes([]byte{1, 2, 3, 4}), + expected: false, + }, + { + name: "bytes different type", + v1: NewNormalBytes([]byte{1, 2, 3}), + v2: NewNormalString("123"), + expected: false, + }, + { + name: "time", + v1: NewNormalTime(now), + v2: NewNormalTime(now), + expected: true, + }, + { + name: "time not equal", + v1: NewNormalTime(now), + v2: NewNormalTime(later), + expected: false, + }, + { + name: "time different type", + v1: NewNormalTime(now), + v2: NewNormalString(now.String()), + expected: false, + }, + { + name: "document", + v1: NewNormalDocument(doc1), + v2: NewNormalDocument(doc1), + expected: true, + }, + { + name: "document not equal", + v1: NewNormalDocument(doc1), + v2: NewNormalDocument(doc2), + expected: false, + }, + { + name: "document different type", + v1: NewNormalDocument(doc1), + v2: NewNormalString("document"), + expected: false, + }, + // Nillable values + { + name: "bool nillable", + v1: NewNormalNillableBool(immutable.Some(true)), + v2: NewNormalNillableBool(immutable.Some(true)), + expected: true, + }, + { + name: "bool nillable not equal", + v1: NewNormalNillableBool(immutable.Some(true)), + v2: NewNormalNillableBool(immutable.Some(false)), + expected: 
false, + }, + { + name: "bool nillable one nil", + v1: NewNormalNillableBool(immutable.Some(true)), + v2: NewNormalNillableBool(immutable.None[bool]()), + expected: false, + }, + { + name: "bool nillable different type", + v1: NewNormalNillableBool(immutable.Some(true)), + v2: NewNormalBool(true), + expected: false, + }, + { + name: "int nillable", + v1: NewNormalNillableInt(immutable.Some(int64(1))), + v2: NewNormalNillableInt(immutable.Some(int64(1))), + expected: true, + }, + { + name: "int nillable not equal", + v1: NewNormalNillableInt(immutable.Some(int64(1))), + v2: NewNormalNillableInt(immutable.Some(int64(2))), + expected: false, + }, + { + name: "int nillable one nil", + v1: NewNormalNillableInt(immutable.Some(int64(1))), + v2: NewNormalNillableInt(immutable.None[int64]()), + expected: false, + }, + { + name: "int nillable different type", + v1: NewNormalNillableInt(immutable.Some(int64(1))), + v2: NewNormalInt(1), + expected: false, + }, + { + name: "float nillable", + v1: NewNormalNillableFloat(immutable.Some(1.0)), + v2: NewNormalNillableFloat(immutable.Some(1.0)), + expected: true, + }, + { + name: "float nillable not equal", + v1: NewNormalNillableFloat(immutable.Some(1.0)), + v2: NewNormalNillableFloat(immutable.Some(2.0)), + expected: false, + }, + { + name: "float nillable one nil", + v1: NewNormalNillableFloat(immutable.Some(1.0)), + v2: NewNormalNillableFloat(immutable.None[float64]()), + expected: false, + }, + { + name: "float nillable different type", + v1: NewNormalNillableFloat(immutable.Some(1.0)), + v2: NewNormalFloat(1.0), + expected: false, + }, + { + name: "string nillable", + v1: NewNormalNillableString(immutable.Some("test")), + v2: NewNormalNillableString(immutable.Some("test")), + expected: true, + }, + { + name: "string nillable not equal", + v1: NewNormalNillableString(immutable.Some("test")), + v2: NewNormalNillableString(immutable.Some("test2")), + expected: false, + }, + { + name: "string nillable one nil", + v1: NewNormalNillableString(immutable.Some("test")), + v2: NewNormalNillableString(immutable.None[string]()), + expected: false, + }, + { + name: "string nillable different type", + v1: NewNormalNillableString(immutable.Some("test")), + v2: NewNormalString("test"), + expected: false, + }, + { + name: "bytes nillable", + v1: NewNormalNillableBytes(immutable.Some([]byte{1, 2, 3})), + v2: NewNormalNillableBytes(immutable.Some([]byte{1, 2, 3})), + expected: true, + }, + { + name: "bytes nillable not equal", + v1: NewNormalNillableBytes(immutable.Some([]byte{1, 2, 3})), + v2: NewNormalNillableBytes(immutable.Some([]byte{1, 2, 4})), + expected: false, + }, + { + name: "bytes nillable one nil", + v1: NewNormalNillableBytes(immutable.Some([]byte{1, 2, 3})), + v2: NewNormalNillableBytes(immutable.None[[]byte]()), + expected: false, + }, + { + name: "bytes nillable different length", + v1: NewNormalNillableBytes(immutable.Some([]byte{1, 2, 3})), + v2: NewNormalNillableBytes(immutable.Some([]byte{1, 2, 3, 4})), + expected: false, + }, + { + name: "bytes nillable different type", + v1: NewNormalNillableBytes(immutable.Some([]byte{1, 2, 3})), + v2: NewNormalBytes([]byte{1, 2, 3}), + expected: false, + }, + { + name: "time nillable", + v1: NewNormalNillableTime(immutable.Some(now)), + v2: NewNormalNillableTime(immutable.Some(now)), + expected: true, + }, + { + name: "time nillable not equal", + v1: NewNormalNillableTime(immutable.Some(now)), + v2: NewNormalNillableTime(immutable.Some(later)), + expected: false, + }, + { + name: "time nillable one nil", + v1: 
NewNormalNillableTime(immutable.Some(now)), + v2: NewNormalNillableTime(immutable.None[time.Time]()), + expected: false, + }, + { + name: "time nillable different type", + v1: NewNormalNillableTime(immutable.Some(now)), + v2: NewNormalTime(now), + expected: false, + }, + { + name: "document nillable", + v1: NewNormalNillableDocument(immutable.Some(doc1)), + v2: NewNormalNillableDocument(immutable.Some(doc1)), + expected: true, + }, + { + name: "document nillable not equal", + v1: NewNormalNillableDocument(immutable.Some(doc1)), + v2: NewNormalNillableDocument(immutable.Some(doc2)), + expected: false, + }, + { + name: "document nillable one nil", + v1: NewNormalNillableDocument(immutable.Some(doc1)), + v2: NewNormalNillableDocument(immutable.None[*Document]()), + expected: false, + }, + { + name: "document nillable different type", + v1: NewNormalNillableDocument(immutable.Some(doc1)), + v2: NewNormalDocument(doc1), + expected: false, + }, + // Arrays + { + name: "bool array", + v1: NewNormalBoolArray([]bool{true, false}), + v2: NewNormalBoolArray([]bool{true, false}), + expected: true, + }, + { + name: "bool array not equal", + v1: NewNormalBoolArray([]bool{true, false}), + v2: NewNormalBoolArray([]bool{true, true}), + expected: false, + }, + { + name: "bool array different length", + v1: NewNormalBoolArray([]bool{true, false}), + v2: NewNormalBoolArray([]bool{true}), + expected: false, + }, + { + name: "bool array different type", + v1: NewNormalBoolArray([]bool{true, false}), + v2: NewNormalIntArray([]int64{1, 0}), + expected: false, + }, + { + name: "int array", + v1: NewNormalIntArray([]int64{1, 2}), + v2: NewNormalIntArray([]int64{1, 2}), + expected: true, + }, + { + name: "int array not equal", + v1: NewNormalIntArray([]int64{1, 2}), + v2: NewNormalIntArray([]int64{1, 3}), + expected: false, + }, + { + name: "int array different length", + v1: NewNormalIntArray([]int64{1, 2}), + v2: NewNormalIntArray([]int64{1}), + expected: false, + }, + { + name: "int array different type", + v1: NewNormalIntArray([]int64{1, 2}), + v2: NewNormalFloatArray([]float64{1.0, 2.0}), + expected: false, + }, + { + name: "float array", + v1: NewNormalFloatArray([]float64{1.0, 2.0}), + v2: NewNormalFloatArray([]float64{1.0, 2.0}), + expected: true, + }, + { + name: "float array not equal", + v1: NewNormalFloatArray([]float64{1.0, 2.0}), + v2: NewNormalFloatArray([]float64{1.0, 3.0}), + expected: false, + }, + { + name: "float array different length", + v1: NewNormalFloatArray([]float64{1.0, 2.0}), + v2: NewNormalFloatArray([]float64{1.0}), + expected: false, + }, + { + name: "float array different type", + v1: NewNormalFloatArray([]float64{1.0, 2.0}), + v2: NewNormalStringArray([]string{"1.0", "2.0"}), + expected: false, + }, + { + name: "string array", + v1: NewNormalStringArray([]string{"test", "test2"}), + v2: NewNormalStringArray([]string{"test", "test2"}), + expected: true, + }, + { + name: "string array not equal", + v1: NewNormalStringArray([]string{"test", "test2"}), + v2: NewNormalStringArray([]string{"test", "test3"}), + expected: false, + }, + { + name: "string array different length", + v1: NewNormalStringArray([]string{"test", "test2"}), + v2: NewNormalStringArray([]string{"test"}), + expected: false, + }, + { + name: "string array different type", + v1: NewNormalStringArray([]string{"test", "test2"}), + v2: NewNormalBytesArray([][]byte{{1, 2, 3}, {4, 5, 6}}), + expected: false, + }, + { + name: "bytes array", + v1: NewNormalBytesArray([][]byte{{1, 2, 3}, {4, 5, 6}}), + v2: 
NewNormalBytesArray([][]byte{{1, 2, 3}, {4, 5, 6}}), + expected: true, + }, + { + name: "bytes array not equal", + v1: NewNormalBytesArray([][]byte{{1, 2, 3}, {4, 5, 6}}), + v2: NewNormalBytesArray([][]byte{{1, 2, 3}, {4, 5, 7}}), + expected: false, + }, + { + name: "bytes array different length", + v1: NewNormalBytesArray([][]byte{{1, 2, 3}, {4, 5, 6}}), + v2: NewNormalBytesArray([][]byte{{1, 2, 3}}), + expected: false, + }, + { + name: "bytes array different type", + v1: NewNormalBytesArray([][]byte{{1, 2, 3}, {4, 5, 6}}), + v2: NewNormalStringArray([]string{"123", "456"}), + expected: false, + }, + { + name: "time array", + v1: NewNormalTimeArray([]time.Time{now, now}), + v2: NewNormalTimeArray([]time.Time{now, now}), + expected: true, + }, + { + name: "time array not equal", + v1: NewNormalTimeArray([]time.Time{now, now}), + v2: NewNormalTimeArray([]time.Time{now, later}), + expected: false, + }, + { + name: "time array different length", + v1: NewNormalTimeArray([]time.Time{now, now}), + v2: NewNormalTimeArray([]time.Time{now}), + expected: false, + }, + { + name: "time array different type", + v1: NewNormalTimeArray([]time.Time{now, now}), + v2: NewNormalStringArray([]string{now.String(), now.String()}), + expected: false, + }, + { + name: "document array", + v1: NewNormalDocumentArray([]*Document{doc1, doc2}), + v2: NewNormalDocumentArray([]*Document{doc1, doc2}), + expected: true, + }, + { + name: "document array not equal", + v1: NewNormalDocumentArray([]*Document{doc1, doc2}), + v2: NewNormalDocumentArray([]*Document{doc1, doc1}), + expected: false, + }, + { + name: "document array different length", + v1: NewNormalDocumentArray([]*Document{doc1, doc2}), + v2: NewNormalDocumentArray([]*Document{doc1}), + expected: false, + }, + { + name: "document array different type", + v1: NewNormalDocumentArray([]*Document{doc1, doc2}), + v2: NewNormalStringArray([]string{"doc1", "doc2"}), + expected: false, + }, + // Arrays of nillables + { + name: "array of nillable bools", + v1: NewNormalNillableBoolArray([]immutable.Option[bool]{immutable.Some(true), immutable.Some(false)}), + v2: NewNormalNillableBoolArray([]immutable.Option[bool]{immutable.Some(true), immutable.Some(false)}), + expected: true, + }, + { + name: "array of nillable bools not equal", + v1: NewNormalNillableBoolArray([]immutable.Option[bool]{immutable.Some(true), immutable.Some(false)}), + v2: NewNormalNillableBoolArray([]immutable.Option[bool]{immutable.Some(false), immutable.Some(true)}), + expected: false, + }, + { + name: "array of nillable bools with nil values equal", + v1: NewNormalNillableBoolArray( + []immutable.Option[bool]{immutable.Some(true), immutable.None[bool]()}), + v2: NewNormalNillableBoolArray( + []immutable.Option[bool]{immutable.Some(true), immutable.None[bool]()}), + expected: true, + }, + { + name: "array of nillable bools with nil values not equal", + v1: NewNormalNillableBoolArray( + []immutable.Option[bool]{immutable.Some(true), immutable.None[bool]()}), + v2: NewNormalNillableBoolArray( + []immutable.Option[bool]{immutable.None[bool](), immutable.Some(true)}), + expected: false, + }, + { + name: "array of nillable bools different lengths", + v1: NewNormalNillableBoolArray( + []immutable.Option[bool]{immutable.Some(true), immutable.Some(false)}), + v2: NewNormalNillableBoolArray([]immutable.Option[bool]{immutable.Some(true)}), + expected: false, + }, + { + name: "array of nillable bools vs non-nillable array", + v1: NewNormalNillableBoolArray( + []immutable.Option[bool]{immutable.Some(true), 
immutable.Some(false)}), + v2: NewNormalBoolArray([]bool{true, false}), + expected: false, + }, + { + name: "array of nillable ints equal", + v1: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.Some(int64(1)), immutable.Some(int64(2))}), + v2: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.Some(int64(1)), immutable.Some(int64(2))}), + expected: true, + }, + { + name: "array of nillable ints not equal", + v1: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.Some(int64(1)), immutable.Some(int64(2))}), + v2: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.Some(int64(2)), immutable.Some(int64(1))}), + expected: false, + }, + { + name: "array of nillable ints with nil values equal", + v1: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.Some(int64(1)), immutable.None[int64]()}), + v2: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.Some(int64(1)), immutable.None[int64]()}), + expected: true, + }, + { + name: "array of nillable ints with nil values not equal", + v1: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.Some(int64(1)), immutable.None[int64]()}), + v2: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.None[int64](), immutable.Some(int64(1))}), + expected: false, + }, + { + name: "array of nillable ints different lengths", + v1: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.Some(int64(1)), immutable.Some(int64(2))}), + v2: NewNormalNillableIntArray([]immutable.Option[int64]{immutable.Some(int64(1))}), + expected: false, + }, + { + name: "array of nillable ints vs non-nillable array", + v1: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.Some(int64(1)), immutable.Some(int64(2))}), + v2: NewNormalIntArray([]int64{1, 2}), + expected: false, + }, + { + name: "float nillable array equal", + v1: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(1.0), immutable.Some(2.0)}), + v2: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(1.0), immutable.Some(2.0)}), + expected: true, + }, + { + name: "float nillable array not equal", + v1: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(1.0), immutable.Some(2.0)}), + v2: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(2.0), immutable.Some(1.0)}), + expected: false, + }, + { + name: "float nillable array with nil values equal", + v1: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(1.0), immutable.None[float64]()}), + v2: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(1.0), immutable.None[float64]()}), + expected: true, + }, + { + name: "float nillable array with nil values not equal", + v1: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(1.0), immutable.None[float64]()}), + v2: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.None[float64](), immutable.Some(1.0)}), + expected: false, + }, + { + name: "float nillable array different lengths", + v1: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(1.0), immutable.Some(2.0)}), + v2: NewNormalNillableFloatArray([]immutable.Option[float64]{immutable.Some(1.0)}), + expected: false, + }, + { + name: "float nillable array vs non-nillable array", + v1: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(1.0), immutable.Some(2.0)}), + v2: NewNormalFloatArray([]float64{1.0, 2.0}), + expected: false, + }, + { 
+ name: "array of nillable strings", + v1: NewNormalNillableStringArray( + []immutable.Option[string]{immutable.Some("test"), immutable.Some("test2")}), + v2: NewNormalNillableStringArray( + []immutable.Option[string]{immutable.Some("test"), immutable.Some("test2")}), + expected: true, + }, + { + name: "array of nillable strings not equal", + v1: NewNormalNillableStringArray( + []immutable.Option[string]{immutable.Some("test"), immutable.Some("test2")}), + v2: NewNormalNillableStringArray( + []immutable.Option[string]{immutable.Some("test"), immutable.Some("test3")}), + expected: false, + }, + { + name: "array of nillable strings with nil", + v1: NewNormalNillableStringArray( + []immutable.Option[string]{immutable.Some("test"), immutable.None[string]()}), + v2: NewNormalNillableStringArray( + []immutable.Option[string]{immutable.Some("test"), immutable.None[string]()}), + expected: true, + }, + { + name: "array of nillable strings different length", + v1: NewNormalNillableStringArray( + []immutable.Option[string]{immutable.Some("test"), immutable.Some("test2")}), + v2: NewNormalNillableStringArray( + []immutable.Option[string]{immutable.Some("test")}), + expected: false, + }, + { + name: "array of nillable strings different type", + v1: NewNormalNillableStringArray( + []immutable.Option[string]{immutable.Some("test"), immutable.Some("test2")}), + v2: NewNormalStringArray([]string{"test", "test2"}), + expected: false, + }, + { + name: "array of nillable bytes", + v1: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.Some([]byte{4, 5, 6})}), + v2: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.Some([]byte{4, 5, 6})}), + expected: true, + }, + { + name: "array of nillable bytes not equal", + v1: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.Some([]byte{4, 5, 6})}), + v2: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.Some([]byte{4, 5, 7})}), + expected: false, + }, + { + name: "array of nillable bytes with nil", + v1: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.None[[]byte]()}), + v2: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.None[[]byte]()}), + expected: true, + }, + { + name: "array of nillable bytes different nil values", + v1: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.Some([]byte{4, 5, 6})}), + v2: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.None[[]byte]()}), + expected: false, + }, + { + name: "array of nillable bytes different length", + v1: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.Some([]byte{4, 5, 6})}), + v2: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})}), + expected: false, + }, + { + name: "array of nillable bytes different type", + v1: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.Some([]byte{4, 5, 6})}), + v2: NewNormalBytesArray([][]byte{{1, 2, 3}, {4, 5, 6}}), + expected: false, + }, + { + name: "array of nillable time values", + v1: NewNormalNillableTimeArray( + []immutable.Option[time.Time]{immutable.Some(now), immutable.Some(now)}), + v2: NewNormalNillableTimeArray( + 
[]immutable.Option[time.Time]{immutable.Some(now), immutable.Some(now)}), + expected: true, + }, + { + name: "array of nillable time values not equal", + v1: NewNormalNillableTimeArray( + []immutable.Option[time.Time]{immutable.Some(now), immutable.Some(now)}), + v2: NewNormalNillableTimeArray( + []immutable.Option[time.Time]{immutable.Some(now), immutable.Some(now.Add(time.Hour))}), + expected: false, + }, + { + name: "array of nillable time values different lengths", + v1: NewNormalNillableTimeArray( + []immutable.Option[time.Time]{immutable.Some(now), immutable.Some(now)}), + v2: NewNormalNillableTimeArray( + []immutable.Option[time.Time]{immutable.Some(now)}), + expected: false, + }, + { + name: "array of nillable time values different type", + v1: NewNormalNillableTimeArray( + []immutable.Option[time.Time]{immutable.Some(now), immutable.Some(now)}), + v2: NewNormalTimeArray([]time.Time{now, now}), + expected: false, + }, + { + name: "array of nillable time values with nil", + v1: NewNormalNillableTimeArray( + []immutable.Option[time.Time]{immutable.Some(now), immutable.None[time.Time]()}), + v2: NewNormalNillableTimeArray( + []immutable.Option[time.Time]{immutable.Some(now), immutable.None[time.Time]()}), + expected: true, + }, + { + name: "array of nillable time values different nil vals", + v1: NewNormalNillableTimeArray( + []immutable.Option[time.Time]{immutable.Some(now), immutable.Some(now)}), + v2: NewNormalNillableTimeArray( + []immutable.Option[time.Time]{immutable.Some(now), immutable.None[time.Time]()}), + expected: false, + }, + { + name: "array of nillable time values with nil", + v1: NewNormalNillableTimeArray( + []immutable.Option[time.Time]{immutable.Some(now), immutable.None[time.Time]()}), + v2: NewNormalNillableTimeArray( + []immutable.Option[time.Time]{immutable.Some(now), immutable.None[time.Time]()}), + expected: true, + }, + { + name: "document nillable array", + v1: NewNormalNillableDocumentArray( + []immutable.Option[*Document]{immutable.Some(doc1), immutable.Some(doc2)}), + v2: NewNormalNillableDocumentArray( + []immutable.Option[*Document]{immutable.Some(doc1), immutable.Some(doc2)}), + expected: true, + }, + { + name: "document nillable array not equal", + v1: NewNormalNillableDocumentArray( + []immutable.Option[*Document]{immutable.Some(doc1), immutable.Some(doc2)}), + v2: NewNormalNillableDocumentArray( + []immutable.Option[*Document]{immutable.Some(doc1), immutable.Some(doc3)}), + expected: false, + }, + { + name: "document nillable array different lengths", + v1: NewNormalNillableDocumentArray( + []immutable.Option[*Document]{immutable.Some(doc1), immutable.Some(doc2)}), + v2: NewNormalNillableDocumentArray( + []immutable.Option[*Document]{immutable.Some(doc1)}), + expected: false, + }, + { + name: "document nillable array with nil", + v1: NewNormalNillableDocumentArray( + []immutable.Option[*Document]{immutable.Some(doc1), immutable.None[*Document]()}), + v2: NewNormalNillableDocumentArray( + []immutable.Option[*Document]{immutable.Some(doc1), immutable.None[*Document]()}), + expected: true, + }, + // Nillable arrays + { + name: "bool nillable array equal", + v1: NewNormalBoolNillableArray(immutable.Some([]bool{true})), + v2: NewNormalBoolNillableArray(immutable.Some([]bool{true})), + expected: true, + }, + { + name: "bool nillable array not equal", + v1: NewNormalBoolNillableArray(immutable.Some([]bool{true})), + v2: NewNormalBoolNillableArray(immutable.Some([]bool{false})), + expected: false, + }, + { + name: "bool nillable array nil vs non-nil", + 
v1: NewNormalBoolNillableArray(immutable.Some([]bool{true})), + v2: NewNormalBoolNillableArray(immutable.None[[]bool]()), + expected: false, + }, + { + name: "bool nillable array different lengths", + v1: NewNormalBoolNillableArray(immutable.Some([]bool{true, false})), + v2: NewNormalBoolNillableArray(immutable.Some([]bool{true})), + expected: false, + }, + { + name: "bool nillable array nil array equal", + v1: NewNormalBoolNillableArray(immutable.None[[]bool]()), + v2: NewNormalBoolNillableArray(immutable.None[[]bool]()), + expected: true, + }, + { + name: "bool nillable array nil array not equal", + v1: NewNormalBoolNillableArray(immutable.None[[]bool]()), + v2: NewNormalBoolNillableArray(immutable.Some([]bool{true})), + expected: false, + }, + { + name: "int nillable array", + v1: NewNormalIntNillableArray(immutable.Some([]int64{1})), + v2: NewNormalIntNillableArray(immutable.Some([]int64{1})), + expected: true, + }, + { + name: "int nillable array not equal", + v1: NewNormalIntNillableArray(immutable.Some([]int64{1})), + v2: NewNormalIntNillableArray(immutable.Some([]int64{2})), + expected: false, + }, + { + name: "int nillable array nil vs non-nil", + v1: NewNormalIntNillableArray(immutable.Some([]int64{1})), + v2: NewNormalIntNillableArray(immutable.None[[]int64]()), + expected: false, + }, + { + name: "int nillable array different lengths", + v1: NewNormalIntNillableArray(immutable.Some([]int64{1, 2})), + v2: NewNormalIntNillableArray(immutable.Some([]int64{1})), + expected: false, + }, + { + name: "int nillable array nil array equal", + v1: NewNormalIntNillableArray(immutable.None[[]int64]()), + v2: NewNormalIntNillableArray(immutable.None[[]int64]()), + expected: true, + }, + { + name: "int nillable array nil array not equal", + v1: NewNormalIntNillableArray(immutable.None[[]int64]()), + v2: NewNormalIntNillableArray(immutable.Some([]int64{1})), + expected: false, + }, + { + name: "float nillable array equal", + v1: NewNormalFloatNillableArray(immutable.Some([]float64{1.0})), + v2: NewNormalFloatNillableArray(immutable.Some([]float64{1.0})), + expected: true, + }, + { + name: "float nillable array not equal", + v1: NewNormalFloatNillableArray(immutable.Some([]float64{1.0})), + v2: NewNormalFloatNillableArray(immutable.Some([]float64{2.0})), + expected: false, + }, + { + name: "float nillable array nil vs non-nil", + v1: NewNormalFloatNillableArray(immutable.Some([]float64{1.0})), + v2: NewNormalFloatNillableArray(immutable.None[[]float64]()), + expected: false, + }, + { + name: "float nillable array different lengths", + v1: NewNormalFloatNillableArray(immutable.Some([]float64{1.0, 2.0})), + v2: NewNormalFloatNillableArray(immutable.Some([]float64{1.0})), + expected: false, + }, + { + name: "float nillable array nil array equal", + v1: NewNormalFloatNillableArray(immutable.None[[]float64]()), + v2: NewNormalFloatNillableArray(immutable.None[[]float64]()), + expected: true, + }, + { + name: "float nillable array nil array not equal", + v1: NewNormalFloatNillableArray(immutable.None[[]float64]()), + v2: NewNormalFloatNillableArray(immutable.Some([]float64{1.0})), + expected: false, + }, + { + name: "string nillable array equal", + v1: NewNormalStringNillableArray(immutable.Some([]string{"test"})), + v2: NewNormalStringNillableArray(immutable.Some([]string{"test"})), + expected: true, + }, + { + name: "string nillable array not equal", + v1: NewNormalStringNillableArray(immutable.Some([]string{"test"})), + v2: NewNormalStringNillableArray(immutable.Some([]string{"different"})), + 
expected: false, + }, + { + name: "string nillable array nil vs non-nil", + v1: NewNormalStringNillableArray(immutable.Some([]string{"test"})), + v2: NewNormalStringNillableArray(immutable.None[[]string]()), + expected: false, + }, + { + name: "string nillable array different lengths", + v1: NewNormalStringNillableArray(immutable.Some([]string{"test", "another"})), + v2: NewNormalStringNillableArray(immutable.Some([]string{"test"})), + expected: false, + }, + { + name: "string nillable array empty strings", + v1: NewNormalStringNillableArray(immutable.Some([]string{"", ""})), + v2: NewNormalStringNillableArray(immutable.Some([]string{"", ""})), + expected: true, + }, + { + name: "bytes nillable array equal", + v1: NewNormalBytesNillableArray(immutable.Some([][]byte{{1, 2, 3}})), + v2: NewNormalBytesNillableArray(immutable.Some([][]byte{{1, 2, 3}})), + expected: true, + }, + { + name: "bytes nillable array not equal", + v1: NewNormalBytesNillableArray(immutable.Some([][]byte{{1, 2, 3}})), + v2: NewNormalBytesNillableArray(immutable.Some([][]byte{{1, 2, 4}})), + expected: false, + }, + { + name: "bytes nillable array nil vs non-nil", + v1: NewNormalBytesNillableArray(immutable.Some([][]byte{{1, 2, 3}})), + v2: NewNormalBytesNillableArray(immutable.None[[][]byte]()), + expected: false, + }, + { + name: "bytes nillable array different lengths", + v1: NewNormalBytesNillableArray(immutable.Some([][]byte{{1, 2, 3}, {4, 5, 6}})), + v2: NewNormalBytesNillableArray(immutable.Some([][]byte{{1, 2, 3}})), + expected: false, + }, + { + name: "bytes nillable array empty slices", + v1: NewNormalBytesNillableArray(immutable.Some([][]byte{{}, {}})), + v2: NewNormalBytesNillableArray(immutable.Some([][]byte{{}, {}})), + expected: true, + }, + { + name: "time nillable array", + v1: NewNormalTimeNillableArray(immutable.Some([]time.Time{now})), + v2: NewNormalTimeNillableArray(immutable.Some([]time.Time{now})), + expected: true, + }, + { + name: "time nillable array equal", + v1: NewNormalTimeNillableArray(immutable.Some([]time.Time{now})), + v2: NewNormalTimeNillableArray(immutable.Some([]time.Time{now})), + expected: true, + }, + { + name: "time nillable array not equal", + v1: NewNormalTimeNillableArray(immutable.Some([]time.Time{now})), + v2: NewNormalTimeNillableArray(immutable.Some([]time.Time{now.Add(time.Hour)})), + expected: false, + }, + { + name: "time nillable array nil vs non-nil", + v1: NewNormalTimeNillableArray(immutable.Some([]time.Time{now})), + v2: NewNormalTimeNillableArray(immutable.None[[]time.Time]()), + expected: false, + }, + { + name: "time nillable array different lengths", + v1: NewNormalTimeNillableArray(immutable.Some([]time.Time{now, now.Add(time.Hour)})), + v2: NewNormalTimeNillableArray(immutable.Some([]time.Time{now})), + expected: false, + }, + { + name: "time nillable array zero times", + v1: NewNormalTimeNillableArray(immutable.Some([]time.Time{{}, {}})), + v2: NewNormalTimeNillableArray(immutable.Some([]time.Time{{}, {}})), + expected: true, + }, + { + name: "document nillable array", + v1: NewNormalDocumentNillableArray(immutable.Some([]*Document{doc1})), + v2: NewNormalDocumentNillableArray(immutable.Some([]*Document{doc1})), + expected: true, + }, + { + name: "document nillable array equal", + v1: NewNormalDocumentNillableArray(immutable.Some([]*Document{doc1})), + v2: NewNormalDocumentNillableArray(immutable.Some([]*Document{doc1})), + expected: true, + }, + { + name: "document nillable array not equal", + v1: 
NewNormalDocumentNillableArray(immutable.Some([]*Document{doc1})), + v2: NewNormalDocumentNillableArray(immutable.Some([]*Document{doc2})), + expected: false, + }, + { + name: "document nillable array nil vs non-nil", + v1: NewNormalDocumentNillableArray(immutable.Some([]*Document{doc1})), + v2: NewNormalDocumentNillableArray(immutable.None[[]*Document]()), + expected: false, + }, + { + name: "document nillable array different lengths", + v1: NewNormalDocumentNillableArray(immutable.Some([]*Document{doc1, doc2})), + v2: NewNormalDocumentNillableArray(immutable.Some([]*Document{doc1})), + expected: false, + }, + { + name: "document nillable array with nil documents", + v1: NewNormalDocumentNillableArray(immutable.Some([]*Document{nil, nil})), + v2: NewNormalDocumentNillableArray(immutable.Some([]*Document{nil, nil})), + expected: true, + }, + // Nillable arrays of nillables + { + name: "nillable bool nillable array", + v1: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(true)})), + v2: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(true)})), + expected: true, + }, + { + name: "nillable bool nillable array equal", + v1: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(true)})), + v2: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(true)})), + expected: true, + }, + { + name: "nillable bool nillable array not equal", + v1: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(true)})), + v2: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(false)})), + expected: false, + }, + { + name: "nillable bool nillable array outer nil", + v1: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(true)})), + v2: NewNormalNillableBoolNillableArray(immutable.None[[]immutable.Option[bool]]()), + expected: false, + }, + { + name: "nillable bool nillable array inner nil", + v1: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(true), immutable.None[bool]()})), + v2: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(true), immutable.None[bool]()})), + expected: true, + }, + { + name: "nillable bool nillable array different type", + v1: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(true)})), + v2: NewNormalNillableBoolArray([]immutable.Option[bool]{immutable.Some(true)}), + expected: false, + }, + { + name: "nillable int nillable array", + v1: NewNormalNillableIntNillableArray( + immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1))})), + v2: NewNormalNillableIntNillableArray( + immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1))})), + expected: true, + }, + { + name: "nillable int nillable array equal", + v1: NewNormalNillableIntNillableArray( + immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1))})), + v2: NewNormalNillableIntNillableArray( + immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1))})), + expected: true, + }, + { + name: "nillable int nillable array not equal", + v1: NewNormalNillableIntNillableArray( + immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1))})), + v2: NewNormalNillableIntNillableArray( + immutable.Some([]immutable.Option[int64]{immutable.Some(int64(2))})), + expected: false, + }, + { + name: 
"nillable int nillable array outer nil", + v1: NewNormalNillableIntNillableArray( + immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1))})), + v2: NewNormalNillableIntNillableArray(immutable.None[[]immutable.Option[int64]]()), + expected: false, + }, + { + name: "nillable int nillable array inner nil", + v1: NewNormalNillableIntNillableArray( + immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1)), immutable.None[int64]()})), + v2: NewNormalNillableIntNillableArray( + immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1)), immutable.None[int64]()})), + expected: true, + }, + { + name: "nillable int nillable array different type", + v1: NewNormalNillableIntNillableArray( + immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1))})), + v2: NewNormalNillableIntArray([]immutable.Option[int64]{immutable.Some(int64(1))}), + expected: false, + }, + { + name: "nillable float nillable array", + v1: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(1.0)})), + v2: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(1.0)})), + expected: true, + }, + { + name: "nillable float nillable array equal", + v1: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(1.0)})), + v2: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(1.0)})), + expected: true, + }, + { + name: "nillable float nillable array not equal", + v1: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(1.0)})), + v2: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(2.0)})), + expected: false, + }, + { + name: "nillable float nillable array outer nil", + v1: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(1.0)})), + v2: NewNormalNillableFloatNillableArray(immutable.None[[]immutable.Option[float64]]()), + expected: false, + }, + { + name: "nillable float nillable array inner nil", + v1: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(1.0), immutable.None[float64]()})), + v2: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(1.0), immutable.None[float64]()})), + expected: true, + }, + { + name: "nillable float nillable array different type", + v1: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(1.0)})), + v2: NewNormalNillableFloatArray([]immutable.Option[float64]{immutable.Some(1.0)}), + expected: false, + }, + { + name: "nillable string nillable array", + v1: NewNormalNillableStringNillableArray( + immutable.Some([]immutable.Option[string]{immutable.Some("test")})), + v2: NewNormalNillableStringNillableArray( + immutable.Some([]immutable.Option[string]{immutable.Some("test")})), + expected: true, + }, + { + name: "nillable string nillable array equal", + v1: NewNormalNillableStringNillableArray( + immutable.Some([]immutable.Option[string]{immutable.Some("test")})), + v2: NewNormalNillableStringNillableArray( + immutable.Some([]immutable.Option[string]{immutable.Some("test")})), + expected: true, + }, + { + name: "nillable string nillable array not equal", + v1: NewNormalNillableStringNillableArray( + immutable.Some([]immutable.Option[string]{immutable.Some("test")})), + v2: NewNormalNillableStringNillableArray( + 
immutable.Some([]immutable.Option[string]{immutable.Some("different")})), + expected: false, + }, + { + name: "nillable string nillable array outer nil", + v1: NewNormalNillableStringNillableArray( + immutable.Some([]immutable.Option[string]{immutable.Some("test")})), + v2: NewNormalNillableStringNillableArray(immutable.None[[]immutable.Option[string]]()), + expected: false, + }, + { + name: "nillable string nillable array inner nil", + v1: NewNormalNillableStringNillableArray( + immutable.Some([]immutable.Option[string]{immutable.Some("test"), immutable.None[string]()})), + v2: NewNormalNillableStringNillableArray( + immutable.Some([]immutable.Option[string]{immutable.Some("test"), immutable.None[string]()})), + expected: true, + }, + { + name: "nillable string nillable array different type", + v1: NewNormalNillableStringNillableArray( + immutable.Some([]immutable.Option[string]{immutable.Some("test")})), + v2: NewNormalNillableStringArray([]immutable.Option[string]{immutable.Some("test")}), + expected: false, + }, + { + name: "nillable bytes nillable array equal", + v1: NewNormalNillableBytesNillableArray( + immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})})), + v2: NewNormalNillableBytesNillableArray( + immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})})), + expected: true, + }, + { + name: "nillable bytes nillable array not equal", + v1: NewNormalNillableBytesNillableArray( + immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})})), + v2: NewNormalNillableBytesNillableArray( + immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 4})})), + expected: false, + }, + { + name: "nillable bytes nillable array outer nil", + v1: NewNormalNillableBytesNillableArray( + immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})})), + v2: NewNormalNillableBytesNillableArray(immutable.None[[]immutable.Option[[]byte]]()), + expected: false, + }, + { + name: "nillable bytes nillable array inner nil", + v1: NewNormalNillableBytesNillableArray( + immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.None[[]byte]()})), + v2: NewNormalNillableBytesNillableArray( + immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.None[[]byte]()})), + expected: true, + }, + { + name: "nillable bytes nillable array different type", + v1: NewNormalNillableBytesNillableArray( + immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})})), + v2: NewNormalNillableBytesArray([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})}), + expected: false, + }, + { + name: "nillable time nillable array equal", + v1: NewNormalNillableTimeNillableArray( + immutable.Some([]immutable.Option[time.Time]{immutable.Some(now)})), + v2: NewNormalNillableTimeNillableArray( + immutable.Some([]immutable.Option[time.Time]{immutable.Some(now)})), + expected: true, + }, + 
{ + name: "nillable time nillable array not equal", + v1: NewNormalNillableTimeNillableArray( + immutable.Some([]immutable.Option[time.Time]{immutable.Some(now)})), + v2: NewNormalNillableTimeNillableArray( + immutable.Some([]immutable.Option[time.Time]{immutable.Some(now.Add(time.Hour))})), + expected: false, + }, + { + name: "nillable time nillable array outer nil", + v1: NewNormalNillableTimeNillableArray( + immutable.Some([]immutable.Option[time.Time]{immutable.Some(now)})), + v2: NewNormalNillableTimeNillableArray(immutable.None[[]immutable.Option[time.Time]]()), + expected: false, + }, + { + name: "nillable time nillable array inner nil", + v1: NewNormalNillableTimeNillableArray( + immutable.Some([]immutable.Option[time.Time]{immutable.Some(now), immutable.None[time.Time]()})), + v2: NewNormalNillableTimeNillableArray( + immutable.Some([]immutable.Option[time.Time]{immutable.Some(now), immutable.None[time.Time]()})), + expected: true, + }, + { + name: "nillable time nillable array different type", + v1: NewNormalNillableTimeNillableArray( + immutable.Some([]immutable.Option[time.Time]{immutable.Some(now)})), + v2: NewNormalNillableTimeArray([]immutable.Option[time.Time]{immutable.Some(now)}), + expected: false, + }, + { + name: "nillable document nillable array", + v1: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc1)})), + v2: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc1)})), + expected: true, + }, + { + name: "nillable document nillable array equal", + v1: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc1)})), + v2: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc1)})), + expected: true, + }, + { + name: "nillable document nillable array not equal", + v1: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc1)})), + v2: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc2)})), + expected: false, + }, + { + name: "nillable document nillable array outer nil", + v1: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc1)})), + v2: NewNormalNillableDocumentNillableArray(immutable.None[[]immutable.Option[*Document]]()), + expected: false, + }, + { + name: "nillable document nillable array inner nil", + v1: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc1), immutable.None[*Document]()})), + v2: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc1), immutable.None[*Document]()})), + expected: true, + }, + { + name: "nillable document nillable array different type", + v1: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc1)})), + v2: NewNormalNillableDocumentArray([]immutable.Option[*Document]{immutable.Some(doc1)}), + expected: false, + }, + } + + for _, tt := range tests { + tStr := string(tt.name) + t.Run(tStr, func(t *testing.T) { + actual := tt.v1.Equal(tt.v2) + assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/client/normal_void.go b/client/normal_void.go index e3e29b5094..3e13fe489d 100644 --- a/client/normal_void.go +++ b/client/normal_void.go @@ -24,6 +24,11 @@ func (NormalVoid) Unwrap() any { return nil } +func 
(NormalVoid) Equal(other NormalValue) bool { + _, ok := other.(NormalVoid) + return ok +} + func (NormalVoid) IsNil() bool { return false } diff --git a/client/schema_field_description.go b/client/schema_field_description.go index cc5690b72c..0619aebdf1 100644 --- a/client/schema_field_description.go +++ b/client/schema_field_description.go @@ -186,6 +186,29 @@ func (k ScalarArrayKind) IsArray() bool { return true } +func (k ScalarArrayKind) SubKind() FieldKind { + switch k { + case FieldKind_NILLABLE_BOOL_ARRAY: + return FieldKind_NILLABLE_BOOL + case FieldKind_BOOL_ARRAY: + return FieldKind_NILLABLE_BOOL + case FieldKind_NILLABLE_INT_ARRAY: + return FieldKind_NILLABLE_INT + case FieldKind_INT_ARRAY: + return FieldKind_NILLABLE_INT + case FieldKind_NILLABLE_FLOAT_ARRAY: + return FieldKind_NILLABLE_FLOAT + case FieldKind_FLOAT_ARRAY: + return FieldKind_NILLABLE_FLOAT + case FieldKind_NILLABLE_STRING_ARRAY: + return FieldKind_NILLABLE_STRING + case FieldKind_STRING_ARRAY: + return FieldKind_NILLABLE_STRING + default: + return FieldKind_None + } +} + func NewCollectionKind(root uint32, isArray bool) *CollectionKind { return &CollectionKind{ Root: root, diff --git a/client/schema_field_description_test.go b/client/schema_field_description_test.go new file mode 100644 index 0000000000..39dea4cf60 --- /dev/null +++ b/client/schema_field_description_test.go @@ -0,0 +1,77 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package client + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestField_ScalarArray_HasSubKind(t *testing.T) { + tests := []struct { + name string + arrKind ScalarArrayKind + subKind ScalarKind + }{ + { + name: "bool array", + arrKind: FieldKind_BOOL_ARRAY, + subKind: FieldKind_NILLABLE_BOOL, + }, + { + name: "int array", + arrKind: FieldKind_INT_ARRAY, + subKind: FieldKind_NILLABLE_INT, + }, + { + name: "float array", + arrKind: FieldKind_FLOAT_ARRAY, + subKind: FieldKind_NILLABLE_FLOAT, + }, + { + name: "string array", + arrKind: FieldKind_STRING_ARRAY, + subKind: FieldKind_NILLABLE_STRING, + }, + { + name: "nillable bool array", + arrKind: FieldKind_NILLABLE_BOOL_ARRAY, + subKind: FieldKind_NILLABLE_BOOL, + }, + { + name: "nillable int array", + arrKind: FieldKind_NILLABLE_INT_ARRAY, + subKind: FieldKind_NILLABLE_INT, + }, + { + name: "nillable float array", + arrKind: FieldKind_NILLABLE_FLOAT_ARRAY, + subKind: FieldKind_NILLABLE_FLOAT, + }, + { + name: "nillable string array", + arrKind: FieldKind_NILLABLE_STRING_ARRAY, + subKind: FieldKind_NILLABLE_STRING, + }, + { + name: "unknown array kind", + arrKind: ScalarArrayKind(0), + subKind: FieldKind_None, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.subKind, tt.arrKind.SubKind()) + }) + } +} diff --git a/internal/core/encoding.go b/internal/core/encoding.go index 14dfb072b1..6e2e0e0dcb 100644 --- a/internal/core/encoding.go +++ b/internal/core/encoding.go @@ -254,6 +254,12 @@ func DecodeIndexDataStoreKey( return IndexDataStoreKey{}, ErrInvalidKey } + if kind != nil && kind.IsArray() { + if arrKind, ok := kind.(client.ScalarArrayKind); ok { + kind = arrKind.SubKind() + } + } + var val client.NormalValue data, val, err = encoding.DecodeFieldValue(data, descending, kind) if err != nil { diff --git a/internal/core/key.go b/internal/core/key.go index 0e7942411d..60601795b2 100644 --- a/internal/core/key.go +++ b/internal/core/key.go @@ -626,6 +626,25 @@ func (k *IndexDataStoreKey) ToString() string { return string(k.Bytes()) } +// Equal returns true if the two keys are equal +func (k *IndexDataStoreKey) Equal(other IndexDataStoreKey) bool { + if k.CollectionID != other.CollectionID || k.IndexID != other.IndexID { + return false + } + + if len(k.Fields) != len(other.Fields) { + return false + } + + for i, field := range k.Fields { + if !field.Value.Equal(other.Fields[i].Value) || field.Descending != other.Fields[i].Descending { + return false + } + } + + return true +} + func (k PrimaryDataStoreKey) ToDataStoreKey() DataStoreKey { return DataStoreKey{ CollectionRootID: k.CollectionRootID, diff --git a/internal/core/key_test.go b/internal/core/key_test.go index b564fb001b..4cdb46b72d 100644 --- a/internal/core/key_test.go +++ b/internal/core/key_test.go @@ -323,3 +323,71 @@ func TestDecodeIndexDataStoreKey_InvalidKey(t *testing.T) { }) } } + +func TestIndexDataStoreKey_IsEqual(t *testing.T) { + const colID, indexID = 1, 2 + + cases := []struct { + name string + key1 IndexDataStoreKey + key2 IndexDataStoreKey + shouldMatch bool + }{ + { + name: "empty", + key1: IndexDataStoreKey{}, + key2: IndexDataStoreKey{}, + shouldMatch: true, + }, + { + name: "same", + key1: NewIndexDataStoreKey(colID, indexID, []IndexedField{{Value: client.NewNormalInt(5)}}), + key2: NewIndexDataStoreKey(colID, indexID, []IndexedField{{Value: client.NewNormalInt(5)}}), + shouldMatch: true, + }, + { + name: "different collection", + key1: NewIndexDataStoreKey(colID, indexID, 
[]IndexedField{{Value: client.NewNormalInt(5)}}), + key2: NewIndexDataStoreKey(colID+1, indexID, []IndexedField{{Value: client.NewNormalInt(5)}}), + shouldMatch: false, + }, + { + name: "different index", + key1: NewIndexDataStoreKey(colID, indexID, []IndexedField{{Value: client.NewNormalInt(5)}}), + key2: NewIndexDataStoreKey(colID, indexID+1, []IndexedField{{Value: client.NewNormalInt(5)}}), + shouldMatch: false, + }, + { + name: "different field", + key1: NewIndexDataStoreKey(colID, indexID, []IndexedField{{Value: client.NewNormalInt(5)}}), + key2: NewIndexDataStoreKey(colID, indexID, []IndexedField{{Value: client.NewNormalInt(6)}}), + shouldMatch: false, + }, + { + name: "different field count", + key1: NewIndexDataStoreKey(colID, indexID, []IndexedField{{Value: client.NewNormalInt(5)}}), + key2: NewIndexDataStoreKey(colID, indexID, + []IndexedField{{Value: client.NewNormalInt(5)}, {Value: client.NewNormalInt(6)}}), + shouldMatch: false, + }, + { + name: "different field type", + key1: NewIndexDataStoreKey(colID, indexID, []IndexedField{{Value: client.NewNormalInt(5)}}), + key2: NewIndexDataStoreKey(colID, indexID, []IndexedField{{Value: client.NewNormalString("5")}}), + shouldMatch: false, + }, + { + name: "different field descending", + key1: NewIndexDataStoreKey(colID, indexID, []IndexedField{{Value: client.NewNormalInt(5)}}), + key2: NewIndexDataStoreKey(colID, indexID, + []IndexedField{{Value: client.NewNormalInt(5), Descending: true}}), + shouldMatch: false, + }, + } + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + res := c.key1.Equal(c.key2) + assert.Equal(t, c.shouldMatch, res, c.name) + }) + } +} diff --git a/internal/db/collection.go b/internal/db/collection.go index 785c96641e..8f78e51429 100644 --- a/internal/db/collection.go +++ b/internal/db/collection.go @@ -852,6 +852,11 @@ func (c *collection) Delete( primaryKey := c.getPrimaryKeyFromDocID(docID) + err = c.deleteIndexedDocWithID(ctx, docID) + if err != nil { + return false, err + } + err = c.applyDelete(ctx, primaryKey) if err != nil { return false, err diff --git a/internal/db/collection_index.go b/internal/db/collection_index.go index c606cc45b7..a0786eb8c8 100644 --- a/internal/db/collection_index.go +++ b/internal/db/collection_index.go @@ -181,6 +181,25 @@ func (c *collection) deleteIndexedDoc( return nil } +// deleteIndexedDocWithID deletes an indexed document with the provided document ID. +func (c *collection) deleteIndexedDocWithID( + ctx context.Context, + docID client.DocID, +) error { + // we need to fetch the document to delete it from the indexes, because in order to do so + // we need to know the values of the fields that are indexed. + doc, err := c.get( + ctx, + c.getPrimaryKeyFromDocID(docID), + c.Definition().CollectIndexedFields(), + false, + ) + if err != nil { + return err + } + return c.deleteIndexedDoc(ctx, doc) +} + // CreateIndex creates a new index on the collection. // // If the index name is empty, a name will be automatically generated. 
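A minimal sketch (not part of the patch; the helper name is hypothetical) of how the new IndexDataStoreKey.Equal supports reconciling stored index entries against the keys an updated document still produces:

	func isRetired(oldKey core.IndexDataStoreKey, newKeys []core.IndexDataStoreKey) bool {
		for i := range newKeys {
			if oldKey.Equal(newKeys[i]) {
				// the key is still produced by the updated document
				return false
			}
		}
		// no longer produced by the document, so it can be deleted from the datastore
		return true
	}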
diff --git a/internal/db/fetcher/errors.go b/internal/db/fetcher/errors.go index 2a2967bbdb..22f0c8b182 100644 --- a/internal/db/fetcher/errors.go +++ b/internal/db/fetcher/errors.go @@ -13,6 +13,7 @@ package fetcher import ( "fmt" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/errors" ) @@ -30,6 +31,7 @@ const ( errMissingMapper string = "missing document mapper" errInvalidInOperatorValue string = "invalid _in/_nin value" errInvalidFilterOperator string = "invalid filter operator is provided" + errNotSupportedKindByIndex string = "kind is not supported by index" errUnexpectedTypeValue string = "unexpected type value" ) @@ -107,6 +109,11 @@ func NewErrInvalidFilterOperator(operator string) error { return errors.New(errInvalidFilterOperator, errors.NewKV("Operator", operator)) } +// NewErrNotSupportedKindByIndex returns an error indicating that the given kind is not supported by index. +func NewErrNotSupportedKindByIndex(kind client.FieldKind) error { + return errors.New(errNotSupportedKindByIndex, errors.NewKV("Kind", kind.String())) +} + // NewErrUnexpectedTypeValue returns an error indicating that the given value is of an unexpected type. func NewErrUnexpectedTypeValue[T any](value any) error { var t T diff --git a/internal/db/fetcher/indexer.go b/internal/db/fetcher/indexer.go index 7eb8f5b117..4d370146ed 100644 --- a/internal/db/fetcher/indexer.go +++ b/internal/db/fetcher/indexer.go @@ -85,7 +85,10 @@ func (f *IndexFetcher) Init( outer: for i := range fields { for j := range f.indexedFields { - if fields[i].Name == f.indexedFields[j].Name { + // If the field is an array, we want to keep it for the document fetcher as well, + // because the index contains only individual array elements, not the whole array. + // The doc fetcher will fetch the whole array for us. + if fields[i].Name == f.indexedFields[j].Name && !fields[i].Kind.IsArray() { continue outer } } @@ -156,6 +159,12 @@ func (f *IndexFetcher) FetchNext(ctx context.Context) (EncodedDocument, ExecInfo hasNilField = true } + // The index stores only a single array element per entry. So we skip the field here + // and let the doc fetcher fetch the whole array. + if indexedField.Kind.IsArray() { + continue + } + // We need to convert it to cbor bytes as this is what it will be encoded from on value retrieval. // In the future we have to either get rid of CBOR or properly handle different encoding // for properties in a single document. diff --git a/internal/db/fetcher/indexer_iterators.go b/internal/db/fetcher/indexer_iterators.go index e4e69c99c3..ecf964185d 100644 --- a/internal/db/fetcher/indexer_iterators.go +++ b/internal/db/fetcher/indexer_iterators.go @@ -29,24 +29,27 @@ import ( ) const ( - opEq = "_eq" - opGt = "_gt" - opGe = "_ge" - opLt = "_lt" - opLe = "_le" - opNe = "_ne" - opIn = "_in" - opNin = "_nin" - opLike = "_like" - opNlike = "_nlike" - opILike = "_ilike" - opNILike = "_nilike" + opEq = "_eq" + opGt = "_gt" + opGe = "_ge" + opLt = "_lt" + opLe = "_le" + opNe = "_ne" + opIn = "_in" + opNin = "_nin" + opLike = "_like" + opNlike = "_nlike" + opILike = "_ilike" + opNILike = "_nilike" + compOpAny = "_any" + compOpAll = "_all" + compOpNone = "_none" // it's just there for composite indexes. We construct a slice of value matchers with // every matcher being responsible for a corresponding field in the index to match. // For some fields there might not be any criteria to match. For example, if you have // a composite index of /name/age/email/ and in the filter you specify only "name" and "email". 
- // Then the "_any" matcher will be used for "age". - opAny = "_any" + // Then the "__any" matcher will be used for "age". + opAny = "__any" ) // indexIterator is an iterator over index keys. @@ -155,6 +158,8 @@ type eqSingleIndexIterator struct { store datastore.DSReaderWriter } +var _ indexIterator = (*eqSingleIndexIterator)(nil) + func (iter *eqSingleIndexIterator) Init(ctx context.Context, store datastore.DSReaderWriter) error { iter.ctx = ctx iter.store = store @@ -177,7 +182,7 @@ func (iter *eqSingleIndexIterator) Next() (indexIterResult, error) { return indexIterResult{key: iter.indexKey, value: val, foundKey: true}, nil } -func (i *eqSingleIndexIterator) Close() error { +func (iter *eqSingleIndexIterator) Close() error { return nil } @@ -190,6 +195,8 @@ type inIndexIterator struct { hasIterator bool } +var _ indexIterator = (*inIndexIterator)(nil) + func (iter *inIndexIterator) nextIterator() (bool, error) { if iter.nextValIndex > 0 { err := iter.indexIterator.Close() @@ -246,6 +253,58 @@ func (iter *inIndexIterator) Close() error { return nil } +// arrayIndexIterator is an iterator over indexed array elements. +// It keeps track of the already fetched documents to avoid duplicates. +type arrayIndexIterator struct { + inner indexIterator + + fetchedDocs map[string]struct{} + + ctx context.Context + store datastore.DSReaderWriter +} + +var _ indexIterator = (*arrayIndexIterator)(nil) + +func (iter *arrayIndexIterator) Init(ctx context.Context, store datastore.DSReaderWriter) error { + iter.ctx = ctx + iter.store = store + iter.fetchedDocs = make(map[string]struct{}) + return iter.inner.Init(ctx, store) +} + +func (iter *arrayIndexIterator) Next() (indexIterResult, error) { + for { + res, err := iter.inner.Next() + if err != nil { + return indexIterResult{}, err + } + if !res.foundKey { + return res, nil + } + var docID string + if len(res.value) > 0 { + docID = string(res.value) + } else { + lastField := &res.key.Fields[len(res.key.Fields)-1] + var ok bool + docID, ok = lastField.Value.String() + if !ok { + return indexIterResult{}, NewErrUnexpectedTypeValue[string](lastField.Value) + } + } + if _, ok := iter.fetchedDocs[docID]; ok { + continue + } + iter.fetchedDocs[docID] = struct{}{} + return res, nil + } +} + +func (iter *arrayIndexIterator) Close() error { + return iter.inner.Close() +} + func executeValueMatchers(matchers []valueMatcher, fields []core.IndexedField) (bool, error) { for i := range matchers { res, err := matchers[i].Match(fields[i].Value) @@ -349,6 +408,31 @@ func (m *timeMatcher) Match(value client.NormalValue) (bool, error) { return false, NewErrInvalidFilterOperator(m.op) } +type boolMatcher struct { + value bool + isEq bool +} + +func (m *boolMatcher) Match(value client.NormalValue) (bool, error) { + boolVal, ok := value.Bool() + if !ok { + if boolOptVal, ok := value.NillableBool(); ok { + boolVal = boolOptVal.Value() + } else { + intVal, ok := value.Int() + if !ok { + if intOptVal, ok := value.NillableInt(); ok { + intVal = intOptVal.Value() + } else { + return false, NewErrUnexpectedTypeValue[bool](value) + } + } + boolVal = intVal != 0 + } + } + return (boolVal == m.value) == m.isEq, nil +} + type nilMatcher struct { matchNil bool } @@ -449,15 +533,29 @@ type anyMatcher struct{} func (m *anyMatcher) Match(client.NormalValue) (bool, error) { return true, nil } -// newPrefixIndexIterator creates a new eqPrefixIndexIterator for fetching indexed data. +// invertedMatcher inverts the result of the inner matcher. 
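+// For example, for an array condition like {_none: {_eq: 5}} the matcher built for the inner
+// _eq condition is wrapped in an invertedMatcher (see createIndexIterator below), so index
+// entries that satisfy the inner condition are filtered out.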
+type invertedMatcher struct { + matcher valueMatcher +} + +func (m *invertedMatcher) Match(val client.NormalValue) (bool, error) { + res, err := m.matcher.Match(val) + if err != nil { + return false, err + } + return !res, nil +} + +// newPrefixIteratorFromConditions creates a new indexPrefixIterator for fetching indexed data. // It can modify the input matchers slice. -func (f *IndexFetcher) newPrefixIndexIterator( +func (f *IndexFetcher) newPrefixIteratorFromConditions( fieldConditions []fieldFilterCond, matchers []valueMatcher, ) (*indexPrefixIterator, error) { keyFieldValues := make([]client.NormalValue, 0, len(fieldConditions)) for i := range fieldConditions { - if fieldConditions[i].op != opEq { + c := &fieldConditions[i] + if c.op != opEq || c.arrOp == compOpNone { // prefix can be created only for subsequent _eq conditions // if we encounter any other condition, we've built the longest prefix we could break } @@ -474,10 +572,10 @@ func (f *IndexFetcher) newPrefixIndexIterator( key := f.newIndexDataStoreKeyWithValues(keyFieldValues) - return f.newQueryResultIterator(key, matchers, &f.execInfo), nil + return f.newPrefixIterator(key, matchers, &f.execInfo), nil } -func (f *IndexFetcher) newQueryResultIterator( +func (f *IndexFetcher) newPrefixIterator( indexKey core.IndexDataStoreKey, matchers []valueMatcher, execInfo *ExecInfo, @@ -528,7 +626,7 @@ func (f *IndexFetcher) newInIndexIterator( indexKey := f.newIndexDataStoreKey() indexKey.Fields = []core.IndexedField{{Descending: f.indexDesc.Fields[0].Descending}} - iter = f.newQueryResultIterator(indexKey, matchers, &f.execInfo) + iter = f.newPrefixIterator(indexKey, matchers, &f.execInfo) } return &inIndexIterator{ indexIterator: iter, @@ -566,8 +664,19 @@ func (f *IndexFetcher) createIndexIterator() (indexIterator, error) { return nil, err } - switch fieldConditions[0].op { - case opEq: + hasArray := false + for i := range fieldConditions { + if len(fieldConditions[i].arrOp) > 0 { + hasArray = true + if fieldConditions[i].arrOp == compOpNone { + matchers[i] = &invertedMatcher{matcher: matchers[i]} + } + } + } + + var iter indexIterator + + if fieldConditions[0].op == opEq { if isUniqueFetchByFullKey(&f.indexDesc, fieldConditions) { keyFieldValues := make([]client.NormalValue, len(fieldConditions)) for i := range fieldConditions { @@ -575,21 +684,29 @@ } key := f.newIndexDataStoreKeyWithValues(keyFieldValues) - - return &eqSingleIndexIterator{ - indexKey: key, - execInfo: &f.execInfo, - }, nil + iter = &eqSingleIndexIterator{indexKey: key, execInfo: &f.execInfo} } else { - return f.newPrefixIndexIterator(fieldConditions, matchers) + iter, err = f.newPrefixIteratorFromConditions(fieldConditions, matchers) } - case opIn: - return f.newInIndexIterator(fieldConditions, matchers) - case opGt, opGe, opLt, opLe, opNe, opNin, opLike, opNlike, opILike, opNILike: - return f.newQueryResultIterator(f.newIndexDataStoreKey(), matchers, &f.execInfo), nil + } else if fieldConditions[0].op == opIn && fieldConditions[0].arrOp != compOpNone { + iter, err = f.newInIndexIterator(fieldConditions, matchers) + } else { + iter = f.newPrefixIterator(f.newIndexDataStoreKey(), matchers, &f.execInfo) + } + + if err != nil { + return nil, err + } + + if iter == nil { + return nil, NewErrInvalidFilterOperator(fieldConditions[0].op) + } + + if hasArray { + iter = &arrayIndexIterator{inner: iter} } - return nil, NewErrInvalidFilterOperator(fieldConditions[0].op) + return iter, nil } func 
createValueMatcher(condition *fieldFilterCond) (valueMatcher, error) { @@ -606,26 +723,17 @@ func createValueMatcher(condition *fieldFilterCond) (valueMatcher, error) { if v, ok := condition.val.Int(); ok { return &intMatcher{value: v, evalFunc: getCompareValsFunc[int64](condition.op)}, nil } - if v, ok := condition.val.NillableInt(); ok { - return &intMatcher{value: v.Value(), evalFunc: getCompareValsFunc[int64](condition.op)}, nil - } if v, ok := condition.val.Float(); ok { return &floatMatcher{value: v, evalFunc: getCompareValsFunc[float64](condition.op)}, nil } - if v, ok := condition.val.NillableFloat(); ok { - return &floatMatcher{value: v.Value(), evalFunc: getCompareValsFunc[float64](condition.op)}, nil - } if v, ok := condition.val.String(); ok { return &stringMatcher{value: v, evalFunc: getCompareValsFunc[string](condition.op)}, nil } - if v, ok := condition.val.NillableString(); ok { - return &stringMatcher{value: v.Value(), evalFunc: getCompareValsFunc[string](condition.op)}, nil - } if v, ok := condition.val.Time(); ok { return &timeMatcher{value: v, op: condition.op}, nil } - if v, ok := condition.val.NillableTime(); ok { - return &timeMatcher{value: v.Value(), op: condition.op}, nil + if v, ok := condition.val.Bool(); ok { + return &boolMatcher{value: v, isEq: condition.op == opEq}, nil } case opIn, opNin: inVals, err := client.ToArrayOfNormalValues(condition.val) @@ -665,9 +773,10 @@ func createValueMatchers(conditions []fieldFilterCond) ([]valueMatcher, error) { } type fieldFilterCond struct { - op string - val client.NormalValue - kind client.FieldKind + op string + arrOp string + val client.NormalValue + kind client.FieldKind } // determineFieldFilterConditions determines the conditions and their corresponding operation @@ -689,22 +798,36 @@ func (f *IndexFetcher) determineFieldFilterConditions() ([]fieldFilterCond, erro condMap := indexFilterCond.(map[connor.FilterKey]any) for key, filterVal := range condMap { - opKey := key.(*mapper.Operator) - var normalVal client.NormalValue + cond := fieldFilterCond{ + op: key.(*mapper.Operator).Operation, + kind: f.indexedFields[i].Kind, + } + var err error if filterVal == nil { - normalVal, err = client.NewNormalNil(f.indexedFields[i].Kind) + cond.val, err = client.NewNormalNil(cond.kind) + } else if !f.indexedFields[i].Kind.IsArray() { + cond.val, err = client.NewNormalValue(filterVal) } else { - normalVal, err = client.NewNormalValue(filterVal) + subCondMap := filterVal.(map[connor.FilterKey]any) + for subKey, subVal := range subCondMap { + arrKind := cond.kind.(client.ScalarArrayKind) + if subVal == nil { + cond.val, err = client.NewNormalNil(arrKind.SubKind()) + } else { + cond.val, err = client.NewNormalValue(subVal) + } + cond.arrOp = cond.op + cond.op = subKey.(*mapper.Operator).Operation + // the sub condition is supposed to have only 1 record + break + } } + if err != nil { return nil, err } - result = append(result, fieldFilterCond{ - op: opKey.Operation, - val: normalVal, - kind: f.indexedFields[i].Kind, - }) + result = append(result, cond) break } break diff --git a/internal/db/index.go b/internal/db/index.go index bd11e9f94b..c3860dca5a 100644 --- a/internal/db/index.go +++ b/internal/db/index.go @@ -12,13 +12,12 @@ package db import ( "context" - "time" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/internal/core" - "github.com/sourcenetwork/defradb/internal/request/graphql/schema/types" + 
"github.com/sourcenetwork/defradb/internal/utils/slice" ) // CollectionIndex is an interface for collection indexes @@ -30,55 +29,34 @@ type CollectionIndex interface { RemoveAll(context.Context, datastore.Txn) error } -func canConvertIndexFieldValue[T any](val any) bool { - _, ok := val.(T) - return ok -} - -func getValidateIndexFieldFunc(kind client.FieldKind) func(any) bool { +func isSupportedKind(kind client.FieldKind) bool { if kind.IsObject() && !kind.IsArray() { - return canConvertIndexFieldValue[string] + return true } switch kind { - case client.FieldKind_NILLABLE_STRING, client.FieldKind_DocID: - return canConvertIndexFieldValue[string] - case client.FieldKind_NILLABLE_INT: - return canConvertIndexFieldValue[int64] - case client.FieldKind_NILLABLE_FLOAT: - return canConvertIndexFieldValue[float64] - case client.FieldKind_NILLABLE_BOOL: - return canConvertIndexFieldValue[bool] - case client.FieldKind_NILLABLE_BLOB: - return func(val any) bool { - blobStrVal, ok := val.(string) - if !ok { - return false - } - return types.BlobPattern.MatchString(blobStrVal) - } - case client.FieldKind_NILLABLE_DATETIME: - return func(val any) bool { - timeStrVal, ok := val.(string) - if !ok { - return false - } - _, err := time.Parse(time.RFC3339, timeStrVal) - return err == nil - } + case + client.FieldKind_DocID, + client.FieldKind_STRING_ARRAY, + client.FieldKind_INT_ARRAY, + client.FieldKind_BOOL_ARRAY, + client.FieldKind_FLOAT_ARRAY, + client.FieldKind_NILLABLE_STRING, + client.FieldKind_NILLABLE_INT, + client.FieldKind_NILLABLE_FLOAT, + client.FieldKind_NILLABLE_BOOL, + client.FieldKind_NILLABLE_BLOB, + client.FieldKind_NILLABLE_DATETIME, + client.FieldKind_NILLABLE_BOOL_ARRAY, + client.FieldKind_NILLABLE_INT_ARRAY, + client.FieldKind_NILLABLE_FLOAT_ARRAY, + client.FieldKind_NILLABLE_STRING_ARRAY: + return true default: - return nil + return false } } -func getFieldValidateFunc(kind client.FieldKind) (func(any) bool, error) { - validateFunc := getValidateIndexFieldFunc(kind) - if validateFunc == nil { - return nil, NewErrUnsupportedIndexFieldType(kind) - } - return validateFunc, nil -} - // NewCollectionIndex creates a new collection index func NewCollectionIndex( collection client.Collection, @@ -88,21 +66,26 @@ func NewCollectionIndex( return nil, NewErrIndexDescHasNoFields(desc) } base := collectionBaseIndex{collection: collection, desc: desc} - base.validateFieldFuncs = make([]func(any) bool, len(desc.Fields)) base.fieldsDescs = make([]client.SchemaFieldDescription, len(desc.Fields)) + isArray := false for i := range desc.Fields { field, foundField := collection.Schema().GetFieldByName(desc.Fields[i].Name) if !foundField { return nil, client.NewErrFieldNotExist(desc.Fields[i].Name) } base.fieldsDescs[i] = field - validateFunc, err := getFieldValidateFunc(field.Kind) - if err != nil { - return nil, err + if !isSupportedKind(field.Kind) { + return nil, NewErrUnsupportedIndexFieldType(field.Kind) } - base.validateFieldFuncs[i] = validateFunc + isArray = isArray || field.Kind.IsArray() } - if desc.Unique { + if isArray { + if desc.Unique { + return newCollectionArrayUniqueIndex(base), nil + } else { + return newCollectionArrayIndex(base), nil + } + } else if desc.Unique { return &collectionUniqueIndex{collectionBaseIndex: base}, nil } else { return &collectionSimpleIndex{collectionBaseIndex: base}, nil @@ -110,10 +93,9 @@ func NewCollectionIndex( } type collectionBaseIndex struct { - collection client.Collection - desc client.IndexDescription - validateFieldFuncs []func(any) bool - 
fieldsDescs []client.SchemaFieldDescription + collection client.Collection + desc client.IndexDescription + fieldsDescs []client.SchemaFieldDescription } func (index *collectionBaseIndex) getDocFieldValues(doc *client.Document) ([]client.NormalValue, error) { @@ -138,6 +120,7 @@ func (index *collectionBaseIndex) getDocFieldValues(doc *client.Document) ([]cli func (index *collectionBaseIndex) getDocumentsIndexKey( doc *client.Document, + appendDocID bool, ) (core.IndexDataStoreKey, error) { fieldValues, err := index.getDocFieldValues(doc) if err != nil { @@ -149,6 +132,10 @@ func (index *collectionBaseIndex) getDocumentsIndexKey( fields[i].Value = fieldValues[i] fields[i].Descending = index.desc.Fields[i].Descending } + + if appendDocID { + fields = append(fields, core.IndexedField{Value: client.NewNormalString(doc.ID().String())}) + } return core.NewIndexDataStoreKey(index.collection.ID(), index.desc.ID, fields), nil } @@ -210,13 +197,8 @@ var _ CollectionIndex = (*collectionSimpleIndex)(nil) func (index *collectionSimpleIndex) getDocumentsIndexKey( doc *client.Document, ) (core.IndexDataStoreKey, error) { - key, err := index.collectionBaseIndex.getDocumentsIndexKey(doc) - if err != nil { - return core.IndexDataStoreKey{}, err - } - - key.Fields = append(key.Fields, core.IndexedField{Value: client.NewNormalString(doc.ID().String())}) - return key, nil + // docID is appended, as it's part of the key for non-unique indexes + return index.collectionBaseIndex.getDocumentsIndexKey(doc, true) } // Save indexes a document by storing the indexed field value. @@ -303,19 +285,17 @@ func (index *collectionUniqueIndex) Save( txn datastore.Txn, doc *client.Document, ) error { - key, val, err := index.prepareIndexRecordToStore(ctx, txn, doc) + key, val, err := index.prepareUniqueIndexRecordToStore(ctx, txn, doc) if err != nil { return err } return index.save(ctx, txn, &key, val) } -func (index *collectionUniqueIndex) newUniqueIndexError( - doc *client.Document, -) error { - kvs := make([]errors.KV, 0, len(index.fieldsDescs)) - for iter := range index.fieldsDescs { - fieldVal, err := doc.TryGetValue(index.fieldsDescs[iter].Name) +func newUniqueIndexError(doc *client.Document, fieldsDescs []client.SchemaFieldDescription) error { + kvs := make([]errors.KV, 0, len(fieldsDescs)) + for iter := range fieldsDescs { + fieldVal, err := doc.TryGetValue(fieldsDescs[iter].Name) var val any if err != nil { return err @@ -324,19 +304,26 @@ func (index *collectionUniqueIndex) newUniqueIndexError( if fieldVal != nil { val = fieldVal.Value() } - kvs = append(kvs, errors.NewKV(index.fieldsDescs[iter].Name, val)) + kvs = append(kvs, errors.NewKV(fieldsDescs[iter].Name, val)) } return NewErrCanNotIndexNonUniqueFields(doc.ID().String(), kvs...) 
} -func (index *collectionUniqueIndex) getDocumentsIndexRecord( +func (index *collectionBaseIndex) getDocumentsUniqueIndexRecord( doc *client.Document, ) (core.IndexDataStoreKey, []byte, error) { - key, err := index.getDocumentsIndexKey(doc) + key, err := index.getDocumentsIndexKey(doc, false) if err != nil { return core.IndexDataStoreKey{}, nil, err } + return makeUniqueKeyValueRecord(key, doc) +} + +func makeUniqueKeyValueRecord( + key core.IndexDataStoreKey, + doc *client.Document, +) (core.IndexDataStoreKey, []byte, error) { if hasIndexKeyNilField(&key) { key.Fields = append(key.Fields, core.IndexedField{Value: client.NewNormalString(doc.ID().String())}) return key, []byte{}, nil @@ -345,26 +332,36 @@ func (index *collectionUniqueIndex) getDocumentsIndexRecord( } } -func (index *collectionUniqueIndex) prepareIndexRecordToStore( +func (index *collectionUniqueIndex) prepareUniqueIndexRecordToStore( ctx context.Context, txn datastore.Txn, doc *client.Document, ) (core.IndexDataStoreKey, []byte, error) { - key, val, err := index.getDocumentsIndexRecord(doc) + key, val, err := index.getDocumentsUniqueIndexRecord(doc) if err != nil { return core.IndexDataStoreKey{}, nil, err } + return key, val, validateUniqueKeyValue(ctx, txn, key, val, doc, index.fieldsDescs) +} + +func validateUniqueKeyValue( + ctx context.Context, + txn datastore.Txn, + key core.IndexDataStoreKey, + val []byte, + doc *client.Document, + fieldsDescs []client.SchemaFieldDescription, +) error { if len(val) != 0 { - var exists bool - exists, err = txn.Datastore().Has(ctx, key.ToDS()) + exists, err := txn.Datastore().Has(ctx, key.ToDS()) if err != nil { - return core.IndexDataStoreKey{}, nil, err + return err } if exists { - return core.IndexDataStoreKey{}, nil, index.newUniqueIndexError(doc) + return newUniqueIndexError(doc, fieldsDescs) } } - return key, val, nil + return nil } func (index *collectionUniqueIndex) Delete( @@ -386,7 +383,7 @@ func (index *collectionUniqueIndex) Update( if !isUpdatingIndexedFields(index, oldDoc, newDoc) { return nil } - newKey, newVal, err := index.prepareIndexRecordToStore(ctx, txn, newDoc) + newKey, newVal, err := index.prepareUniqueIndexRecordToStore(ctx, txn, newDoc) if err != nil { return err } @@ -402,7 +399,7 @@ func (index *collectionUniqueIndex) deleteDocIndex( txn datastore.Txn, doc *client.Document, ) error { - key, _, err := index.getDocumentsIndexRecord(doc) + key, _, err := index.getDocumentsUniqueIndexRecord(doc) if err != nil { return err } @@ -430,3 +427,319 @@ func isUpdatingIndexedFields(index CollectionIndex, oldDoc, newDoc *client.Docum } return false } + +type collectionArrayBaseIndex struct { + collectionBaseIndex + arrFieldsIndexes []int +} + +func newCollectionArrayBaseIndex(base collectionBaseIndex) collectionArrayBaseIndex { + ind := collectionArrayBaseIndex{collectionBaseIndex: base} + for i := range base.fieldsDescs { + if base.fieldsDescs[i].Kind.IsArray() { + ind.arrFieldsIndexes = append(ind.arrFieldsIndexes, i) + } + } + if len(ind.arrFieldsIndexes) == 0 { + return collectionArrayBaseIndex{} + } + return ind +} + +// newIndexKeyGenerator creates a function that generates index keys for a document +// with multiple array fields. +// All generated keys are unique. 
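+// Duplicate elements are removed first, so the number of generated keys equals the product of
+// the distinct element counts of all indexed array fields.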
+// For example, for a doc with these values {{"a", "b", "a"}, {"c", "d", "e"}, {"f", "g"}} it generates: +// "acf", "acg", "adf", "adg", "aef", "aeg", "bcf", "bcg", "bdf", "bdg", "bef", "beg" +// Note: the example is simplified and doesn't include field separation +func (index *collectionArrayBaseIndex) newIndexKeyGenerator( + doc *client.Document, + appendDocID bool, +) (func() (core.IndexDataStoreKey, bool), error) { + key, err := index.getDocumentsIndexKey(doc, appendDocID) + if err != nil { + return nil, err + } + + // Collect unique values to use as source for generating keys + normValsArr := make([][]client.NormalValue, 0, len(index.arrFieldsIndexes)) + for _, arrFieldIndex := range index.arrFieldsIndexes { + arrVal := key.Fields[arrFieldIndex].Value + normVals, err := client.ToArrayOfNormalValues(arrVal) + if err != nil { + return nil, err + } + normValsArr = append(normValsArr, slice.RemoveDuplicates(normVals)) + } + + // arrFieldCounter keeps track of indexes into arrays of normal values + arrFieldCounter := make([]int, len(index.arrFieldsIndexes)) + done := false + + // This function generates the next key by iterating through all possible combinations. + // It works pretty much like a digital clock that first iterates through seconds, then minutes, etc. + return func() (core.IndexDataStoreKey, bool) { + if done { + return core.IndexDataStoreKey{}, false + } + + resultKey := core.IndexDataStoreKey{ + CollectionID: key.CollectionID, + IndexID: key.IndexID, + Fields: make([]core.IndexedField, len(key.Fields)), + } + copy(resultKey.Fields, key.Fields) + + // Use the current counters to pick an element for each array field in the key + for i, counter := range arrFieldCounter { + field := &resultKey.Fields[index.arrFieldsIndexes[i]] + field.Value = normValsArr[i][counter] + } + + // iterate in reverse order so that we exhaust all combinations for the last field first, + // {"f", "g"} in the example above. This way we guarantee that the order of generated keys + // is from left to right, "acf" -> "acg" -> "adf" -> "adg" -> ... 
+ for i := len(arrFieldCounter) - 1; i >= 0; i-- { + arrFieldCounter[i]++ + if arrFieldCounter[i] < len(normValsArr[i]) { + break + } + // if we iterated through all combinations for the current field, reset the counter + // so that we do it again for the next field to the left + arrFieldCounter[i] = 0 + // if the current field happens to be the leftmost one (the first), we are done + if i == 0 { + done = true + } + } + + return resultKey, true + }, nil +} + +func (index *collectionArrayBaseIndex) getAllKeys( + doc *client.Document, + appendDocID bool, +) ([]core.IndexDataStoreKey, error) { + getNextOldKey, err := index.newIndexKeyGenerator(doc, appendDocID) + if err != nil { + return nil, err + } + keys := make([]core.IndexDataStoreKey, 0) + for { + key, ok := getNextOldKey() + if !ok { + break + } + keys = append(keys, key) + } + return keys, nil +} + +func (index *collectionArrayBaseIndex) deleteRetiredKeysAndReturnNew( + ctx context.Context, + txn datastore.Txn, + oldDoc *client.Document, + newDoc *client.Document, + appendDocID bool, +) ([]core.IndexDataStoreKey, error) { + prevKeys, err := index.getAllKeys(oldDoc, appendDocID) + if err != nil { + return nil, err + } + currentKeys, err := index.getAllKeys(newDoc, appendDocID) + if err != nil { + return nil, err + } + + for _, prevKey := range prevKeys { + keyEqual := func(key core.IndexDataStoreKey) bool { return prevKey.Equal(key) } + rem, removedVal := slice.RemoveFirstIf(currentKeys, keyEqual) + // If a previous key is not among the current keys, it should be retired + if !removedVal.HasValue() { + err = index.deleteIndexKey(ctx, txn, prevKey) + if err != nil { + return nil, err + } + } + currentKeys = rem + } + + return currentKeys, nil +} + +type collectionArrayIndex struct { + collectionArrayBaseIndex +} + +var _ CollectionIndex = (*collectionArrayIndex)(nil) + +func newCollectionArrayIndex(base collectionBaseIndex) *collectionArrayIndex { + return &collectionArrayIndex{collectionArrayBaseIndex: newCollectionArrayBaseIndex(base)} +} + +// Save indexes a document by storing the indexed field values. 
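+// Each distinct element of an indexed array field produces its own entry, so saving a
+// document with numbers [1, 2, 3] under an index on "numbers" writes three separate keys,
+// each with the docID appended.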
+func (index *collectionArrayIndex) Save( + ctx context.Context, + txn datastore.Txn, + doc *client.Document, +) error { + getNextKey, err := index.newIndexKeyGenerator(doc, true) + if err != nil { + return err + } + + for { + key, hasKey := getNextKey() + if !hasKey { + break + } + err = txn.Datastore().Put(ctx, key.ToDS(), []byte{}) + if err != nil { + return NewErrFailedToStoreIndexedField(key.ToString(), err) + } + } + return nil +} + +func (index *collectionArrayIndex) Update( + ctx context.Context, + txn datastore.Txn, + oldDoc *client.Document, + newDoc *client.Document, +) error { + newKeys, err := index.deleteRetiredKeysAndReturnNew(ctx, txn, oldDoc, newDoc, true) + if err != nil { + return err + } + + for _, key := range newKeys { + err = txn.Datastore().Put(ctx, key.ToDS(), []byte{}) + if err != nil { + return NewErrFailedToStoreIndexedField(key.ToString(), err) + } + } + + return nil +} + +func (index *collectionArrayIndex) Delete( + ctx context.Context, + txn datastore.Txn, + doc *client.Document, +) error { + getNextKey, err := index.newIndexKeyGenerator(doc, true) + if err != nil { + return err + } + + for { + key, ok := getNextKey() + if !ok { + break + } + err = index.deleteIndexKey(ctx, txn, key) + if err != nil { + return err + } + } + return nil +} + +type collectionArrayUniqueIndex struct { + collectionArrayBaseIndex +} + +var _ CollectionIndex = (*collectionArrayUniqueIndex)(nil) + +func newCollectionArrayUniqueIndex(base collectionBaseIndex) *collectionArrayUniqueIndex { + return &collectionArrayUniqueIndex{collectionArrayBaseIndex: newCollectionArrayBaseIndex(base)} +} + +func (index *collectionArrayUniqueIndex) Save( + ctx context.Context, + txn datastore.Txn, + doc *client.Document, +) error { + getNextKey, err := index.newIndexKeyGenerator(doc, false) + if err != nil { + return err + } + + for { + key, ok := getNextKey() + if !ok { + break + } + err := index.addNewUniqueKey(ctx, txn, doc, key) + if err != nil { + return err + } + } + return nil +} + +func (index *collectionArrayUniqueIndex) addNewUniqueKey( + ctx context.Context, + txn datastore.Txn, + doc *client.Document, + key core.IndexDataStoreKey, +) error { + key, val, err := makeUniqueKeyValueRecord(key, doc) + if err != nil { + return err + } + err = validateUniqueKeyValue(ctx, txn, key, val, doc, index.fieldsDescs) + if err != nil { + return err + } + err = txn.Datastore().Put(ctx, key.ToDS(), val) + if err != nil { + return NewErrFailedToStoreIndexedField(key.ToString(), err) + } + return nil +} + +func (index *collectionArrayUniqueIndex) Update( + ctx context.Context, + txn datastore.Txn, + oldDoc *client.Document, + newDoc *client.Document, +) error { + newKeys, err := index.deleteRetiredKeysAndReturnNew(ctx, txn, oldDoc, newDoc, false) + if err != nil { + return err + } + + for _, key := range newKeys { + err := index.addNewUniqueKey(ctx, txn, newDoc, key) + if err != nil { + return err + } + } + + return nil +} + +func (index *collectionArrayUniqueIndex) Delete( + ctx context.Context, + txn datastore.Txn, + doc *client.Document, +) error { + getNextKey, err := index.newIndexKeyGenerator(doc, false) + if err != nil { + return err + } + + for { + key, ok := getNextKey() + if !ok { + break + } + err = index.deleteIndexKey(ctx, txn, key) + if err != nil { + return err + } + } + return nil +} diff --git a/internal/db/index_test.go b/internal/db/index_test.go index 9226f92efd..779bcdff84 100644 --- a/internal/db/index_test.go +++ b/internal/db/index_test.go @@ -37,9 +37,11 @@ const ( usersColName = 
"Users" productsColName = "Products" - usersNameFieldName = "name" - usersAgeFieldName = "age" - usersWeightFieldName = "weight" + usersNameFieldName = "name" + usersAgeFieldName = "age" + usersWeightFieldName = "weight" + usersNumbersFieldName = "numbers" + usersHobbiesFieldName = "hobbies" productsIDFieldName = "id" productsPriceFieldName = "price" @@ -71,11 +73,15 @@ func (f *indexTestFixture) addUsersCollection() client.Collection { %s: String %s: Int %s: Float + %s: [Int!] + %s: [String!] }`, usersColName, usersNameFieldName, usersAgeFieldName, usersWeightFieldName, + usersNumbersFieldName, + usersHobbiesFieldName, ), ) require.NoError(f.t, err) @@ -186,6 +192,20 @@ func (f *indexTestFixture) createUserCollectionIndexOnName() client.IndexDescrip return newDesc } +func (f *indexTestFixture) createUserCollectionIndexOnNumbers() client.IndexDescription { + indexDesc := client.IndexDescription{ + Name: "users_numbers_index", + Fields: []client.IndexedFieldDescription{ + {Name: usersNumbersFieldName}, + }, + } + + newDesc, err := f.createCollectionIndexFor(f.users.Name().Value(), indexDesc) + require.NoError(f.t, err) + + return newDesc +} + func makeUnique(indexDesc client.IndexDescription) client.IndexDescription { indexDesc.Unique = true return indexDesc @@ -509,35 +529,6 @@ func TestCreateIndex_ShouldUpdateCollectionsDescription(t *testing.T) { f.users.Description().Indexes) } -func TestCreateIndex_IfAttemptToIndexOnUnsupportedType_ReturnError(t *testing.T) { - f := newIndexTestFixtureBare(t) - - const unsupportedKind = client.FieldKind_BOOL_ARRAY - - _, err := f.db.AddSchema( - f.ctx, - `type testTypeCol { - field: [Boolean!] - }`, - ) - require.NoError(f.t, err) - - collection, err := f.db.GetCollectionByName(f.ctx, "testTypeCol") - require.NoError(f.t, err) - - indexDesc := client.IndexDescription{ - Fields: []client.IndexedFieldDescription{ - {Name: "field"}, - }, - } - - f.txn, err = f.db.NewTxn(f.ctx, false) - require.NoError(f.t, err) - - _, err = f.createCollectionIndexFor(collection.Name().Value(), indexDesc) - require.ErrorIs(f.t, err, NewErrUnsupportedIndexFieldType(unsupportedKind)) -} - func TestGetIndexes_ShouldReturnListOfAllExistingIndexes(t *testing.T) { f := newIndexTestFixture(t) defer f.db.Close() @@ -876,43 +867,6 @@ func TestCollectionGetIndexes_IfFailsToCreateTxn_ShouldNotCache(t *testing.T) { assert.Equal(t, testUsersColIndexName, indexes[0].Name) } -func TestCollectionGetIndexes_IfStoredIndexWithUnsupportedType_ReturnError(t *testing.T) { - f := newIndexTestFixtureBare(t) - - const unsupportedKind = client.FieldKind_BOOL_ARRAY - _, err := f.db.AddSchema( - f.ctx, - `type testTypeCol { - name: String - field: [Boolean!] - }`, - ) - require.NoError(f.t, err) - - collection, err := f.db.GetCollectionByName(f.ctx, "testTypeCol") - require.NoError(f.t, err) - - f.txn, err = f.db.NewTxn(f.ctx, false) - require.NoError(f.t, err) - - indexDesc := client.IndexDescription{ - Fields: []client.IndexedFieldDescription{ - {Name: "field"}, - }, - } - indexDescData, err := json.Marshal(indexDesc) - require.NoError(t, err) - - mockedTxn := f.mockTxn() - mockedTxn.MockSystemstore.EXPECT().Query(mock.Anything, mock.Anything).Unset() - mockedTxn.MockSystemstore.EXPECT().Query(mock.Anything, mock.Anything). 
- Return(mocks.NewQueryResultsWithValues(t, indexDescData), nil) - - ctx := SetContextTxn(f.ctx, mockedTxn) - _, err = collection.GetIndexes(ctx) - require.ErrorIs(t, err, NewErrUnsupportedIndexFieldType(unsupportedKind)) -} - func TestCollectionGetIndexes_IfInvalidIndexIsStored_ReturnError(t *testing.T) { f := newIndexTestFixture(t) defer f.db.Close() diff --git a/internal/db/indexed_docs_test.go b/internal/db/indexed_docs_test.go index 9f4ea3fe72..4cd591a536 100644 --- a/internal/db/indexed_docs_test.go +++ b/internal/db/indexed_docs_test.go @@ -18,7 +18,6 @@ import ( ipfsDatastore "github.com/ipfs/go-datastore" "github.com/ipfs/go-datastore/query" - "github.com/ipld/go-ipld-prime/storage/bsadapter" "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" @@ -36,9 +35,11 @@ import ( ) type userDoc struct { - Name string `json:"name"` - Age int `json:"age"` - Weight float64 `json:"weight"` + Name string `json:"name"` + Age int `json:"age"` + Weight float64 `json:"weight"` + Numbers []int `json:"numbers"` + Hobbies []string `json:"hobbies"` } type productDoc struct { @@ -56,6 +57,15 @@ func (f *indexTestFixture) saveDocToCollection(doc *client.Document, col client. require.NoError(f.t, err) } +func (f *indexTestFixture) deleteDocFromCollection(docID client.DocID, col client.Collection) { + res, err := col.Delete(f.ctx, docID) + require.NoError(f.t, err) + require.True(f.t, res) + f.commitTxn() + f.txn, err = f.db.NewTxn(f.ctx, false) + require.NoError(f.t, err) +} + func (f *indexTestFixture) newUserDoc(name string, age int, col client.Collection) *client.Document { d := userDoc{Name: name, Age: age, Weight: 154.1} data, err := json.Marshal(d) @@ -66,6 +76,15 @@ func (f *indexTestFixture) newUserDoc(name string, age int, col client.Collectio return doc } +func (f *indexTestFixture) newCustomUserDoc(d userDoc, col client.Collection) *client.Document { + data, err := json.Marshal(d) + require.NoError(f.t, err) + + doc, err := client.NewDocFromJSON(data, col.Definition()) + require.NoError(f.t, err) + return doc +} + func (f *indexTestFixture) newProdDoc(id int, price float64, cat string, col client.Collection) *client.Document { d := productDoc{ID: id, Price: price, Category: cat} data, err := json.Marshal(d) @@ -86,10 +105,11 @@ type indexKeyBuilder struct { descendingFields []bool doc *client.Document isUnique bool + arrayFieldValues map[string]any } func newIndexKeyBuilder(f *indexTestFixture) *indexKeyBuilder { - return &indexKeyBuilder{f: f} + return &indexKeyBuilder{f: f, arrayFieldValues: make(map[string]any)} } func (b *indexKeyBuilder) Col(colName string) *indexKeyBuilder { @@ -105,6 +125,14 @@ func (b *indexKeyBuilder) Fields(fieldsNames ...string) *indexKeyBuilder { return b } +// ArrayFieldVal sets the value for an array field. +// The value should be a single element of the array, as the index stores array fields element by element. +// If ArrayFieldVal is not set and an indexed array field is present, the first element of the array is used. +func (b *indexKeyBuilder) ArrayFieldVal(fieldName string, val any) *indexKeyBuilder { + b.arrayFieldValues[fieldName] = val + return b +} + // DescendingFields sets the descending order flags for the index key fields. func (b *indexKeyBuilder) DescendingFields(descending ...bool) *indexKeyBuilder { b.descendingFields = descending return b } @@ -120,6 +148,7 @@ func (b *indexKeyBuilder) Doc(doc *client.Document) *indexKeyBuilder { return b } +// Unique sets the index key to be unique. 
func (b *indexKeyBuilder) Unique() *indexKeyBuilder { b.isUnique = true return b @@ -170,12 +199,12 @@ indexLoop: hasNilValue := false for i, fieldName := range b.fieldsNames { fieldValue, err := b.doc.GetValue(fieldName) - var val client.NormalValue if err != nil { if !errors.Is(err, client.ErrFieldNotExist) { require.NoError(b.f.t, err) } } + var val client.NormalValue if fieldValue != nil { val = fieldValue.NormalValue() } else { @@ -190,6 +219,20 @@ indexLoop: } if val.IsNil() { hasNilValue = true + } else if val.IsArray() { + if arrVal, ok := b.arrayFieldValues[fieldName]; ok { + if normVal, ok := arrVal.(client.NormalValue); ok { + val = normVal + } else { + val, err = client.NewNormalValue(arrVal) + require.NoError(b.f.t, err, "given value is not a normal value") + } + } else { + arrVals, err := client.ToArrayOfNormalValues(val) + require.NoError(b.f.t, err) + require.Greater(b.f.t, len(arrVals), 0, "empty array can not be indexed") + val = arrVals[0] + } } descending := false if i < len(b.descendingFields) { @@ -290,6 +333,19 @@ func TestNonUnique_IfDocIsAdded_ShouldBeIndexed(t *testing.T) { assert.Len(t, data, 0) } +func TestNonUnique_IfDocIsDeleted_ShouldRemoveIndex(t *testing.T) { + f := newIndexTestFixture(t) + defer f.db.Close() + f.createUserCollectionIndexOnName() + + doc := f.newUserDoc("John", 21, f.users) + f.saveDocToCollection(doc, f.users) + f.deleteDocFromCollection(doc.ID(), f.users) + + userNameKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Build() + assert.Len(t, f.getPrefixFromDataStore(userNameKey.ToString()), 0) +} + func TestNonUnique_IfDocWithDescendingOrderIsAdded_ShouldBeIndexed(t *testing.T) { f := newIndexTestFixture(t) defer f.db.Close() @@ -309,29 +365,6 @@ func TestNonUnique_IfDocWithDescendingOrderIsAdded_ShouldBeIndexed(t *testing.T) assert.Len(t, data, 0) } -func TestNonUnique_IfFailsToStoreIndexedDoc_Error(t *testing.T) { - f := newIndexTestFixture(t) - defer f.db.Close() - f.createUserCollectionIndexOnName() - - doc := f.newUserDoc("John", 21, f.users) - key := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Doc(doc).Build() - - mockTxn := f.mockTxn() - a := &mocks.DAGStore{} - mockTxn.MockDAGstore.EXPECT().AsIPLDStorage().Return(&bsadapter.Adapter{Wrapped: a}) - a.EXPECT().Put(mock.Anything, mock.Anything).Return(nil) - - dataStoreOn := mockTxn.MockDatastore.EXPECT() - dataStoreOn.Put(mock.Anything, mock.Anything, mock.Anything).Unset() - dataStoreOn.Put(mock.Anything, key.ToDS(), mock.Anything).Return(errors.New("error")) - dataStoreOn.Put(mock.Anything, mock.Anything, mock.Anything).Return(nil) - - ctx := SetContextTxn(f.ctx, mockTxn) - err := f.users.Create(ctx, doc) - require.ErrorIs(f.t, err, NewErrFailedToStoreIndexedField("name", nil)) -} - func TestNonUnique_IfDocDoesNotHaveIndexedField_SkipIndex(t *testing.T) { f := newIndexTestFixture(t) defer f.db.Close() @@ -354,50 +387,6 @@ func TestNonUnique_IfDocDoesNotHaveIndexedField_SkipIndex(t *testing.T) { assert.Len(t, prefixes, 0) } -func TestNonUnique_IfSystemStorageHasInvalidIndexDescription_Error(t *testing.T) { - f := newIndexTestFixture(t) - defer f.db.Close() - f.createUserCollectionIndexOnName() - - doc := f.newUserDoc("John", 21, f.users) - - mockTxn := f.mockTxn().ClearSystemStore() - a := &mocks.DAGStore{} - mockTxn.MockDAGstore.EXPECT().AsIPLDStorage().Return(&bsadapter.Adapter{Wrapped: a}) - a.EXPECT().Put(mock.Anything, mock.Anything).Return(nil) - - systemStoreOn := mockTxn.MockSystemstore.EXPECT() - 
systemStoreOn.Query(mock.Anything, mock.Anything). - Return(mocks.NewQueryResultsWithValues(t, []byte("invalid")), nil) - - ctx := SetContextTxn(f.ctx, mockTxn) - err := f.users.Create(ctx, doc) - assert.ErrorIs(t, err, datastore.NewErrInvalidStoredValue(nil)) -} - -func TestNonUnique_IfSystemStorageFailsToReadIndexDesc_Error(t *testing.T) { - f := newIndexTestFixture(t) - defer f.db.Close() - f.createUserCollectionIndexOnName() - - doc := f.newUserDoc("John", 21, f.users) - - testErr := errors.New("test error") - - mockTxn := f.mockTxn().ClearSystemStore() - a := &mocks.DAGStore{} - mockTxn.MockDAGstore.EXPECT().AsIPLDStorage().Return(&bsadapter.Adapter{Wrapped: a}) - a.EXPECT().Put(mock.Anything, mock.Anything).Return(nil) - - systemStoreOn := mockTxn.MockSystemstore.EXPECT() - systemStoreOn.Query(mock.Anything, mock.Anything). - Return(nil, testErr) - - ctx := SetContextTxn(f.ctx, mockTxn) - err := f.users.Create(ctx, doc) - require.ErrorIs(t, err, testErr) -} - func TestNonUnique_IfIndexIntField_StoreIt(t *testing.T) { f := newIndexTestFixture(t) defer f.db.Close() @@ -1458,3 +1447,148 @@ func TestCompositeUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) { f.commitTxn() } } + +func TestArrayIndex_IfDocIsAdded_ShouldIndexAllArrayElements(t *testing.T) { + f := newIndexTestFixture(t) + defer f.db.Close() + + f.createUserCollectionIndexOnNumbers() + + numbersArray := []int{1, 2, 3} + doc := f.newCustomUserDoc(userDoc{Name: "John", Numbers: numbersArray}, f.users) + f.saveDocToCollection(doc, f.users) + + for _, num := range numbersArray { + key := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNumbersFieldName). + ArrayFieldVal(usersNumbersFieldName, num).Doc(doc).Build() + + data, err := f.txn.Datastore().Get(f.ctx, key.ToDS()) + require.NoError(t, err) + assert.Len(t, data, 0) + } +} + +func TestArrayIndex_IfDocIsDeleted_ShouldRemoveIndex(t *testing.T) { + f := newIndexTestFixture(t) + defer f.db.Close() + + f.createUserCollectionIndexOnNumbers() + + numbersArray := []int{1, 2, 3} + doc := f.newCustomUserDoc(userDoc{Name: "John", Numbers: numbersArray}, f.users) + f.saveDocToCollection(doc, f.users) + + userNumbersKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNumbersFieldName).Build() + assert.Len(t, f.getPrefixFromDataStore(userNumbersKey.ToString()), len(numbersArray)) + + f.deleteDocFromCollection(doc.ID(), f.users) + + assert.Len(t, f.getPrefixFromDataStore(userNumbersKey.ToString()), 0) +} + +func TestArrayIndex_IfDocIsDeletedButOneArrayElementHasNoIndexRecord_Error(t *testing.T) { + f := newIndexTestFixture(t) + defer f.db.Close() + + f.createUserCollectionIndexOnNumbers() + + numbersArray := []int{1, 2, 3} + doc := f.newCustomUserDoc(userDoc{Name: "John", Numbers: numbersArray}, f.users) + f.saveDocToCollection(doc, f.users) + + userNumbersKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNumbersFieldName). 
+		ArrayFieldVal(usersNumbersFieldName, 2).Doc(doc).Build()
+
+	err := f.txn.Datastore().Delete(f.ctx, userNumbersKey.ToDS())
+	require.NoError(t, err)
+	f.commitTxn()
+
+	res, err := f.users.Delete(f.ctx, doc.ID())
+	require.Error(f.t, err)
+	require.False(f.t, res)
+}
+
+func TestArrayIndex_With2ArrayFieldsIfDocIsDeleted_ShouldRemoveIndex(t *testing.T) {
+	f := newIndexTestFixture(t)
+	defer f.db.Close()
+
+	indexDesc := client.IndexDescription{
+		Fields: []client.IndexedFieldDescription{
+			{Name: usersNumbersFieldName},
+			{Name: usersHobbiesFieldName},
+		},
+	}
+
+	_, err := f.createCollectionIndexFor(f.users.Name().Value(), indexDesc)
+	require.NoError(f.t, err)
+
+	numbersArray := []int{1, 2}
+	hobbiesArray := []string{"reading", "swimming"}
+	doc := f.newCustomUserDoc(userDoc{Name: "John", Numbers: numbersArray, Hobbies: hobbiesArray}, f.users)
+	f.saveDocToCollection(doc, f.users)
+
+	userNumbersKey := newIndexKeyBuilder(f).Col(usersColName).
+		Fields(usersNumbersFieldName, usersHobbiesFieldName).Build()
+	assert.Len(t, f.getPrefixFromDataStore(userNumbersKey.ToString()), len(numbersArray)*len(hobbiesArray))
+
+	f.deleteDocFromCollection(doc.ID(), f.users)
+
+	assert.Len(t, f.getPrefixFromDataStore(userNumbersKey.ToString()), 0)
+}
+
+func TestArrayIndex_With2ArrayFieldsIfDocIsDeletedButOneArrayElementHasNoIndexRecord_Error(t *testing.T) {
+	f := newIndexTestFixture(t)
+	defer f.db.Close()
+
+	indexDesc := client.IndexDescription{
+		Fields: []client.IndexedFieldDescription{
+			{Name: usersNumbersFieldName},
+			{Name: usersHobbiesFieldName},
+		},
+	}
+
+	_, err := f.createCollectionIndexFor(f.users.Name().Value(), indexDesc)
+	require.NoError(f.t, err)
+
+	numbersArray := []int{1, 2}
+	hobbiesArray := []string{"reading", "swimming"}
+	doc := f.newCustomUserDoc(userDoc{Name: "John", Numbers: numbersArray, Hobbies: hobbiesArray}, f.users)
+	f.saveDocToCollection(doc, f.users)
+
+	userNumbersKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNumbersFieldName, usersHobbiesFieldName).
+		ArrayFieldVal(usersNumbersFieldName, 2).ArrayFieldVal(usersHobbiesFieldName, "swimming").Doc(doc).Build()
+
+	err = f.txn.Datastore().Delete(f.ctx, userNumbersKey.ToDS())
+	require.NoError(t, err)
+	f.commitTxn()
+
+	res, err := f.users.Delete(f.ctx, doc.ID())
+	require.Error(f.t, err)
+	require.False(f.t, res)
+}
+
+func TestArrayIndex_WithUniqueIndexIfDocIsDeleted_ShouldRemoveIndex(t *testing.T) {
+	f := newIndexTestFixture(t)
+	defer f.db.Close()
+
+	indexDesc := client.IndexDescription{
+		Unique: true,
+		Fields: []client.IndexedFieldDescription{
+			{Name: usersNumbersFieldName},
+		},
+	}
+
+	_, err := f.createCollectionIndexFor(f.users.Name().Value(), indexDesc)
+	require.NoError(f.t, err)
+
+	numbersArray := []int{1, 2, 3}
+	doc := f.newCustomUserDoc(userDoc{Name: "John", Numbers: numbersArray}, f.users)
+	f.saveDocToCollection(doc, f.users)
+
+	userNumbersKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNumbersFieldName).Unique().Build()
+	assert.Len(t, f.getPrefixFromDataStore(userNumbersKey.ToString()), len(numbersArray))
+
+	f.deleteDocFromCollection(doc.ID(), f.users)
+
+	assert.Len(t, f.getPrefixFromDataStore(userNumbersKey.ToString()), 0)
+}
diff --git a/internal/planner/filter/copy_field.go b/internal/planner/filter/copy_field.go
index 9a524ecabb..838cdf4cf0 100644
--- a/internal/planner/filter/copy_field.go
+++ b/internal/planner/filter/copy_field.go
@@ -29,11 +29,9 @@ func CopyField(filter *mapper.Filter, fields ...mapper.Field) *mapper.Filter {
 		})
 	}
 
-	resultFilter := &mapper.Filter{}
 	conditionMap := traverseFilterByProperty(conditionKeys, filter.Conditions, false)
 	if len(conditionMap) > 0 {
-		resultFilter.Conditions = conditionMap
-		return resultFilter
+		return &mapper.Filter{Conditions: conditionMap}
 	}
 	return nil
 }
diff --git a/internal/planner/filter/merge.go b/internal/planner/filter/merge.go
index d5644e807e..9afbbf4c47 100644
--- a/internal/planner/filter/merge.go
+++ b/internal/planner/filter/merge.go
@@ -15,9 +15,9 @@ import (
 	"github.com/sourcenetwork/defradb/internal/planner/mapper"
 )
 
-// Merge merges two filters into one.
+// MergeConditions merges two sets of filter conditions into one.
 // It basically applies _and to both filters and normalizes them.
-func Merge(c1 map[connor.FilterKey]any, c2 map[connor.FilterKey]any) map[connor.FilterKey]any {
+func MergeConditions(c1 map[connor.FilterKey]any, c2 map[connor.FilterKey]any) map[connor.FilterKey]any {
 	if len(c1) == 0 {
 		return c2
 	}
@@ -37,3 +37,18 @@ func Merge(c1 map[connor.FilterKey]any, c2 map[connor.FilterKey]any) map[connor.
 	// more complex, that's why simplify if by normalizing it.
 	return normalize(result)
 }
+
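Before the wrapper itself, a small self-contained illustration of what it adds. The example uses plain maps as stand-ins for mapper.Filter and its conditions (those are internal types), so only the nil-propagation pattern is real: the new function lets call sites drop their own nil checks, as the type_join.go changes below take advantage of.

package main

import "fmt"

// filter is a stand-in for mapper.Filter: just a bag of conditions.
type filter struct{ conditions map[string]any }

// mergeConditions mirrors MergeConditions: an empty side falls through.
func mergeConditions(c1, c2 map[string]any) map[string]any {
	if len(c1) == 0 {
		return c2
	}
	if len(c2) == 0 {
		return c1
	}
	// The real implementation wraps both sides in _and and then normalizes.
	return map[string]any{"_and": []any{c1, c2}}
}

// merge mirrors the new Merge: it lifts mergeConditions to whole filters,
// treating a nil filter as the identity element.
func merge(f1, f2 *filter) *filter {
	if f1 == nil {
		return f2
	}
	if f2 == nil {
		return f1
	}
	return &filter{conditions: mergeConditions(f1.conditions, f2.conditions)}
}

func main() {
	var parent *filter // e.g. a parent node that has no filter yet
	child := &filter{conditions: map[string]any{"age": map[string]any{"_eq": 30}}}
	fmt.Println(merge(parent, child).conditions) // map[age:map[_eq:30]]
}

+// Merge merges two filters into one.
+// It basically applies _and to both filters and normalizes them.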
+func Merge(f1 *mapper.Filter, f2 *mapper.Filter) *mapper.Filter { + if f1 == nil { + return f2 + } + if f2 == nil { + return f1 + } + + return &mapper.Filter{ + Conditions: MergeConditions(f1.Conditions, f2.Conditions), + } +} diff --git a/internal/planner/filter/merge_test.go b/internal/planner/filter/merge_test.go index 6ea663eba5..745642fe1f 100644 --- a/internal/planner/filter/merge_test.go +++ b/internal/planner/filter/merge_test.go @@ -59,7 +59,7 @@ func TestMergeFilterConditions(t *testing.T) { t.Run(tt.name, func(t *testing.T) { leftFilter := mapper.ToFilter(request.Filter{Conditions: tt.left}, mapping) rightFilter := mapper.ToFilter(request.Filter{Conditions: tt.right}, mapping) - actualFilter := Merge(leftFilter.Conditions, rightFilter.Conditions) + actualFilter := MergeConditions(leftFilter.Conditions, rightFilter.Conditions) expectedFilter := mapper.ToFilter(request.Filter{Conditions: tt.expected}, mapping) AssertEqualFilterMap(t, expectedFilter.Conditions, actualFilter) }) @@ -70,6 +70,6 @@ func TestMergeNullFilter(t *testing.T) { f := map[connor.FilterKey]any{ &mapper.PropertyIndex{Index: 0}: "value1", } - AssertEqualFilterMap(t, f, Merge(f, nil)) - AssertEqualFilterMap(t, f, Merge(nil, f)) + AssertEqualFilterMap(t, f, MergeConditions(f, nil)) + AssertEqualFilterMap(t, f, MergeConditions(nil, f)) } diff --git a/internal/planner/filter/split.go b/internal/planner/filter/split.go index 69aed9fc2e..db8381620e 100644 --- a/internal/planner/filter/split.go +++ b/internal/planner/filter/split.go @@ -41,7 +41,7 @@ func SplitByFields(filter *mapper.Filter, fields ...mapper.Field) (*mapper.Filte if newSplitF == nil { continue } - splitF.Conditions = Merge(splitF.Conditions, newSplitF.Conditions) + splitF.Conditions = MergeConditions(splitF.Conditions, newSplitF.Conditions) RemoveField(filter, field) } diff --git a/internal/planner/scan.go b/internal/planner/scan.go index 019cd1dee2..a5fe4a32e9 100644 --- a/internal/planner/scan.go +++ b/internal/planner/scan.go @@ -164,14 +164,26 @@ func (scan *scanNode) initFetcher( f = new(fetcher.DocumentFetcher) if index.HasValue() { - fields := make([]mapper.Field, 0, len(index.Value().Fields)) + fieldsToMove := make([]mapper.Field, 0, len(index.Value().Fields)) + fieldsToCopy := make([]mapper.Field, 0, len(index.Value().Fields)) for _, field := range index.Value().Fields { fieldName := field.Name typeIndex := scan.documentMapping.FirstIndexOfName(fieldName) - fields = append(fields, mapper.Field{Index: typeIndex, Name: fieldName}) + indexField := mapper.Field{Index: typeIndex, Name: fieldName} + fd, _ := scan.col.Definition().Schema.GetFieldByName(fieldName) + // if the field is an array, we need to copy it instead of moving so that the + // top select node can do final filter check on the whole array of the document + if fd.Kind.IsArray() { + fieldsToCopy = append(fieldsToCopy, indexField) + } else { + fieldsToMove = append(fieldsToMove, indexField) + } } var indexFilter *mapper.Filter - scan.filter, indexFilter = filter.SplitByFields(scan.filter, fields...) + scan.filter, indexFilter = filter.SplitByFields(scan.filter, fieldsToMove...) 
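+		// Note: CopyField may return nil (when scan.filter holds no conditions on
+		// the field), and indexFilter may itself still be nil at this point; both
+		// cases are safe because the new filter.Merge treats a nil filter as the
+		// identity and returns the other argument unchanged.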
+ for i := range fieldsToCopy { + indexFilter = filter.Merge(indexFilter, filter.CopyField(scan.filter, fieldsToCopy[i])) + } if indexFilter != nil { f = fetcher.NewIndexFetcher(f, index.Value(), indexFilter) } diff --git a/internal/planner/type_join.go b/internal/planner/type_join.go index fc5eb9bbaf..a6d726b801 100644 --- a/internal/planner/type_join.go +++ b/internal/planner/type_join.go @@ -277,8 +277,7 @@ func prepareScanNodeFilterForTypeJoin( parent.filter = mapper.NewFilter() parent.filter.Conditions = filter.Copy(scan.filter.Conditions) } else { - parent.filter.Conditions = filter.Merge( - parent.filter.Conditions, scan.filter.Conditions) + parent.filter = filter.Merge(parent.filter, scan.filter) } scan.filter = nil } else { @@ -288,8 +287,7 @@ func prepareScanNodeFilterForTypeJoin( if parent.filter == nil { parent.filter = parentFilter } else { - parent.filter.Conditions = filter.Merge( - parent.filter.Conditions, parentFilter.Conditions) + parent.filter = filter.Merge(parent.filter, parentFilter) } } } @@ -799,7 +797,7 @@ func addFilterOnIDField(scan *scanNode, propIndex int, val any) { } filter.RemoveField(scan.filter, mapper.Field{Index: propIndex}) - scan.filter.Conditions = filter.Merge(scan.filter.Conditions, filterConditions) + scan.filter.Conditions = filter.MergeConditions(scan.filter.Conditions, filterConditions) } func getScanNode(plan planNode) *scanNode { diff --git a/internal/utils/slice/slice.go b/internal/utils/slice/slice.go new file mode 100644 index 0000000000..2a2463d6ec --- /dev/null +++ b/internal/utils/slice/slice.go @@ -0,0 +1,50 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package slice + +import "github.com/sourcenetwork/immutable" + +// RemoveDuplicates removes duplicates from a slice of elements. +// Relative order of the elements is not preserved. +// Both runtime and space complexity are O(n). +func RemoveDuplicates[S ~[]E, E comparable](s S) S { + sets := make(map[E]struct{}) + for i := len(s) - 1; i >= 0; i-- { + if _, ok := sets[s[i]]; ok { + swapLast(s, i) + s = s[:len(s)-1] + } else { + sets[s[i]] = struct{}{} + } + } + return s +} + +// RemoveFirstIf removes the first element that satisfies the predicate. +// Relative order of the elements is not preserved, as the last element is swapped with the removed one. +func RemoveFirstIf[S ~[]E, E any](s S, predicate func(E) bool) (S, immutable.Option[E]) { + for i := 0; i < len(s); i++ { + if predicate(s[i]) { + swapLast(s, i) + lastInd := len(s) - 1 + return s[:lastInd], immutable.Some(s[lastInd]) + } + } + return s, immutable.None[E]() +} + +func swap[T any](elements []T, i, j int) { + elements[i], elements[j] = elements[j], elements[i] +} + +func swapLast[T any](elements []T, i int) { + swap(elements, i, len(elements)-1) +} diff --git a/internal/utils/slice/slice_test.go b/internal/utils/slice/slice_test.go new file mode 100644 index 0000000000..07d6d88dc8 --- /dev/null +++ b/internal/utils/slice/slice_test.go @@ -0,0 +1,100 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package slice_test + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + "github.com/stretchr/testify/assert" + + "github.com/sourcenetwork/defradb/internal/utils/slice" +) + +func TestRemoveFirstIf(t *testing.T) { + tests := []struct { + name string + input []int + predicate func(int) bool + expected []int + found immutable.Option[int] + }{ + { + name: "remove in the middle", + input: []int{1, 3, 4, 5, 6}, + predicate: func(n int) bool { return n%2 == 0 }, + expected: []int{1, 3, 6, 5}, + found: immutable.Some(4), + }, + { + name: "nothing removed", + input: []int{1, 3, 4, 5, 6}, + predicate: func(n int) bool { return n > 10 }, + expected: []int{1, 3, 4, 5, 6}, + found: immutable.None[int](), + }, + { + name: "empty slice", + input: []int{}, + predicate: func(n int) bool { return n == 5 }, + expected: []int{}, + found: immutable.None[int](), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, found := slice.RemoveFirstIf(tt.input, tt.predicate) + assert.Equal(t, tt.expected, result, "expected %v, got %v", tt.expected, result) + assert.Equal(t, tt.found, found, "expected found %v, got %v", tt.found, found) + }) + } +} + +func TestRemoveDuplicates(t *testing.T) { + tests := []struct { + name string + input []int + expected []int + }{ + { + name: "no duplicates", + input: []int{1, 2, 3, 4, 5}, + expected: []int{1, 2, 3, 4, 5}, + }, + { + name: "all duplicates", + input: []int{1, 1, 1, 1, 1}, + expected: []int{1}, + }, + { + name: "some duplicates", + input: []int{1, 2, 4, 2, 3, 4, 4, 5}, + expected: []int{1, 2, 3, 4, 5}, + }, + { + name: "empty slice", + input: []int{}, + expected: []int{}, + }, + { + name: "single element", + input: []int{1}, + expected: []int{1}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := slice.RemoveDuplicates(tt.input) + assert.ElementsMatch(t, tt.expected, result, "expected %v, got %v", tt.expected, result) + }) + } +} diff --git a/tests/integration/index/array_composite_test.go b/tests/integration/index/array_composite_test.go new file mode 100644 index 0000000000..94ccc9d659 --- /dev/null +++ b/tests/integration/index/array_composite_test.go @@ -0,0 +1,597 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package index + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestArrayCompositeIndex_WithFilterOnIndexedArrayUsingAny_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {name: {_eq: "Shahzad"}, numbers: {_any: {_eq: 30}}, age: {_eq: 30}}) { + _docID + numbers + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(includes: [{name: "name"}, {name: "numbers"}, {name: "age"}]) { + name: String + numbers: [Int!] 
+ age: Int + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, 30, 20], + "age": 30 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [30, 40, 50, 30], + "age": 30 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [40, 50, 30], + "age": 60 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [1, 2, 3], + "age": 30 + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + { + "_docID": testUtils.NewDocIndex(0, 1), + "numbers": []int64{30, 40, 50, 30}, + }, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayCompositeIndex_WithFilterOnIndexedArrayUsingAll_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {name: {_eq: "Shahzad"}, numbers: {_all: {_gt: 1}}, age: {_eq: 30}}) { + _docID + numbers + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(includes: [{name: "name"}, {name: "numbers"}, {name: "age"}]) { + name: String + numbers: [Int!] + age: Int + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, 30, 20], + "age": 30 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [30, 40], + "age": 30 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [50], + "age": 60 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [1, 2], + "age": 30 + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + { + "_docID": testUtils.NewDocIndex(0, 1), + "numbers": []int64{30, 40}, + }, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + // all "Shahzad" users have in total 5 numbers + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(5), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayCompositeIndex_WithFilterOnIndexedArrayUsingNone_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {name: {_eq: "Shahzad"}, numbers: {_none: {_eq: 3}}, age: {_eq: 30}}) { + _docID + numbers + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(includes: [{name: "name"}, {name: "numbers"}, {name: "age"}]) { + name: String + numbers: [Int!] 
+					age: Int
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "John",
+					"numbers": [0, 30, 20],
+					"age": 30
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"numbers": [30, 40],
+					"age": 30
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"numbers": [50],
+					"age": 60
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"numbers": [2, 3],
+					"age": 30
+				}`,
+			},
+			testUtils.Request{
+				Request: req,
+				Results: map[string]any{
+					"User": []map[string]any{
+						{
+							"_docID":  testUtils.NewDocIndex(0, 1),
+							"numbers": []int64{30, 40},
+						},
+					},
+				},
+			},
+			testUtils.Request{
+				Request: makeExplainQuery(req),
+				// all "Shahzad" users have in total 5 numbers
+				Asserter: testUtils.NewExplainAsserter().WithIndexFetches(5),
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestArrayCompositeIndex_With2ConsecutiveArrayFields_Succeed(t *testing.T) {
+	req := `query {
+		User(filter: {name: {_eq: "Shahzad"}, numbers: {_any: {_eq: 30}}, hobbies: {_any: {_eq: "sports"}}, age: {_eq: 30}}) {
+			_docID
+			numbers
+			hobbies
+		}
+	}`
+	test := testUtils.TestCase{
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type User @index(includes: [{name: "name"}, {name: "numbers"}, {name: "hobbies"}, {name: "age"}]) {
+						name: String
+						numbers: [Int!]
+						hobbies: [String!]
+						age: Int
+					}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "John",
+					"numbers": [0, 30, 20],
+					"hobbies": ["sports", "books"],
+					"age": 30
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"numbers": [30, 40],
+					"hobbies": ["sports", "books"],
+					"age": 30
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"numbers": [50],
+					"hobbies": ["books", "movies"],
+					"age": 60
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"numbers": [2, 3],
+					"hobbies": ["sports", "movies", "books"],
+					"age": 30
+				}`,
+			},
+			testUtils.Request{
+				Request: req,
+				Results: map[string]any{
+					"User": []map[string]any{
+						{
+							"_docID":  testUtils.NewDocIndex(0, 1),
+							"numbers": []int64{30, 40},
+							"hobbies": []string{"sports", "books"},
+						},
+					},
+				},
+			},
+			testUtils.Request{
+				Request:  makeExplainQuery(req),
+				Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1),
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestArrayCompositeIndex_With2SeparateArrayFields_Succeed(t *testing.T) {
+	req := `query {
+		User(filter: {name: {_eq: "Shahzad"}, numbers: {_any: {_eq: 30}}, hobbies: {_any: {_eq: "sports"}}, age: {_eq: 30}}) {
+			_docID
+			numbers
+			hobbies
+		}
+	}`
+	test := testUtils.TestCase{
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type User @index(includes: [{name: "numbers"}, {name: "name"}, {name: "age"}, {name: "hobbies"}]) {
+						name: String
+						numbers: [Int!]
+						hobbies: [String!]
+ age: Int + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, 30, 20], + "hobbies": ["sports", "books"], + "age": 30 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [30, 40], + "hobbies": ["sports", "books"], + "age": 30 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [50], + "hobbies": ["books", "movies"], + "age": 60 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [2, 3], + "hobbies": ["sports", "movies", "books"], + "age": 30 + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + { + "_docID": testUtils.NewDocIndex(0, 1), + "numbers": []int64{30, 40}, + "hobbies": []string{"sports", "books"}, + }, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayCompositeIndex_WithAnyNoneAll_Succeed(t *testing.T) { + req := `query { + User(filter: { + numbers1: {_all: {_gt: 0}}, + numbers2: {_none: {_eq: 40}}, + numbers3: {_any: {_le: 200}} + }) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(includes: [{name: "numbers1"}, {name: "numbers2"}, {name: "numbers3"}]) { + name: String + numbers1: [Int!] + numbers2: [Int!] + numbers3: [Int!] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers1": [1, 2, 3], + "numbers2": [10, 20, 30], + "numbers3": [100, 200, 300] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers1": [2, 3, 4], + "numbers2": [20, 30, 40], + "numbers3": [200, 300, 400] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Keenan", + "numbers1": [0, 1], + "numbers2": [90], + "numbers3": [900] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Islam", + "numbers1": [6, 7, 8], + "numbers2": [10, 70, 80], + "numbers3": [100, 700, 800] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "numbers1": [1, 4, 5, 8], + "numbers2": [60, 80], + "numbers3": [600, 800] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + {"name": "Islam"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayCompositeIndexUpdate_With2ArrayFields_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(includes: [{name: "name"}, {name: "numbers"}, {name: "hobbies"}]) { + name: String + numbers: [Int!] + hobbies: [String!] 
+ }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, 30, 20, 40], + "hobbies": ["sports", "books"] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [30, 40, 30], + "hobbies": ["sports", "books"] + }`, + }, + testUtils.UpdateDoc{ + DocID: 1, + Doc: `{ + "name": "Shahzad", + "numbers": [40, 50, 50], + "hobbies": ["books", "movies", "books", "movies"] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_eq: 30}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{{"name": "John"}}, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_eq: 40}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + {"name": "Shahzad"}, + }, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_eq: 50}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{{"name": "Shahzad"}}, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_gt: 0}}, hobbies: {_any: {_eq: "sports"}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{{"name": "John"}}, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_gt: 0}}, hobbies: {_any: {_eq: "books"}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + {"name": "Shahzad"}, + }, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_gt: 0}}, hobbies: {_any: {_eq: "movies"}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{{"name": "Shahzad"}}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayCompositeIndexDelete_With2ConsecutiveArrayFields_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(includes: [{name: "name"}, {name: "numbers"}, {name: "hobbies"}]) { + name: String + numbers: [Int!] + hobbies: [String!] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, 30, 20], + "hobbies": ["sports", "books"] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [30, 40, 30, 50], + "hobbies": ["sports", "books", "sports", "movies"] + }`, + }, + testUtils.DeleteDoc{DocID: 1}, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_eq: 30}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{{"name": "John"}}, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_gt: 0}}, hobbies: {_any: {_eq: "sports"}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{{"name": "John"}}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/index/array_test.go b/tests/integration/index/array_test.go new file mode 100644 index 0000000000..096ecb87e1 --- /dev/null +++ b/tests/integration/index/array_test.go @@ -0,0 +1,1103 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package index + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestArrayIndex_WithFilterOnIndexedArrayUsingAny_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {numbers: {_any: {_eq: 30}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, 10, 20] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [30, 40, 50, 30] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [33, 44, 55] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithFilterOnIndexedArrayUsingAll_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {numbers: {_all: {_ge: 33}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, 10, 20] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [30, 40, 50] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [33, 44, 55] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + { + "name": "Andy", + }, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(9), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithFilterOnIndexedArrayUsingNone_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {numbers: {_none: {_ge: 33}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, 10, 20] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [30, 40, 50] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [33, 44, 55] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + { + "name": "John", + }, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(9), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndexUpdate_IfUpdateRearrangesArrayElements_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {numbers: {_any: {_eq: 30}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] 
@index
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "John",
+					"numbers": [0, 10, 20]
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"numbers": [30, 40, 50, 30]
+				}`,
+			},
+			testUtils.UpdateDoc{
+				DocID: 1,
+				Doc: `{
+					"name": "Shahzad",
+					"numbers": [50, 30, 40]
+				}`,
+			},
+			testUtils.Request{
+				Request: req,
+				Results: map[string]any{
+					"User": []map[string]any{
+						{
+							"name": "Shahzad",
+						},
+					},
+				},
+			},
+			testUtils.Request{
+				Request:  makeExplainQuery(req),
+				Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(1),
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestArrayIndexUpdate_IfUpdateRemovesSoughtElement_ShouldNotFetch(t *testing.T) {
+	req := `query {
+		User(filter: {numbers: {_any: {_eq: 30}}}) {
+			name
+		}
+	}`
+	test := testUtils.TestCase{
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type User {
+						name: String
+						numbers: [Int!] @index
+					}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "John",
+					"numbers": [0, 10, 20]
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"numbers": [30, 40, 50, 30]
+				}`,
+			},
+			testUtils.UpdateDoc{
+				DocID: 1,
+				Doc: `{
+					"name": "Shahzad",
+					"numbers": [50, 40]
+				}`,
+			},
+			testUtils.Request{
+				Request: req,
+				Results: map[string]any{
+					"User": []map[string]any{},
+				},
+			},
+			testUtils.Request{
+				Request:  makeExplainQuery(req),
+				Asserter: testUtils.NewExplainAsserter().WithIndexFetches(0),
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestArrayIndexUpdate_IfUpdateAddsSoughtElement_ShouldFetch(t *testing.T) {
+	req := `query {
+		User(filter: {numbers: {_any: {_eq: 30}}}) {
+			name
+		}
+	}`
+	test := testUtils.TestCase{
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type User {
+						name: String
+						numbers: [Int!] @index
+					}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "John",
+					"numbers": [0, 10, 20]
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"numbers": [40, 50]
+				}`,
+			},
+			testUtils.UpdateDoc{
+				DocID: 1,
+				Doc: `{
+					"name": "Shahzad",
+					"numbers": [80, 30, 60]
+				}`,
+			},
+			testUtils.Request{
+				Request: req,
+				Results: map[string]any{
+					"User": []map[string]any{
+						{
+							"name": "Shahzad",
+						},
+					},
+				},
+			},
+			testUtils.Request{
+				Request:  makeExplainQuery(req),
+				Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1),
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestArrayIndexDelete_IfDocIsDeleted_ShouldNotFetchIt(t *testing.T) {
+	req := `query {
+		User(filter: {numbers: {_any: {_gt: 0}}}) {
+			name
+		}
+	}`
+	test := testUtils.TestCase{
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type User {
+						name: String
+						numbers: [Int!] @index
+					}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "John",
+					"numbers": [0, 10, 20]
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"numbers": [40, 50]
+				}`,
+			},
+			testUtils.DeleteDoc{DocID: 0},
+			testUtils.Request{
+				Request: req,
+				Results: map[string]any{
+					"User": []map[string]any{
+						{"name": "Shahzad"},
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestArrayIndex_Bool_ShouldUseIndex(t *testing.T) {
+	req := `query {
+		User(filter: {booleans: {_any: {_eq: true}}}) {
+			name
+		}
+	}`
+	test := testUtils.TestCase{
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type User {
+						name: String
+						booleans: [Boolean!]
@index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "booleans": [true, false, true] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "booleans": [false, false] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_OptionalBool_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {booleans: {_any: {_eq: true}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + booleans: [Boolean] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "booleans": [true, false, true] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "booleans": [false, false] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_OptionalInt_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {numbers: {_any: {_eq: 3}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [4, 3, 7] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [2, 8] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_Float_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {rates: {_any: {_eq: 1.25}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + rates: [Float!] 
@index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "rates": [0.5, 1.0, 1.25] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "rates": [1.5, 1.2] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_OptionalFloat_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {rates: {_any: {_eq: 1.25}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + rates: [Float] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "rates": [0.5, 1.0, 1.25] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "rates": [1.5, 1.2] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_OptionalString_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {hobbies: {_any: {_eq: "books"}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + hobbies: [String] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "hobbies": ["games", "books", "music"] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "hobbies": ["movies", "music"] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithAnyAndInOperator_Succeed(t *testing.T) { + req := `query { + User(filter: {numbers: {_any: {_in: [3, 4, 5]}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [1, 4, 7] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [2, 8] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithAllAndInOperator_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] 
@index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [3, 4] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [2, 8] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [3, 5, 8] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_all: {_in: [3, 4, 5]}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithNoneAndInOperator_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [3, 4] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [2, 8] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [3, 5, 8] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_none: {_in: [4, 5]}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Shahzad"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithNoneAndNinOperator_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [3, 4] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [2, 8] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [3, 5, 8] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_none: {_nin: [3, 4, 5]}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithAllAndNinOperator_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [3, 4] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [2, 8] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [3, 5, 8] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_all: {_nin: [4, 5]}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Shahzad"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithAnyAndNinOperator_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] 
@index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [3, 4] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [2, 8] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [3, 5, 8] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_nin: [3, 4, 5]}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Shahzad"}, + {"name": "Andy"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithNilElementsAndAnyOp_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, null, 2, 3, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [10, 20, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [33, 44, 55] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_eq: 2}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + {"name": "Shahzad"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithNilElementsAndAllOp_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, null, 2, 3, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [10, 20, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [33, 44, 55] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Islam", + "numbers": [null, null] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_all: {_ge: 10}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Andy"}, + }, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_all: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Islam"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithNilElementsAndNoneOp_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, null, 2, 3, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [10, 20, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [33, 44, 55] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_none: {_ge: 10}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_none: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Andy"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git 
a/tests/integration/index/array_unique_composite_test.go b/tests/integration/index/array_unique_composite_test.go new file mode 100644 index 0000000000..ec1b10ee0f --- /dev/null +++ b/tests/integration/index/array_unique_composite_test.go @@ -0,0 +1,204 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package index + +import ( + "testing" + + "github.com/sourcenetwork/defradb/errors" + "github.com/sourcenetwork/defradb/internal/db" + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestArrayUniqueCompositeIndex_WithUniqueCombinations_Succeed(t *testing.T) { + req := `query { + User(filter: {nfts1: {_any: {_eq: 2}}, nfts2: {_any: {_eq: 3}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(unique: true, includes: [{name: "nfts1"}, {name: "nfts2"}]) { + name: String + nfts1: [Int!] + nfts2: [Int!] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "nfts1": [1, 2], + "nfts2": [1, 3] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "nfts1": [1, 2], + "nfts2": [2, 4] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Keenan", + "nfts1": [3, 4], + "nfts2": [1, 3] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayUniqueCompositeIndex_IfDocIsCreatedThatViolatesUniqueness_Error(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(unique: true, includes: [{name: "nfts1"}, {name: "nfts2"}]) { + name: String + nfts1: [Int!] + nfts2: [Int!] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "nfts1": [1, 2], + "nfts2": [1, 3] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "nfts1": [1, 2], + "nfts2": [2, 4, 3] + }`, + ExpectedError: db.NewErrCanNotIndexNonUniqueFields( + "bae-02823b81-729a-5cb8-88cb-6df2e15232b1", + errors.NewKV("nfts1", []int64{1, 2}), errors.NewKV("nfts2", []int64{2, 4, 3})).Error(), + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "nfts1": [5, 6, 2], + "nfts2": [1, 3] + }`, + ExpectedError: db.NewErrCanNotIndexNonUniqueFields( + "bae-ccb3fd51-caf9-5b34-b2d2-e4ad020409e1", + errors.NewKV("nfts1", []int64{5, 6, 2}), errors.NewKV("nfts2", []int64{1, 3})).Error(), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayUniqueCompositeIndex_IfDocIsUpdatedThatViolatesUniqueness_Error(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(unique: true, includes: [{name: "nfts1"}, {name: "nfts2"}]) { + name: String + nfts1: [Int!] + nfts2: [Int!] 
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "John",
+					"nfts1": [1, 2],
+					"nfts2": [1, 3]
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"nfts1": [1, 2],
+					"nfts2": [2, 4, 5, 6]
+				}`,
+			},
+			testUtils.UpdateDoc{
+				DocID: 1,
+				Doc: `{
+					"name": "Shahzad",
+					"nfts1": [1],
+					"nfts2": [2, 5, 3]
+				}`,
+				ExpectedError: db.NewErrCanNotIndexNonUniqueFields(
+					"bae-f6b3ab5a-dfa4-53fd-a320-a3e203a9e6f5",
+					errors.NewKV("nfts1", []int64{1}), errors.NewKV("nfts2", []int64{2, 5, 3})).Error(),
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestArrayUniqueCompositeIndex_IfDocsHaveNilValues_Succeed(t *testing.T) {
+	test := testUtils.TestCase{
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type User @index(unique: true, includes: [{name: "nfts1"}, {name: "nfts2"}]) {
+						name: String
+						nfts1: [Int]
+						nfts2: [Int]
+					}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "John",
+					"nfts1": [1, null],
+					"nfts2": [null, 1, 3, null]
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"nfts1": [1, null, 2],
+					"nfts2": [2, 4, null, 5, 6, null]
+				}`,
+			},
+			testUtils.Request{
+				Request: `query {
+					User(filter: {nfts1: {_any: {_eq: null}}, nfts2: {_any: {_eq: null}}}) {
+						name
+					}
+				}`,
+				Results: map[string]any{
+					"User": []map[string]any{
+						{"name": "John"},
+						{"name": "Shahzad"},
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/index/array_unique_test.go b/tests/integration/index/array_unique_test.go
new file mode 100644
index 0000000000..4627595ad9
--- /dev/null
+++ b/tests/integration/index/array_unique_test.go
@@ -0,0 +1,395 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package index
+
+import (
+	"testing"
+
+	"github.com/sourcenetwork/defradb/errors"
+	"github.com/sourcenetwork/defradb/internal/db"
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestArrayUniqueIndex_UponDocCreationWithUniqueElements_Succeed(t *testing.T) {
+	req := `query {
+		User(filter: {nfts: {_any: {_eq: 30}}}) {
+			name
+		}
+	}`
+	test := testUtils.TestCase{
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type User {
+						name: String
+						nfts: [Int!] @index(unique: true)
+					}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "John",
+					"nfts": [0, 30, 20]
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"nfts": [10, 40]
+				}`,
+			},
+			testUtils.Request{
+				Request: req,
+				Results: map[string]any{
+					"User": []map[string]any{
+						{"name": "John"},
+					},
+				},
+			},
+			testUtils.Request{
+				Request:  makeExplainQuery(req),
+				Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1),
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestArrayUniqueIndex_UponDocCreationWithArrayElementThatExists_Error(t *testing.T) {
+	test := testUtils.TestCase{
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type User {
+						name: String
+						nfts: [Int!]
@index(unique: true) + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "nfts": [0, 30, 20] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "nfts": [50, 30] + }`, + ExpectedError: db.NewErrCanNotIndexNonUniqueFields( + "bae-a4045a20-b9e6-5b19-82d5-5e54176895a8", + errors.NewKV("nfts", []int64{50, 30})).Error(), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayUniqueIndex_UponDocUpdateWithUniqueElements_Succeed(t *testing.T) { + req := `query { + User(filter: {nfts: {_any: {_eq: 60}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + nfts: [Int!] @index(unique: true) + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "nfts": [0, 30, 20] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "nfts": [10, 40] + }`, + }, + testUtils.UpdateDoc{ + DocID: 1, + Doc: `{ + "nfts": [10, 60] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Shahzad"}, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayUniqueIndex_UponDocUpdateWithArrayElementThatExists_Error(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + nfts: [Int!] @index(unique: true) + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "nfts": [0, 30, 20] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "nfts": [10, 40] + }`, + }, + testUtils.UpdateDoc{ + DocID: 1, + Doc: `{ + "nfts": [50, 30] + }`, + ExpectedError: db.NewErrCanNotIndexNonUniqueFields( + "bae-d065234c-4bf5-5cb8-8068-6f1fda8ed661", + errors.NewKV("nfts", []int64{50, 30})).Error(), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayUniqueIndex_UponDeletingDoc_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + nfts: [Int!] 
@index(unique: true) + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "nfts": [0, 30, 20] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "nfts": [10, 40] + }`, + }, + testUtils.DeleteDoc{ + DocID: 1, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayUniqueIndex_WithNilElementsAndAnyOp_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int] @index(unique: true) + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, null, 2, 3, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [10, 20, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [33, 44, 55] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_eq: 2}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + {"name": "Shahzad"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayUniqueIndex_WithNilElementsAndAllOp_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int] @index(unique: true) + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, null, 2, 3, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [10, 20, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [33, 44, 55] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Islam", + "numbers": [null, null] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_all: {_ge: 10}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Andy"}, + }, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_all: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Islam"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayUniqueIndex_WithNilElementsAndNoneOp_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int] @index(unique: true) + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, null, 2, 3, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [10, 20, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [33, 44, 55] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_none: {_ge: 10}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_none: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Andy"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +}
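Taken together, the unique-array tests above pin down semantics worth stating once: with @index(unique: true) on a single array field, uniqueness is enforced per element across documents, so two documents may not share any non-nil element (for composite unique indexes it is the combination of elements that must be unique). Nil elements are exempt, which is why several documents can hold null entries side by side. A minimal sketch of that rule, with made-up names and a plain map standing in for the index:

package main

import "fmt"

// hasUniqueViolation sketches the per-element uniqueness rule: a new document
// conflicts if any of its non-nil elements is already indexed for another
// document. Nil elements do not participate, mirroring the nil-element tests.
func hasUniqueViolation(indexed map[int]string, docID string, elements []*int) (string, bool) {
	for _, el := range elements {
		if el == nil {
			continue // nils are exempt from uniqueness
		}
		if owner, ok := indexed[*el]; ok && owner != docID {
			return fmt.Sprintf("element %d already indexed for %s", *el, owner), true
		}
	}
	return "", false
}

func main() {
	n := func(v int) *int { return &v }
	// John's nfts [0, 30, 20] are already indexed; Andy's [50, 30] must fail,
	// matching TestArrayUniqueIndex_UponDocCreationWithArrayElementThatExists_Error.
	indexed := map[int]string{0: "john", 30: "john", 20: "john"}
	if msg, bad := hasUniqueViolation(indexed, "andy", []*int{n(50), n(30)}); bad {
		fmt.Println(msg) // element 30 already indexed for john
	}
}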
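The WithIndexFetches assertions through the array tests follow a pattern that is worth making explicit, under a hedged reading of the explain output: _any with an equality filter can resolve via direct lookups of matching per-element entries (one fetch per hit), whereas _all and _none have to visit every indexed element of the candidate documents. A rough, self-contained sketch of the three operators evaluated over a document's elements, consistent with those counts (names invented):

package main

import "fmt"

// evalArrayOp shows why _any can stop at the first matching element while
// _all and _none must inspect every element of the array.
func evalArrayOp(op string, elements []int, pred func(int) bool) bool {
	switch op {
	case "_any":
		for _, el := range elements {
			if pred(el) {
				return true // early exit on first hit
			}
		}
		return false
	case "_all":
		for _, el := range elements {
			if !pred(el) {
				return false // every element must be checked
			}
		}
		return true
	case "_none":
		for _, el := range elements {
			if pred(el) {
				return false // every element must be checked
			}
		}
		return true
	}
	return false
}

func main() {
	numbers := []int{30, 40, 50, 30}
	fmt.Println(evalArrayOp("_any", numbers, func(n int) bool { return n == 30 })) // true
	fmt.Println(evalArrayOp("_all", numbers, func(n int) bool { return n > 1 }))   // true
	fmt.Println(evalArrayOp("_none", numbers, func(n int) bool { return n == 3 })) // true
}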
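Finally, since the diff introduces internal/utils/slice without showing a call site, here is a small usage sketch. It has to live inside the defradb module because the package is internal; behaviour is per the doc comments above, and order is not preserved since both helpers swap-delete against the end of the slice:

package main

import (
	"fmt"

	"github.com/sourcenetwork/defradb/internal/utils/slice"
)

func main() {
	// Duplicates are removed in O(n) time and space; the relative order of the
	// survivors is not preserved.
	fmt.Println(slice.RemoveDuplicates([]int{1, 2, 4, 2, 3, 4}))

	// RemoveFirstIf returns the shortened slice plus the removed element as an
	// immutable.Option; here it removes the first even number, 4.
	rest, removed := slice.RemoveFirstIf([]int{1, 3, 4, 5}, func(n int) bool { return n%2 == 0 })
	fmt.Println(rest, removed.HasValue()) // [1 3 5] true
}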