From 61b9c57c8212856dbe533f0eb5100121225b9321 Mon Sep 17 00:00:00 2001 From: Sergey Date: Wed, 13 Nov 2024 12:19:37 +0100 Subject: [PATCH 01/10] GO-4487: Fix space offload: do not offload files that are existing in other spaces --- core/files/fileoffloader/offloader.go | 9 ++++- core/files/fileoffloader/offloader_test.go | 45 ++++++++++++++++++++++ 2 files changed, 52 insertions(+), 2 deletions(-) diff --git a/core/files/fileoffloader/offloader.go b/core/files/fileoffloader/offloader.go index deb724ba13..b32d595291 100644 --- a/core/files/fileoffloader/offloader.go +++ b/core/files/fileoffloader/offloader.go @@ -192,20 +192,25 @@ func (s *service) offloadFileSafe(ctx context.Context, record database.Record, includeNotPinned bool, ) (uint64, error) { - existingObjects, err := s.objectStore.SpaceIndex(spaceId).Query(database.Query{ + existingObjects, err := s.objectStore.QueryCrossSpace(database.Query{ Filters: []*model.BlockContentDataviewFilter{ { RelationKey: bundle.RelationKeyFileId.String(), Condition: model.BlockContentDataviewFilter_Equal, Value: pbtypes.String(fileId), }, + { + RelationKey: bundle.RelationKeySpaceId.String(), + Condition: model.BlockContentDataviewFilter_NotEqual, + Value: pbtypes.String(spaceId), + }, }, }) if err != nil { return 0, err } if len(existingObjects) > 0 { - return s.fileOffload(ctx, record.Details, false) + return 0, nil } return s.fileOffload(ctx, record.Details, includeNotPinned) } diff --git a/core/files/fileoffloader/offloader_test.go b/core/files/fileoffloader/offloader_test.go index acbe22d031..88d1785732 100644 --- a/core/files/fileoffloader/offloader_test.go +++ b/core/files/fileoffloader/offloader_test.go @@ -9,6 +9,7 @@ import ( "github.com/anyproto/any-sync/app" "github.com/anyproto/any-sync/commonfile/fileservice" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/anyproto/anytype-heart/core/block/object/idresolver/mock_idresolver" @@ -93,6 +94,50 @@ func TestOffloadAllFiles(t *testing.T) { require.NoError(t, err) } +func TestSpaceOffload(t *testing.T) { + fx := newFixture(t) + + ctx := context.Background() + fileNode1, err := fx.commonFile.AddFile(ctx, generateTestFileData(t, 2*1024*1024)) + require.NoError(t, err) + + fileNode2, err := fx.commonFile.AddFile(ctx, generateTestFileData(t, 2*1024*1024)) + require.NoError(t, err) + + fx.objectStore.AddObjects(t, "space1", []objectstore.TestObject{ + { + bundle.RelationKeyId: pbtypes.String("fileObjectId1"), + bundle.RelationKeySpaceId: pbtypes.String("space1"), + bundle.RelationKeyFileId: pbtypes.String(fileNode1.Cid().String()), + bundle.RelationKeyFileBackupStatus: pbtypes.Int64(int64(filesyncstatus.Synced)), + }, + { + bundle.RelationKeyId: pbtypes.String("fileObjectId2"), + bundle.RelationKeySpaceId: pbtypes.String("space1"), + bundle.RelationKeyFileId: pbtypes.String(fileNode2.Cid().String()), + bundle.RelationKeyFileBackupStatus: pbtypes.Int64(int64(filesyncstatus.Synced)), + }, + }) + fx.objectStore.AddObjects(t, "space2", []objectstore.TestObject{ + { + bundle.RelationKeyId: pbtypes.String("fileObjectId3"), + bundle.RelationKeySpaceId: pbtypes.String("space2"), + bundle.RelationKeyFileId: pbtypes.String(fileNode2.Cid().String()), + bundle.RelationKeyFileBackupStatus: pbtypes.Int64(int64(filesyncstatus.Synced)), + }, + }) + + offloaded, _, err := fx.FileSpaceOffload(ctx, "space1", false) + require.NoError(t, err) + assert.True(t, 1 == offloaded) + + _, err = fx.commonFile.GetFile(ctx, fileNode1.Cid()) + require.Error(t, err) + + _, err = 
fx.commonFile.GetFile(ctx, fileNode2.Cid()) + require.NoError(t, err) +} + func generateTestFileData(t *testing.T, size int) io.Reader { buf := make([]byte, size) _, err := rand.Read(buf) From 83d3eedcb0fce80aa551140e93f3b7943fc14baf Mon Sep 17 00:00:00 2001 From: Sergey Date: Wed, 13 Nov 2024 16:37:53 +0100 Subject: [PATCH 02/10] GO-4486: Store file index inside object --- core/block/editor/files.go | 44 +++++++ core/block/export/export.go | 4 +- core/block/files.go | 2 +- core/files/file.go | 27 +++- core/files/fileobject/fileindex.go | 4 +- core/files/files.go | 175 ++++++++++++++++---------- core/files/images.go | 34 +++-- core/indexer/reindex.go | 9 -- pkg/lib/bundle/relation.gen.go | 87 ++++++++++++- pkg/lib/bundle/relations.json | 54 ++++++++ pkg/lib/gateway/gateway.go | 4 +- pkg/lib/localstore/filestore/debug.go | 5 +- pkg/lib/localstore/filestore/files.go | 51 -------- pkg/lib/pb/storage/file.pb.go | 140 +++++++++++++++------ pkg/lib/pb/storage/protos/file.proto | 21 ++-- util/svg/rasterizer_no.go | 3 +- 16 files changed, 464 insertions(+), 200 deletions(-) diff --git a/core/block/editor/files.go b/core/block/editor/files.go index 7068a5ad05..028e3f92bf 100644 --- a/core/block/editor/files.go +++ b/core/block/editor/files.go @@ -11,11 +11,14 @@ import ( "github.com/anyproto/anytype-heart/core/block/migration" "github.com/anyproto/anytype-heart/core/block/source" "github.com/anyproto/anytype-heart/core/domain" + "github.com/anyproto/anytype-heart/core/files" "github.com/anyproto/anytype-heart/core/files/fileobject" "github.com/anyproto/anytype-heart/core/files/reconciler" "github.com/anyproto/anytype-heart/core/filestorage" "github.com/anyproto/anytype-heart/pkg/lib/bundle" coresb "github.com/anyproto/anytype-heart/pkg/lib/core/smartblock" + "github.com/anyproto/anytype-heart/pkg/lib/pb/storage" + "github.com/anyproto/anytype-heart/util/pbtypes" ) // required relations for files beside the bundle.RequiredInternalRelations @@ -34,6 +37,7 @@ func (f *ObjectFactory) newFile(spaceId string, sb smartblock.SmartBlock) *File Text: stext.NewText(sb, store, f.eventSender), fileObjectService: f.fileObjectService, reconciler: f.fileReconciler, + fileService: f.fileService, } } @@ -44,6 +48,7 @@ type File struct { stext.Text fileObjectService fileobject.Service reconciler reconciler.Reconciler + fileService files.Service } func (f *File) CreationStateMigration(ctx *smartblock.InitContext) migration.Migration { @@ -96,5 +101,44 @@ func (f *File) Init(ctx *smartblock.InitContext) error { return f.fileObjectService.EnsureFileAddedToSyncQueue(fullId, applyInfo.State.Details()) }, smartblock.HookOnStateRebuild) } + + infos, err := f.fileService.IndexFile(ctx.Ctx, ctx.State.Details()) + if err != nil { + return fmt.Errorf("get infos for indexing: %w", err) + } + if len(infos) > 0 { + fileInfosToDetails(infos, ctx.State) + } + + return nil +} + +func fileInfosToDetails(infos []*storage.FileInfo, st *state.State) error { + if len(infos) == 0 { + return fmt.Errorf("empty info list") + } + var ( + variantIds []string + keys []string // fill in smartblock? 
+ widths []int + checksums []string + mills []string + ) + + keysInfo := st.GetFileInfo().EncryptionKeys + + st.SetDetailAndBundledRelation(bundle.RelationKeyFileSourceChecksum, pbtypes.String(infos[0].Source)) + for _, info := range infos { + variantIds = append(variantIds, info.Hash) + checksums = append(checksums, info.Checksum) + mills = append(mills, info.Mill) + widths = append(widths, int(pbtypes.GetInt64(info.Meta, "width"))) + keys = append(keys, keysInfo[info.Path]) + } + st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantIds, pbtypes.StringList(variantIds)) + st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantChecksums, pbtypes.StringList(checksums)) + st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantMills, pbtypes.StringList(mills)) + st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantWidths, pbtypes.IntList(widths...)) + st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantKeys, pbtypes.StringList(keys)) return nil } diff --git a/core/block/export/export.go b/core/block/export/export.go index 169b7c3bbd..b2ee899ae4 100644 --- a/core/block/export/export.go +++ b/core/block/export/export.go @@ -960,7 +960,7 @@ func (e *exportContext) saveFile(ctx context.Context, wr writer, fileObject sb.S if err != nil { return "", err } - if strings.HasPrefix(file.Info().Media, "image") { + if strings.HasPrefix(file.Media(), "image") { image, err := e.fileService.ImageByHash(context.TODO(), fullId) if err != nil { return "", err @@ -980,7 +980,7 @@ func (e *exportContext) saveFile(ctx context.Context, wr writer, fileObject sb.S if err != nil { return "", err } - return fileName, wr.WriteFile(fileName, rd, file.Info().LastModifiedDate) + return fileName, wr.WriteFile(fileName, rd, file.LastModifiedDate()) } func (e *exportContext) createProfileFile(spaceID string, wr writer) error { diff --git a/core/block/files.go b/core/block/files.go index e4ea28e706..7d6dbf2106 100644 --- a/core/block/files.go +++ b/core/block/files.go @@ -67,7 +67,7 @@ func (s *Service) DownloadFile(ctx context.Context, req *pb.RpcFileDownloadReque countReader = datacounter.NewReaderCounter(r) fileName := f.Meta().Name if fileName == "" { - fileName = f.Info().Name + fileName = f.Name() } path, err := files.WriteReaderIntoFileReuseSameExistingFile(req.Path+string(os.PathSeparator)+fileName, countReader) diff --git a/core/files/file.go b/core/files/file.go index a0eef4ce32..f33b18d349 100644 --- a/core/files/file.go +++ b/core/files/file.go @@ -19,11 +19,14 @@ import ( ) type File interface { - Meta() *FileMeta + Meta() *FileMeta // could be taken from Details FileId() domain.FileId - Reader(ctx context.Context) (io.ReadSeeker, error) + Reader(ctx context.Context) (io.ReadSeeker, error) // getNode(details.FileVariants[idx]) Details(ctx context.Context) (*types.Struct, domain.TypeKey, error) - Info() *storage.FileInfo + Name() string + Media() string + LastModifiedDate() int64 + Mill() string } var _ File = (*file)(nil) @@ -119,8 +122,20 @@ func (f *file) Details(ctx context.Context) (*types.Struct, domain.TypeKey, erro return t, typeKey, nil } -func (f *file) Info() *storage.FileInfo { - return f.info +func (f *file) Name() string { + return f.info.Name +} + +func (f *file) Media() string { + return f.info.Media +} + +func (f *file) LastModifiedDate() int64 { + return f.info.LastModifiedDate +} + +func (f *file) Mill() string { + return f.info.Mill } func (f *file) Meta() *FileMeta { @@ -138,7 +153,7 @@ func (f *file) FileId() domain.FileId { } func (f *file) Reader(ctx 
context.Context) (io.ReadSeeker, error) { - return f.node.getContentReader(ctx, f.spaceID, f.info) + return f.node.getContentReader(ctx, f.spaceID, f.info.Hash, f.info.Key) } func calculateCommonDetails( diff --git a/core/files/fileobject/fileindex.go b/core/files/fileobject/fileindex.go index c6c280c812..22f64dac93 100644 --- a/core/files/fileobject/fileindex.go +++ b/core/files/fileobject/fileindex.go @@ -265,7 +265,7 @@ func (ind *indexer) buildDetails(ctx context.Context, id domain.FullFileId) (det return nil, "", err } - if file.Info().Mill == mill.BlobId { + if file.Mill() == mill.BlobId { details, typeKey, err = file.Details(ctx) if err != nil { return nil, "", err @@ -284,7 +284,7 @@ func (ind *indexer) buildDetails(ctx context.Context, id domain.FullFileId) (det // Overwrite typeKey for images in case that image is uploaded as file. // That can be possible because some images can't be handled properly and wee fall back to // handling them as files - if mill.IsImage(file.Info().Media) { + if mill.IsImage(file.Media()) { typeKey = bundle.TypeKeyImage } diff --git a/core/files/files.go b/core/files/files.go index d13662b944..459abd8af1 100644 --- a/core/files/files.go +++ b/core/files/files.go @@ -15,6 +15,7 @@ import ( "github.com/anyproto/any-sync/app" "github.com/anyproto/any-sync/commonfile/fileservice" "github.com/gogo/protobuf/proto" + "github.com/gogo/protobuf/types" uio "github.com/ipfs/boxo/ipld/unixfs/io" "github.com/ipfs/go-cid" ipld "github.com/ipfs/go-ipld-format" @@ -26,14 +27,17 @@ import ( "github.com/anyproto/anytype-heart/core/filestorage" "github.com/anyproto/anytype-heart/core/filestorage/filesync" "github.com/anyproto/anytype-heart/pb" + "github.com/anyproto/anytype-heart/pkg/lib/bundle" "github.com/anyproto/anytype-heart/pkg/lib/crypto/symmetric" "github.com/anyproto/anytype-heart/pkg/lib/crypto/symmetric/cfb" - "github.com/anyproto/anytype-heart/pkg/lib/crypto/symmetric/gcm" + "github.com/anyproto/anytype-heart/pkg/lib/database" "github.com/anyproto/anytype-heart/pkg/lib/ipfs/helpers" "github.com/anyproto/anytype-heart/pkg/lib/localstore/filestore" + "github.com/anyproto/anytype-heart/pkg/lib/localstore/objectstore" "github.com/anyproto/anytype-heart/pkg/lib/logging" m "github.com/anyproto/anytype-heart/pkg/lib/mill" "github.com/anyproto/anytype-heart/pkg/lib/mill/schema" + "github.com/anyproto/anytype-heart/pkg/lib/pb/model" "github.com/anyproto/anytype-heart/pkg/lib/pb/storage" "github.com/anyproto/anytype-heart/util/pbtypes" ) @@ -47,14 +51,20 @@ var log = logging.Logger("anytype-files") var _ Service = (*service)(nil) type Service interface { + // only in uploader FileAdd(ctx context.Context, spaceID string, options ...AddOption) (*AddResult, error) + // buildDetails (fileindex.go), gateway, export, DownloadFile FileByHash(ctx context.Context, id domain.FullFileId) (File, error) FileGetKeys(id domain.FileId) (*domain.FileEncryptionKeys, error) GetSpaceUsage(ctx context.Context, spaceID string) (*pb.RpcFileSpaceUsageResponseUsage, error) GetNodeUsage(ctx context.Context) (*NodeUsageResponse, error) + // only in uploader ImageAdd(ctx context.Context, spaceID string, options ...AddOption) (*AddResult, error) + // buildDetails (fileindex.go), gateway, export, DownloadFile, html converter (clipboard) ImageByHash(ctx context.Context, id domain.FullFileId) (Image, error) + IndexFile(ctx context.Context, details *types.Struct) ([]*storage.FileInfo, error) + app.Component } @@ -64,6 +74,7 @@ type service struct { fileSync filesync.FileSync dagService ipld.DAGService 
fileStorage filestorage.FileStorage + objectStore objectstore.ObjectStore lock sync.Mutex addOperationLocks map[string]*sync.Mutex @@ -79,6 +90,7 @@ func (s *service) Init(a *app.App) (err error) { s.fileStore = app.MustComponent[filestore.FileStore](a) s.commonFile = app.MustComponent[fileservice.FileService](a) s.fileSync = app.MustComponent[filesync.FileSync](a) + s.objectStore = app.MustComponent[objectstore.ObjectStore](a) s.dagService = s.commonFile.DAGService() s.fileStorage = app.MustComponent[filestorage.FileStorage](a) @@ -263,41 +275,22 @@ func (s *service) fileInfoFromPath(ctx context.Context, spaceId string, fileId d if err != nil { return nil, err } + ed, err := getEncryptorDecryptor(key) + if err != nil { + return nil, err + } + decryptedReader, err := ed.DecryptReader(r) + if err != nil { + return nil, err + } + b, err := ioutil.ReadAll(decryptedReader) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal file info proto with all encryption modes: %w", err) - modes := []storage.FileInfoEncryptionMode{storage.FileInfo_AES_CFB, storage.FileInfo_AES_GCM} - for i, mode := range modes { - if i > 0 { - _, err = r.Seek(0, io.SeekStart) - if err != nil { - return nil, fmt.Errorf("failed to seek ciphertext after enc mode try") - } - } - ed, err := getEncryptorDecryptor(key, mode) - if err != nil { - return nil, err - } - decryptedReader, err := ed.DecryptReader(r) - if err != nil { - return nil, err - } - b, err := ioutil.ReadAll(decryptedReader) - if err != nil { - if i == len(modes)-1 { - return nil, fmt.Errorf("failed to unmarshal file info proto with all encryption modes: %w", err) - } - - continue - } - err = proto.Unmarshal(b, &file) - if err != nil || file.Hash == "" { - if i == len(modes)-1 { - return nil, fmt.Errorf("failed to unmarshal file info proto with all encryption modes: %w", err) - } - continue - } - // save successful enc mode so it will be cached in the DB - file.EncMode = mode - break + } + err = proto.Unmarshal(b, &file) + if err != nil || file.Hash == "" { + return nil, fmt.Errorf("failed to unmarshal file info proto with all encryption modes: %w", err) } } else { b, err := io.ReadAll(r) @@ -318,8 +311,8 @@ func (s *service) fileInfoFromPath(ctx context.Context, spaceId string, fileId d return &file, nil } -func (s *service) getContentReader(ctx context.Context, spaceID string, file *storage.FileInfo) (symmetric.ReadSeekCloser, error) { - fileCid, err := cid.Parse(file.Hash) +func (s *service) getContentReader(ctx context.Context, spaceID string, rawCid string, encKey string) (symmetric.ReadSeekCloser, error) { + fileCid, err := cid.Parse(rawCid) if err != nil { return nil, err } @@ -327,16 +320,16 @@ func (s *service) getContentReader(ctx context.Context, spaceID string, file *st if err != nil { return nil, err } - if file.Key == "" { + if encKey == "" { return fd, nil } - key, err := symmetric.FromString(file.Key) + key, err := symmetric.FromString(encKey) if err != nil { return nil, err } - dec, err := getEncryptorDecryptor(key, file.EncMode) + dec, err := getEncryptorDecryptor(key) if err != nil { return nil, err } @@ -556,36 +549,35 @@ func (s *service) fileIndexInfo(ctx context.Context, id domain.FullFileId, updat var files []*storage.FileInfo if looksLikeFileNode(dirNode) { + path := encryptionKeyPath(schema.LinkFile) var key string if keys != nil { - key = keys[encryptionKeyPath(schema.LinkFile)] + key = keys[path] } fileIndex, err := s.fileInfoFromPath(ctx, id.SpaceId, id.FileId, id.FileId.String()+"/"+dirLink.Name, key) if err != nil { 
return nil, fmt.Errorf("fileInfoFromPath error: %w", err) } + fileIndex.Path = path files = append(files, fileIndex) } else { for _, link := range dirNode.Links() { + path := encryptionKeyPath(link.Name) var key string if keys != nil { - key = keys[encryptionKeyPath(link.Name)] + key = keys[path] } fileIndex, err := s.fileInfoFromPath(ctx, id.SpaceId, id.FileId, id.FileId.String()+"/"+dirLink.Name+"/"+link.Name, key) if err != nil { return nil, fmt.Errorf("fileInfoFromPath error: %w", err) } + fileIndex.Path = path files = append(files, fileIndex) } } - err = s.fileStore.AddFileVariants(updateIfExists, files...) - if err != nil { - return nil, fmt.Errorf("failed to add files to store: %w", err) - } - return files, nil } @@ -622,26 +614,19 @@ func checksum(r io.Reader, wontEncrypt bool) (string, error) { return base32.RawHexEncoding.EncodeToString(checksum[:]), nil } -func getEncryptorDecryptor(key symmetric.Key, mode storage.FileInfoEncryptionMode) (symmetric.EncryptorDecryptor, error) { - switch mode { - case storage.FileInfo_AES_GCM: - return gcm.New(key), nil - case storage.FileInfo_AES_CFB: - return cfb.New(key, [aes.BlockSize]byte{}), nil - default: - return nil, fmt.Errorf("unsupported encryption mode") - } +func getEncryptorDecryptor(key symmetric.Key) (symmetric.EncryptorDecryptor, error) { + return cfb.New(key, [aes.BlockSize]byte{}), nil } -func (s *service) FileByHash(ctx context.Context, id domain.FullFileId) (File, error) { - fileList, err := s.fileStore.ListFileVariants(id.FileId) - if err != nil { - return nil, err +func (s *service) IndexFile(ctx context.Context, details *types.Struct) ([]*storage.FileInfo, error) { + id := domain.FullFileId{ + SpaceId: pbtypes.GetString(details, bundle.RelationKeySpaceId.String()), + FileId: domain.FileId(pbtypes.GetString(details, bundle.RelationKeyFileId.String())), } - - if len(fileList) == 0 || fileList[0].MetaHash == "" { + variantsList := pbtypes.GetStringList(details, bundle.RelationKeyFileVariantIds.String()) + if true || len(variantsList) == 0 { // info from ipfs - fileList, err = s.fileIndexInfo(ctx, id, false) + fileList, err := s.fileIndexInfo(ctx, id, false) if err != nil { return nil, err } @@ -657,16 +642,76 @@ func (s *service) FileByHash(ctx context.Context, id domain.FullFileId) (File, e return nil, fmt.Errorf("set is file imported: %w", err) } } + return fileList, nil + } + return nil, nil +} + +// TODO SHould be accesed via OBJECT +func (s *service) FileByHash(ctx context.Context, id domain.FullFileId) (File, error) { + recs, err := s.objectStore.SpaceIndex(id.SpaceId).Query(database.Query{ + Filters: []*model.BlockContentDataviewFilter{ + { + RelationKey: bundle.RelationKeyFileId.String(), + Condition: model.BlockContentDataviewFilter_Equal, + Value: pbtypes.String(id.FileId.String()), + }, + }, + }) + if err != nil { + return nil, fmt.Errorf("query details: %w", err) } - fileIndex := fileList[0] + + if len(recs) == 0 { + return nil, fmt.Errorf("noooooo") + } + fileRec := recs[0] + + variantsList := pbtypes.GetStringList(fileRec.Details, bundle.RelationKeyFileVariantIds.String()) + if len(variantsList) == 0 { + return nil, fmt.Errorf("not indexed") + } + + infos := getFileInfosFromDetails(fileRec.Details) return &file{ spaceID: id.SpaceId, fileId: id.FileId, - info: fileIndex, + info: infos[0], node: s, }, nil } +func getFileInfosFromDetails(details *types.Struct) []*storage.FileInfo { + variantsList := pbtypes.GetStringList(details, bundle.RelationKeyFileVariantIds.String()) + sourceChecksum := 
pbtypes.GetString(details, bundle.RelationKeyFileSourceChecksum.String()) + infos := make([]*storage.FileInfo, 0, len(variantsList)) + for i, variantId := range variantsList { + var meta *types.Struct + widths := pbtypes.GetIntList(details, bundle.RelationKeyFileVariantWidths.String()) + if widths[i] > 0 { + meta = &types.Struct{ + Fields: map[string]*types.Value{ + "width": pbtypes.Int64(int64(widths[i])), + }, + } + } + info := &storage.FileInfo{ + Name: pbtypes.GetString(details, bundle.RelationKeyName.String()), + Size_: pbtypes.GetInt64(details, bundle.RelationKeySizeInBytes.String()), + Source: sourceChecksum, + Media: pbtypes.GetString(details, bundle.RelationKeyFileMimeType.String()), + + Hash: variantId, + Checksum: pbtypes.GetStringList(details, bundle.RelationKeyFileVariantChecksums.String())[i], + Mill: pbtypes.GetStringList(details, bundle.RelationKeyFileVariantMills.String())[i], + Meta: meta, + Key: pbtypes.GetStringList(details, bundle.RelationKeyFileVariantKeys.String())[i], + } + infos = append(infos, info) + } + return infos +} + func encryptionKeyPath(linkName string) string { if linkName == schema.LinkFile { return "/0/" diff --git a/core/files/images.go b/core/files/images.go index 3d70042166..a5228cbe8f 100644 --- a/core/files/images.go +++ b/core/files/images.go @@ -9,31 +9,45 @@ import ( ipld "github.com/ipfs/go-ipld-format" "github.com/anyproto/anytype-heart/core/domain" + "github.com/anyproto/anytype-heart/pkg/lib/bundle" + "github.com/anyproto/anytype-heart/pkg/lib/database" "github.com/anyproto/anytype-heart/pkg/lib/ipfs/helpers" "github.com/anyproto/anytype-heart/pkg/lib/localstore" "github.com/anyproto/anytype-heart/pkg/lib/mill/schema" + "github.com/anyproto/anytype-heart/pkg/lib/pb/model" "github.com/anyproto/anytype-heart/pkg/lib/pb/storage" + "github.com/anyproto/anytype-heart/util/pbtypes" ) func (s *service) ImageByHash(ctx context.Context, id domain.FullFileId) (Image, error) { - files, err := s.fileStore.ListFileVariants(id.FileId) + recs, err := s.objectStore.SpaceIndex(id.SpaceId).Query(database.Query{ + Filters: []*model.BlockContentDataviewFilter{ + { + RelationKey: bundle.RelationKeyFileId.String(), + Condition: model.BlockContentDataviewFilter_Equal, + Value: pbtypes.String(id.FileId.String()), + }, + }, + }) if err != nil { - return nil, err + return nil, fmt.Errorf("query details: %w", err) } - // check the image files count explicitly because we have a bug when the info can be cached not fully(only for some files) - if len(files) < 4 || files[0].MetaHash == "" { - // index image files info from ipfs - files, err = s.fileIndexInfo(ctx, id, true) - if err != nil { - return nil, err - } + if len(recs) == 0 { + return nil, fmt.Errorf("noooooo") + } + fileRec := recs[0] + + variantsList := pbtypes.GetStringList(fileRec.Details, bundle.RelationKeyFileVariantIds.String()) + if len(variantsList) == 0 { + return nil, fmt.Errorf("not indexed") } + infos := getFileInfosFromDetails(fileRec.Details) return &image{ spaceID: id.SpaceId, fileId: id.FileId, - onlyResizeVariants: selectAndSortResizeVariants(files), + onlyResizeVariants: selectAndSortResizeVariants(infos), service: s, }, nil } diff --git a/core/indexer/reindex.go b/core/indexer/reindex.go index 301406afb1..597a405faf 100644 --- a/core/indexer/reindex.go +++ b/core/indexer/reindex.go @@ -370,15 +370,6 @@ func (i *indexer) removeCommonIndexes(spaceId string, space clientspace.Space, f log.Infof("start store reindex (%s)", flags.String()) } - if flags.fileKeys { - err = 
i.fileStore.RemoveEmptyFileKeys() - if err != nil { - log.Errorf("reindex failed to RemoveEmptyFileKeys: %v", err) - } else { - log.Infof("RemoveEmptyFileKeys filekeys succeed") - } - } - if flags.eraseLinks { store := i.store.SpaceIndex(spaceId) ids, err := store.ListIds() diff --git a/pkg/lib/bundle/relation.gen.go b/pkg/lib/bundle/relation.gen.go index 519184734b..04b36de24b 100644 --- a/pkg/lib/bundle/relation.gen.go +++ b/pkg/lib/bundle/relation.gen.go @@ -9,7 +9,7 @@ import ( "github.com/anyproto/anytype-heart/pkg/lib/pb/model" ) -const RelationChecksum = "44f147da7e8233e89bb42533778c305d0735a54238aa2e5ba331a3396145450d" +const RelationChecksum = "a1a9658be3a273287ef31208987aeb19a724955011b3f567aa7b8eccd95f33e2" const ( RelationKeyTag domain.RelationKey = "tag" RelationKeyCamera domain.RelationKey = "camera" @@ -144,6 +144,12 @@ const ( RelationKeyChatId domain.RelationKey = "chatId" RelationKeyMentions domain.RelationKey = "mentions" RelationKeyTimestamp domain.RelationKey = "timestamp" + RelationKeyFileVariantIds domain.RelationKey = "fileVariantIds" + RelationKeyFileVariantKeys domain.RelationKey = "fileVariantKeys" + RelationKeyFileVariantWidths domain.RelationKey = "fileVariantWidths" + RelationKeyFileVariantChecksums domain.RelationKey = "fileVariantChecksums" + RelationKeyFileVariantMills domain.RelationKey = "fileVariantMills" + RelationKeyFileSourceChecksum domain.RelationKey = "fileSourceChecksum" ) var ( @@ -605,6 +611,20 @@ var ( ReadOnlyRelation: true, Scope: model.Relation_type, }, + RelationKeyFileSourceChecksum: { + + DataSource: model.Relation_details, + Description: "", + Format: model.RelationFormat_shorttext, + Hidden: true, + Id: "_brfileSourceChecksum", + Key: "fileSourceChecksum", + MaxCount: 1, + Name: "Underlying file id", + ReadOnly: true, + ReadOnlyRelation: true, + Scope: model.Relation_type, + }, RelationKeyFileSyncStatus: { DataSource: model.Relation_derived, @@ -619,6 +639,71 @@ var ( ReadOnlyRelation: true, Scope: model.Relation_type, }, + RelationKeyFileVariantChecksums: { + + DataSource: model.Relation_details, + Description: "", + Format: model.RelationFormat_shorttext, + Hidden: true, + Id: "_brfileVariantChecksums", + Key: "fileVariantChecksums", + Name: "Underlying file id", + ReadOnly: true, + ReadOnlyRelation: true, + Scope: model.Relation_type, + }, + RelationKeyFileVariantIds: { + + DataSource: model.Relation_details, + Description: "", + Format: model.RelationFormat_shorttext, + Hidden: true, + Id: "_brfileVariantIds", + Key: "fileVariantIds", + Name: "Underlying file id", + ReadOnly: true, + ReadOnlyRelation: true, + Scope: model.Relation_type, + }, + RelationKeyFileVariantKeys: { + + DataSource: model.Relation_details, + Description: "", + Format: model.RelationFormat_shorttext, + Hidden: true, + Id: "_brfileVariantKeys", + Key: "fileVariantKeys", + Name: "Underlying file id", + ReadOnly: true, + ReadOnlyRelation: true, + Scope: model.Relation_type, + }, + RelationKeyFileVariantMills: { + + DataSource: model.Relation_details, + Description: "", + Format: model.RelationFormat_shorttext, + Hidden: true, + Id: "_brfileVariantMills", + Key: "fileVariantMills", + Name: "Underlying file id", + ReadOnly: true, + ReadOnlyRelation: true, + Scope: model.Relation_type, + }, + RelationKeyFileVariantWidths: { + + DataSource: model.Relation_details, + Description: "", + Format: model.RelationFormat_number, + Hidden: true, + Id: "_brfileVariantWidths", + Key: "fileVariantWidths", + Name: "Underlying file id", + ReadOnly: true, + ReadOnlyRelation: true, + 
Scope: model.Relation_type, + }, RelationKeyFocalRatio: { DataSource: model.Relation_details, diff --git a/pkg/lib/bundle/relations.json b/pkg/lib/bundle/relations.json index 30ccb09fb3..3f4caa4fb2 100644 --- a/pkg/lib/bundle/relations.json +++ b/pkg/lib/bundle/relations.json @@ -1360,5 +1360,59 @@ "name": "Timestamp", "readonly": true, "source": "derived" + }, + { + "format": "shorttext", + "hidden": true, + "key": "fileVariantIds", + "maxCount": 0, + "name": "Underlying file id", + "readonly": true, + "source": "details" + }, + { + "format": "shorttext", + "hidden": true, + "key": "fileVariantKeys", + "maxCount": 0, + "name": "Underlying file id", + "readonly": true, + "source": "details" + }, + { + "format": "number", + "hidden": true, + "key": "fileVariantWidths", + "maxCount": 0, + "name": "Underlying file id", + "readonly": true, + "source": "details" + }, + { + "format": "shorttext", + "hidden": true, + "key": "fileVariantChecksums", + "maxCount": 0, + "name": "Underlying file id", + "readonly": true, + "source": "details" + }, + { + "format": "shorttext", + "hidden": true, + "key": "fileVariantMills", + "maxCount": 0, + "name": "Underlying file id", + "readonly": true, + "source": "details" + }, + { + "format": "shorttext", + "hidden": true, + "key": "fileSourceChecksum", + "maxCount": 1, + "name": "Underlying file id", + "readonly": true, + "source": "details" } ] diff --git a/pkg/lib/gateway/gateway.go b/pkg/lib/gateway/gateway.go index e7f8d5dad4..5d273767e7 100644 --- a/pkg/lib/gateway/gateway.go +++ b/pkg/lib/gateway/gateway.go @@ -378,7 +378,7 @@ func (g *gateway) getImageReader(ctx context.Context, id domain.FullFileId, req if err != nil { return nil, fmt.Errorf("get image file: %w", err) } - if filepath.Ext(file.Info().Name) == constant.SvgExt { + if filepath.Ext(file.Name()) == constant.SvgExt { return g.handleSVGFile(ctx, file) } } else { @@ -390,7 +390,7 @@ func (g *gateway) getImageReader(ctx context.Context, id domain.FullFileId, req if err != nil { return nil, fmt.Errorf("get image file: %w", err) } - if filepath.Ext(file.Info().Name) == constant.SvgExt { + if filepath.Ext(file.Name()) == constant.SvgExt { return g.handleSVGFile(ctx, file) } } diff --git a/pkg/lib/localstore/filestore/debug.go b/pkg/lib/localstore/filestore/debug.go index 0f8f53f1b8..a759ea1bc4 100644 --- a/pkg/lib/localstore/filestore/debug.go +++ b/pkg/lib/localstore/filestore/debug.go @@ -2,7 +2,6 @@ package filestore import ( "net/http" - "path/filepath" "github.com/go-chi/chi/v5" "github.com/gogo/protobuf/proto" @@ -39,7 +38,7 @@ func sanitizeFileInfos(infos []*storage.FileInfo, err error) ([]*storage.FileInf func sanitizeFileInfoForDebug(info *storage.FileInfo) *storage.FileInfo { out := proto.Clone(info).(*storage.FileInfo) - out.Key = "" - out.Name = "" + filepath.Ext(out.Name) + // out.Key = "" + // out.Name = "" + filepath.Ext(out.Name) return out } diff --git a/pkg/lib/localstore/filestore/files.go b/pkg/lib/localstore/filestore/files.go index d2bbe67dce..d643c0ec3e 100644 --- a/pkg/lib/localstore/filestore/files.go +++ b/pkg/lib/localstore/filestore/files.go @@ -90,8 +90,6 @@ type FileStore interface { localstore.Indexable AddFileVariant(file *storage.FileInfo) error - AddFileVariants(upsert bool, files ...*storage.FileInfo) error - GetFileVariant(fileId domain.FileContentId) (*storage.FileInfo, error) GetFileVariantBySource(mill string, source string, opts string) (*storage.FileInfo, error) GetFileVariantByChecksum(mill string, checksum string) (*storage.FileInfo, error) 
DeleteFileVariants(variantIds []domain.FileContentId) error @@ -104,7 +102,6 @@ type FileStore interface { AddFileKeys(fileKeys ...domain.FileEncryptionKeys) error GetFileKeys(fileId domain.FileId) (map[string]string, error) - RemoveEmptyFileKeys() error GetChunksCount(fileId domain.FileId) (int, error) SetChunksCount(fileId domain.FileId, chunksCount int) error @@ -230,48 +227,6 @@ func (s *dsFileStore) AddFileKeys(fileKeys ...domain.FileEncryptionKeys) error { }) } -func (s *dsFileStore) RemoveEmptyFileKeys() error { - return s.updateTxn(func(txn *badger.Txn) error { - res := localstore.GetKeys(txn, filesKeysBase.String(), 0) - - fileIds, err := localstore.GetLeavesFromResults(res) - if err != nil { - return err - } - - var removed int - for _, fileId := range fileIds { - // TODO USE TXN - v, err := s.GetFileKeys(domain.FileId(fileId)) - if err != nil { - if err != nil { - log.Errorf("RemoveEmptyFileKeys failed to get keys: %s", err) - } - continue - } - if len(v) == 0 { - removed++ - // TODO USE TXN - err = s.deleteFileKeys(domain.FileId(fileId)) - if err != nil { - log.Errorf("RemoveEmptyFileKeys failed to delete empty file keys: %s", err) - } - } - } - if removed > 0 { - log.Errorf("RemoveEmptyFileKeys removed %d empty file keys", removed) - } - return nil - }) -} - -func (s *dsFileStore) deleteFileKeys(fileId domain.FileId) error { - return s.updateTxn(func(txn *badger.Txn) error { - fileKeysKey := filesKeysBase.ChildString(fileId.String()) - return txn.Delete(fileKeysKey.Bytes()) - }) -} - func (s *dsFileStore) addSingleFileKeys(txn *badger.Txn, fileKeys domain.FileEncryptionKeys) error { fileKeysKey := filesKeysBase.ChildString(fileKeys.FileId.String()) @@ -332,12 +287,6 @@ func (s *dsFileStore) LinkFileVariantToFile(fileId domain.FileId, childId domain }) } -func (s *dsFileStore) GetFileVariant(childId domain.FileContentId) (*storage.FileInfo, error) { - return badgerhelper.ViewTxnWithResult(s.db, func(txn *badger.Txn) (*storage.FileInfo, error) { - return s.getVariant(txn, childId) - }) -} - func (s *dsFileStore) getVariant(txn *badger.Txn, childId domain.FileContentId) (*storage.FileInfo, error) { fileInfoKey := filesInfoBase.ChildString(childId.String()) file, err := badgerhelper.GetValueTxn(txn, fileInfoKey.Bytes(), unmarshalFileInfo) diff --git a/pkg/lib/pb/storage/file.pb.go b/pkg/lib/pb/storage/file.pb.go index 880b2f903a..a9e10812ba 100644 --- a/pkg/lib/pb/storage/file.pb.go +++ b/pkg/lib/pb/storage/file.pb.go @@ -144,7 +144,14 @@ func (m *FileKeys) GetKeysByPath() map[string]string { return nil } +// To details: {hash, encKey, width} +// fileVariants: [hash1, hash2] +// fileKeys: [key1, key2] +// fileWidths: [width1, width2] +// fileSource: checksum +// fileVariantChecksums: [sum1, sum2] type FileInfo struct { + Path string `protobuf:"bytes,16,opt,name=path,proto3" json:"path,omitempty"` Mill string `protobuf:"bytes,1,opt,name=mill,proto3" json:"mill,omitempty"` Checksum string `protobuf:"bytes,2,opt,name=checksum,proto3" json:"checksum,omitempty"` Source string `protobuf:"bytes,3,opt,name=source,proto3" json:"source,omitempty"` @@ -195,6 +202,13 @@ func (m *FileInfo) XXX_DiscardUnknown() { var xxx_messageInfo_FileInfo proto.InternalMessageInfo +func (m *FileInfo) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + func (m *FileInfo) GetMill() string { if m != nil { return m.Mill @@ -461,46 +475,47 @@ func init() { } var fileDescriptor_c9351ff644be6424 = []byte{ - // 620 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x02, 0xff, 0xac, 0x54, 0x4d, 0x6f, 0xd3, 0x40, - 0x10, 0xcd, 0x36, 0x4e, 0x93, 0x4c, 0xa0, 0x0d, 0x2b, 0x3e, 0x56, 0x11, 0x0a, 0x51, 0xc4, 0xc1, - 0xfd, 0x90, 0x23, 0xb5, 0x07, 0x2a, 0x24, 0x24, 0x48, 0x49, 0xa1, 0x2a, 0x15, 0xc8, 0xb9, 0x71, - 0xa9, 0x36, 0xf6, 0x24, 0x31, 0xf1, 0x97, 0xbc, 0x1b, 0x24, 0xf3, 0x2b, 0x38, 0x70, 0xe5, 0xff, - 0x70, 0xec, 0x09, 0x71, 0x03, 0xb5, 0x7f, 0x04, 0xed, 0xda, 0x8e, 0xda, 0xa6, 0x20, 0x40, 0xdc, - 0x66, 0xd6, 0x6f, 0x66, 0xde, 0xbc, 0xb7, 0x6b, 0x78, 0x18, 0xcf, 0x26, 0x3d, 0xdf, 0x1b, 0xf5, - 0xe2, 0x51, 0x4f, 0xc8, 0x28, 0xe1, 0x13, 0xec, 0xc5, 0x49, 0x24, 0x23, 0xd1, 0x1b, 0x7b, 0x3e, - 0x5a, 0x3a, 0xa6, 0xeb, 0x3c, 0x4c, 0x65, 0x1a, 0xa3, 0x95, 0x43, 0x5a, 0xf7, 0x27, 0x51, 0x34, - 0xf1, 0x73, 0xe8, 0x68, 0x3e, 0xee, 0x09, 0x99, 0xcc, 0x1d, 0x99, 0xc1, 0xbb, 0x03, 0x30, 0x86, - 0x12, 0x63, 0x4a, 0xc1, 0x08, 0x79, 0x80, 0x8c, 0x74, 0x88, 0x59, 0xb7, 0x75, 0x4c, 0x37, 0xc0, - 0xf0, 0xbd, 0x70, 0xc6, 0x56, 0x3a, 0xc4, 0x6c, 0xec, 0xdc, 0xb1, 0xae, 0x74, 0xb6, 0x5e, 0x79, - 0xe1, 0xcc, 0xd6, 0x90, 0xee, 0x27, 0x02, 0xb5, 0x03, 0xcf, 0xc7, 0x23, 0x4c, 0x05, 0x3d, 0x04, - 0x98, 0x61, 0x2a, 0xfa, 0xe9, 0x1b, 0x2e, 0xa7, 0x8c, 0x74, 0xca, 0x66, 0x63, 0x67, 0x63, 0xa9, - 0xba, 0x80, 0x5b, 0x47, 0x0b, 0xec, 0x20, 0x94, 0x49, 0x6a, 0x5f, 0x28, 0x6e, 0x3d, 0x81, 0xf5, - 0x2b, 0x9f, 0x69, 0x13, 0xca, 0x33, 0x4c, 0x73, 0xa2, 0x2a, 0xa4, 0xb7, 0xa1, 0xf2, 0x9e, 0xfb, - 0x73, 0xd4, 0x44, 0xeb, 0x76, 0x96, 0x3c, 0x5e, 0xd9, 0x23, 0xdd, 0xef, 0xe5, 0x8c, 0xd6, 0x61, - 0x38, 0x8e, 0xd4, 0x8a, 0x81, 0xe7, 0xfb, 0xc5, 0x8a, 0x2a, 0xa6, 0x2d, 0xa8, 0x39, 0x53, 0x74, - 0x66, 0x62, 0x1e, 0xe4, 0xd5, 0x8b, 0x9c, 0xde, 0x85, 0x55, 0x11, 0xcd, 0x13, 0x07, 0x59, 0x59, - 0x7f, 0xc9, 0x33, 0xd5, 0x27, 0x8a, 0xa5, 0x60, 0x46, 0xd6, 0x47, 0xc5, 0xea, 0x6c, 0xca, 0xc5, - 0x94, 0x55, 0xb2, 0x33, 0x15, 0x17, 0x44, 0x57, 0x2f, 0x11, 0x0d, 0xd0, 0xf5, 0x38, 0xab, 0x66, - 0x44, 0x75, 0xb2, 0x90, 0xbe, 0x76, 0x41, 0x7a, 0x0a, 0x86, 0xf0, 0x3e, 0x20, 0xab, 0x77, 0x88, - 0x59, 0xb6, 0x75, 0xac, 0xaa, 0xb9, 0xeb, 0xa2, 0xcb, 0x40, 0x1f, 0x66, 0x09, 0xdd, 0x02, 0x23, - 0x40, 0xc9, 0x59, 0x43, 0x9b, 0x74, 0xcf, 0xca, 0xdc, 0xb6, 0x0a, 0xb7, 0xad, 0xa1, 0x76, 0xdb, - 0xd6, 0x20, 0xca, 0xa0, 0x2a, 0x79, 0x32, 0x41, 0x29, 0xd8, 0x8d, 0x4e, 0xd9, 0xac, 0xdb, 0x45, - 0x4a, 0xfb, 0x50, 0xc5, 0xd0, 0x39, 0x8e, 0x5c, 0x64, 0x37, 0x3b, 0xc4, 0x5c, 0xdb, 0x31, 0xaf, - 0x35, 0x4c, 0x09, 0x69, 0x0d, 0x42, 0x27, 0x49, 0x63, 0xe9, 0x45, 0xa1, 0xc2, 0xdb, 0x45, 0xa1, - 0x12, 0x53, 0x4d, 0x79, 0xa9, 0x84, 0x58, 0xcb, 0xc4, 0x2c, 0x72, 0xba, 0x09, 0x4d, 0x9f, 0x0b, - 0x79, 0x1c, 0xb9, 0xde, 0xd8, 0x43, 0xf7, 0x39, 0x97, 0xc8, 0xd6, 0xf5, 0x1e, 0x4b, 0xe7, 0xdd, - 0x4d, 0x58, 0xbb, 0x3c, 0x82, 0x36, 0xa0, 0xfa, 0x6c, 0x30, 0x3c, 0x79, 0xb1, 0x7f, 0xdc, 0x2c, - 0x15, 0xc9, 0xfe, 0x41, 0xbf, 0x49, 0xba, 0x9f, 0x57, 0xe0, 0xd6, 0x61, 0xc0, 0x27, 0x68, 0xa3, - 0x12, 0x69, 0xe8, 0x4c, 0x31, 0xe0, 0xd7, 0xde, 0xe6, 0xc2, 0x7e, 0xe3, 0x82, 0xfd, 0x4f, 0x73, - 0x2b, 0x2b, 0xfa, 0x8e, 0x6e, 0x2f, 0xad, 0xbc, 0xd4, 0xd9, 0x7a, 0x1d, 0x4b, 0x91, 0x5d, 0xd3, - 0xcc, 0xf8, 0x3d, 0x68, 0xbc, 0x13, 0x51, 0x78, 0x22, 0xf4, 0x67, 0x6d, 0xf6, 0x6f, 0x5c, 0x00, - 0x85, 0xcd, 0x39, 0x6e, 0x41, 0x45, 0x3d, 0x1d, 0xc1, 0x6a, 0x7a, 0xf8, 0x2f, 0x9e, 0x57, 0x86, - 0x69, 0x3d, 0x82, 0xfa, 0x62, 0xf2, 0x5f, 0xbd, 0x80, 0xaf, 0x04, 0x0c, 0xd5, 0xe8, 0x8f, 0x25, - 0xd9, 0xbd, 0x24, 0xc9, 0x83, 0x6b, 0x59, 0xfd, 0x3f, 0x15, 0xfe, 0x79, 0xb1, 0xfe, 0xf6, 0x97, - 0xb3, 0x36, 0x39, 0x3d, 0x6b, 0x93, 0x1f, 0x67, 0x6d, 0xf2, 0xf1, 
0xbc, 0x5d, 0x3a, 0x3d, 0x6f, - 0x97, 0xbe, 0x9d, 0xb7, 0x4b, 0x6f, 0xe9, 0xf2, 0x7f, 0x72, 0xb4, 0xaa, 0x39, 0xec, 0xfe, 0x0c, - 0x00, 0x00, 0xff, 0xff, 0x9d, 0x7d, 0x6c, 0x33, 0x44, 0x05, 0x00, 0x00, + // 632 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x54, 0xcd, 0x6e, 0xd3, 0x40, + 0x10, 0xce, 0x36, 0x4e, 0x93, 0x4c, 0xa0, 0x0d, 0x2b, 0x7e, 0x56, 0x11, 0x0a, 0x51, 0xc4, 0xc1, + 0xfd, 0x91, 0x23, 0xb5, 0x07, 0x2a, 0x24, 0x24, 0x48, 0x49, 0xa1, 0x2a, 0x15, 0xc8, 0xb9, 0x71, + 0xa9, 0x36, 0xf6, 0x26, 0x31, 0xb1, 0xbd, 0x96, 0x77, 0x83, 0x64, 0x9e, 0x82, 0x03, 0x57, 0xde, + 0x87, 0x63, 0x4f, 0x88, 0x23, 0x6a, 0x1f, 0x81, 0x17, 0x40, 0xbb, 0x6b, 0x47, 0x6d, 0x53, 0x10, + 0x20, 0x6e, 0x33, 0xe3, 0x6f, 0x67, 0xbf, 0xf9, 0xbe, 0x59, 0xc3, 0xc3, 0x64, 0x36, 0xe9, 0x85, + 0xc1, 0xa8, 0x97, 0x8c, 0x7a, 0x42, 0xf2, 0x94, 0x4e, 0x58, 0x2f, 0x49, 0xb9, 0xe4, 0xa2, 0x37, + 0x0e, 0x42, 0xe6, 0xe8, 0x18, 0xaf, 0xd3, 0x38, 0x93, 0x59, 0xc2, 0x9c, 0x1c, 0xd2, 0xba, 0x3f, + 0xe1, 0x7c, 0x12, 0xe6, 0xd0, 0xd1, 0x7c, 0xdc, 0x13, 0x32, 0x9d, 0x7b, 0xd2, 0xc0, 0xbb, 0x03, + 0xb0, 0x86, 0x92, 0x25, 0x18, 0x83, 0x15, 0xd3, 0x88, 0x11, 0xd4, 0x41, 0x76, 0xdd, 0xd5, 0x31, + 0xde, 0x00, 0x2b, 0x0c, 0xe2, 0x19, 0x59, 0xe9, 0x20, 0xbb, 0xb1, 0x73, 0xc7, 0xb9, 0xd2, 0xd9, + 0x79, 0x15, 0xc4, 0x33, 0x57, 0x43, 0xba, 0x9f, 0x10, 0xd4, 0x0e, 0x82, 0x90, 0x1d, 0xb1, 0x4c, + 0xe0, 0x43, 0x80, 0x19, 0xcb, 0x44, 0x3f, 0x7b, 0x43, 0xe5, 0x94, 0xa0, 0x4e, 0xd9, 0x6e, 0xec, + 0x6c, 0x2c, 0x9d, 0x2e, 0xe0, 0xce, 0xd1, 0x02, 0x3b, 0x88, 0x65, 0x9a, 0xb9, 0x17, 0x0e, 0xb7, + 0x9e, 0xc0, 0xfa, 0x95, 0xcf, 0xb8, 0x09, 0xe5, 0x19, 0xcb, 0x72, 0xa2, 0x2a, 0xc4, 0xb7, 0xa1, + 0xf2, 0x9e, 0x86, 0x73, 0xa6, 0x89, 0xd6, 0x5d, 0x93, 0x3c, 0x5e, 0xd9, 0x43, 0xdd, 0x1f, 0x65, + 0x43, 0xeb, 0x30, 0x1e, 0x73, 0x35, 0x62, 0xa2, 0x08, 0x35, 0xcd, 0x88, 0x2a, 0x56, 0xb5, 0x28, + 0x08, 0xc3, 0x62, 0x6c, 0x15, 0xe3, 0x16, 0xd4, 0xbc, 0x29, 0xf3, 0x66, 0x62, 0x1e, 0xe5, 0x1d, + 0x17, 0x39, 0xbe, 0x0b, 0xab, 0x82, 0xcf, 0x53, 0x8f, 0x91, 0xb2, 0xfe, 0x92, 0x67, 0xaa, 0x0f, + 0x4f, 0xa4, 0x20, 0x96, 0xe9, 0xa3, 0x62, 0x55, 0x9b, 0x52, 0x31, 0x25, 0x15, 0x53, 0x53, 0x71, + 0x41, 0x7e, 0xf5, 0x12, 0xf9, 0x88, 0xf9, 0x01, 0x25, 0x55, 0x43, 0x5e, 0x27, 0x0b, 0x3b, 0x6a, + 0x17, 0xec, 0xc0, 0x60, 0x89, 0xe0, 0x03, 0x23, 0xf5, 0x0e, 0xb2, 0xcb, 0xae, 0x8e, 0xd5, 0x69, + 0xea, 0xfb, 0xcc, 0x27, 0xa0, 0x8b, 0x26, 0xc1, 0x5b, 0x60, 0x45, 0x4c, 0x52, 0xd2, 0xd0, 0xc6, + 0xdd, 0x73, 0xcc, 0x06, 0x38, 0xc5, 0x06, 0x38, 0x43, 0xbd, 0x01, 0xae, 0x06, 0x61, 0x02, 0x55, + 0x49, 0xd3, 0x09, 0x93, 0x82, 0xdc, 0xe8, 0x94, 0xed, 0xba, 0x5b, 0xa4, 0xb8, 0x0f, 0x55, 0x16, + 0x7b, 0xc7, 0xdc, 0x67, 0xe4, 0x66, 0x07, 0xd9, 0x6b, 0x3b, 0xf6, 0xb5, 0x26, 0x2a, 0x71, 0x9d, + 0x41, 0xec, 0xa5, 0x59, 0x22, 0x03, 0x1e, 0x2b, 0xbc, 0x5b, 0x1c, 0x54, 0x62, 0xaa, 0x5b, 0x5e, + 0x2a, 0x21, 0xd6, 0x8c, 0x98, 0x45, 0x8e, 0x37, 0xa1, 0x19, 0x52, 0x21, 0x8f, 0xb9, 0x1f, 0x8c, + 0x03, 0xe6, 0x3f, 0xa7, 0x92, 0x91, 0x75, 0x3d, 0xc7, 0x52, 0xbd, 0xbb, 0x09, 0x6b, 0x97, 0xaf, + 0xc0, 0x0d, 0xa8, 0x3e, 0x1b, 0x0c, 0x4f, 0x5e, 0xec, 0x1f, 0x37, 0x4b, 0x45, 0xb2, 0x7f, 0xd0, + 0x6f, 0xa2, 0xee, 0xe7, 0x15, 0xb8, 0x75, 0x18, 0xd1, 0x09, 0x73, 0x99, 0x12, 0x69, 0xe8, 0x4d, + 0x59, 0x44, 0xaf, 0xdd, 0xf0, 0xc2, 0x7e, 0xeb, 0x82, 0xfd, 0x4f, 0x73, 0x2b, 0x2b, 0x7a, 0x6f, + 0xb7, 0x97, 0x46, 0x5e, 0xea, 0xec, 0xbc, 0x4e, 0xa4, 0x30, 0xab, 0x6b, 0x8c, 0xdf, 0x83, 0xc6, + 0x3b, 0xc1, 0xe3, 0x13, 0xa1, 0x3f, 0x6b, 0xb3, 0x7f, 0xe3, 0x02, 
0x28, 0x6c, 0xce, 0x71, 0x0b, + 0x2a, 0xea, 0x39, 0x09, 0x52, 0xd3, 0x97, 0xff, 0xe2, 0xc9, 0x19, 0x4c, 0xeb, 0x11, 0xd4, 0x17, + 0x37, 0xff, 0xd5, 0xab, 0xf8, 0x8a, 0xc0, 0x52, 0x8d, 0xfe, 0x58, 0x92, 0xdd, 0x4b, 0x92, 0x3c, + 0xb8, 0x96, 0xd5, 0xff, 0x53, 0xe1, 0x9f, 0x07, 0xeb, 0x6f, 0x7f, 0x39, 0x6b, 0xa3, 0xd3, 0xb3, + 0x36, 0xfa, 0x7e, 0xd6, 0x46, 0x1f, 0xcf, 0xdb, 0xa5, 0xd3, 0xf3, 0x76, 0xe9, 0xdb, 0x79, 0xbb, + 0xf4, 0x16, 0x2f, 0xff, 0x3b, 0x47, 0xab, 0x9a, 0xc3, 0xee, 0xcf, 0x00, 0x00, 0x00, 0xff, 0xff, + 0xf7, 0xf9, 0x4a, 0xc2, 0x58, 0x05, 0x00, 0x00, } func (m *Step) Marshal() (dAtA []byte, err error) { @@ -607,6 +622,15 @@ func (m *FileInfo) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if len(m.Path) > 0 { + i -= len(m.Path) + copy(dAtA[i:], m.Path) + i = encodeVarintFile(dAtA, i, uint64(len(m.Path))) + i-- + dAtA[i] = 0x1 + i-- + dAtA[i] = 0x82 + } if m.LastModifiedDate != 0 { i = encodeVarintFile(dAtA, i, uint64(m.LastModifiedDate)) i-- @@ -973,6 +997,10 @@ func (m *FileInfo) Size() (n int) { if m.LastModifiedDate != 0 { n += 1 + sovFile(uint64(m.LastModifiedDate)) } + l = len(m.Path) + if l > 0 { + n += 2 + l + sovFile(uint64(l)) + } return n } @@ -1802,6 +1830,38 @@ func (m *FileInfo) Unmarshal(dAtA []byte) error { break } } + case 16: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Path", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowFile + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthFile + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthFile + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Path = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipFile(dAtA[iNdEx:]) diff --git a/pkg/lib/pb/storage/protos/file.proto b/pkg/lib/pb/storage/protos/file.proto index 062bca2a3e..26beeb0742 100644 --- a/pkg/lib/pb/storage/protos/file.proto +++ b/pkg/lib/pb/storage/protos/file.proto @@ -13,20 +13,27 @@ message FileKeys { map keysByPath = 1; } +// To details: {hash, encKey, width} +// fileVariants: [hash1, hash2] +// fileKeys: [key1, key2] +// fileWidths: [width1, width2] +// fileSource: checksum +// fileVariantChecksums: [sum1, sum2] message FileInfo { + string path = 16; // for retrieving key string mill = 1; string checksum = 2; string source = 3; - string opts = 4; - string hash = 5; - string key = 6; + string opts = 4; // useless? 
+ string hash = 5; // fileHashes [] + string key = 6; // fileKeys [] string media = 7; string name = 8; - int64 size = 9; + int64 size = 9; // useless for variants, used only with original file int64 added = 10; - google.protobuf.Struct meta = 11; - repeated string targets = 12; - EncryptionMode encMode = 13; + google.protobuf.Struct meta = 11; // goes straight to details, but width is used in searching for resize variant + repeated string targets = 12; // useless + EncryptionMode encMode = 13; // useless string metaHash = 14; int64 lastModifiedDate = 15; diff --git a/util/svg/rasterizer_no.go b/util/svg/rasterizer_no.go index 7e50482a85..fb371699b6 100644 --- a/util/svg/rasterizer_no.go +++ b/util/svg/rasterizer_no.go @@ -16,6 +16,7 @@ func ProcessSvg(ctx context.Context, file files.File) (io.ReadSeeker, error) { if err != nil { return nil, err } - file.Info().Media = svgMedia + // TODO Why? + // file.Info().Media = svgMedia return reader, nil } From 0e5ef079d91a53ebfc6cde57f2d218157462ef52 Mon Sep 17 00:00:00 2001 From: Sergey Date: Wed, 13 Nov 2024 17:11:01 +0100 Subject: [PATCH 03/10] GO-4486: Fix deduplication queries --- core/block/editor/files.go | 3 + core/files/files.go | 29 ++++------ core/files/images.go | 11 ---- core/files/queries.go | 83 +++++++++++++++++++++++++++ pkg/lib/bundle/relation.gen.go | 16 +++++- pkg/lib/bundle/relations.json | 9 +++ pkg/lib/localstore/filestore/files.go | 3 - 7 files changed, 122 insertions(+), 32 deletions(-) create mode 100644 core/files/queries.go diff --git a/core/block/editor/files.go b/core/block/editor/files.go index 028e3f92bf..e93c451326 100644 --- a/core/block/editor/files.go +++ b/core/block/editor/files.go @@ -123,6 +123,7 @@ func fileInfosToDetails(infos []*storage.FileInfo, st *state.State) error { widths []int checksums []string mills []string + options []string ) keysInfo := st.GetFileInfo().EncryptionKeys @@ -134,11 +135,13 @@ func fileInfosToDetails(infos []*storage.FileInfo, st *state.State) error { mills = append(mills, info.Mill) widths = append(widths, int(pbtypes.GetInt64(info.Meta, "width"))) keys = append(keys, keysInfo[info.Path]) + options = append(options, info.Opts) } st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantIds, pbtypes.StringList(variantIds)) st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantChecksums, pbtypes.StringList(checksums)) st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantMills, pbtypes.StringList(mills)) st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantWidths, pbtypes.IntList(widths...)) st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantKeys, pbtypes.StringList(keys)) + st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantOptions, pbtypes.StringList(options)) return nil } diff --git a/core/files/files.go b/core/files/files.go index 459abd8af1..0cd0990d4f 100644 --- a/core/files/files.go +++ b/core/files/files.go @@ -160,11 +160,6 @@ func (s *service) FileAdd(ctx context.Context, spaceId string, options ...AddOpt fileId := domain.FileId(rootNode.Cid().String()) addNodeResult.variant.Targets = []string{fileId.String()} - err = s.fileStore.AddFileVariant(addNodeResult.variant) - if err != nil { - addLock.Unlock() - return nil, err - } fileKeys := domain.FileEncryptionKeys{ FileId: fileId, @@ -345,14 +340,11 @@ type addFileNodeResult struct { filePairNode ipld.Node } -func newExistingFileResult(variant *storage.FileInfo) (*addFileNodeResult, error) { - if len(variant.Targets) > 0 { - return &addFileNodeResult{ - isExisting: true, - fileId: 
domain.FileId(variant.Targets[0]), - }, nil - } - return nil, fmt.Errorf("file exists but has no targets") +func newExistingFileResult(fileId domain.FileId, variant *storage.FileInfo) (*addFileNodeResult, error) { + return &addFileNodeResult{ + isExisting: true, + fileId: fileId, + }, nil } func newAddedFileResult(variant *storage.FileInfo, fileNode ipld.Node) (*addFileNodeResult, error) { @@ -376,8 +368,9 @@ func (s *service) addFileNode(ctx context.Context, spaceID string, mill m.Mill, return nil, err } - if variant, err := s.fileStore.GetFileVariantBySource(mill.ID(), conf.checksum, opts); err == nil { - existingRes, err := newExistingFileResult(variant) + // TODO USE SPACE ID? + if existingFileId, variant, err := s.getFileVariantBySourceChecksum(mill.ID(), conf.checksum, opts); err == nil { + existingRes, err := newExistingFileResult(existingFileId, variant) if err == nil { return existingRes, nil } @@ -395,12 +388,13 @@ func (s *service) addFileNode(ctx context.Context, spaceID string, mill m.Mill, return nil, err } - if variant, err := s.fileStore.GetFileVariantByChecksum(mill.ID(), variantChecksum); err == nil { + // TODO USE SPACE ID? + if existingFileId, variant, err := s.getFileVariantByChecksum(mill.ID(), variantChecksum); err == nil { if variant.Source == conf.checksum { // we may have same variant checksum for different files // e.g. empty image exif with the same resolution // reuse the whole file only in case the checksum of the original file is the same - existingRes, err := newExistingFileResult(variant) + existingRes, err := newExistingFileResult(existingFileId, variant) if err == nil { return existingRes, nil } @@ -706,6 +700,7 @@ func getFileInfosFromDetails(details *types.Struct) []*storage.FileInfo { Mill: pbtypes.GetStringList(details, bundle.RelationKeyFileVariantMills.String())[i], Meta: meta, Key: pbtypes.GetStringList(details, bundle.RelationKeyFileVariantKeys.String())[i], + Opts: pbtypes.GetStringList(details, bundle.RelationKeyFileVariantOptions.String())[i], } infos = append(infos, info) } diff --git a/core/files/images.go b/core/files/images.go index a5228cbe8f..434033bca7 100644 --- a/core/files/images.go +++ b/core/files/images.go @@ -12,7 +12,6 @@ import ( "github.com/anyproto/anytype-heart/pkg/lib/bundle" "github.com/anyproto/anytype-heart/pkg/lib/database" "github.com/anyproto/anytype-heart/pkg/lib/ipfs/helpers" - "github.com/anyproto/anytype-heart/pkg/lib/localstore" "github.com/anyproto/anytype-heart/pkg/lib/mill/schema" "github.com/anyproto/anytype-heart/pkg/lib/pb/model" "github.com/anyproto/anytype-heart/pkg/lib/pb/storage" @@ -103,16 +102,6 @@ func (s *service) ImageAdd(ctx context.Context, spaceId string, options ...AddOp successfullyAdded := make([]domain.FileContentId, 0, len(dirEntries)) for _, variant := range dirEntries { variant.fileInfo.Targets = []string{id.FileId.String()} - err = s.fileStore.AddFileVariant(variant.fileInfo) - if err != nil && !errors.Is(err, localstore.ErrDuplicateKey) { - // Cleanup - deleteErr := s.fileStore.DeleteFileVariants(successfullyAdded) - if deleteErr != nil { - log.Errorf("cleanup: failed to delete file variants %s", deleteErr) - } - addLock.Unlock() - return nil, fmt.Errorf("failed to store file variant: %w", err) - } successfullyAdded = append(successfullyAdded, domain.FileContentId(variant.fileInfo.Hash)) } diff --git a/core/files/queries.go b/core/files/queries.go new file mode 100644 index 0000000000..102a10f0d2 --- /dev/null +++ b/core/files/queries.go @@ -0,0 +1,83 @@ +package files + +import ( + 
"fmt" + + "github.com/anyproto/anytype-heart/core/domain" + "github.com/anyproto/anytype-heart/pkg/lib/bundle" + "github.com/anyproto/anytype-heart/pkg/lib/database" + "github.com/anyproto/anytype-heart/pkg/lib/pb/model" + "github.com/anyproto/anytype-heart/pkg/lib/pb/storage" + "github.com/anyproto/anytype-heart/util/pbtypes" +) + +func (s *service) getFileVariantBySourceChecksum(mill string, sourceChecksum string, options string) (domain.FileId, *storage.FileInfo, error) { + recs, err := s.objectStore.QueryCrossSpace(database.Query{ + Filters: []*model.BlockContentDataviewFilter{ + { + RelationKey: bundle.RelationKeyFileVariantMills.String(), + Condition: model.BlockContentDataviewFilter_Equal, + Value: pbtypes.String(mill), + }, + { + RelationKey: bundle.RelationKeyFileSourceChecksum.String(), + Condition: model.BlockContentDataviewFilter_Equal, + Value: pbtypes.String(sourceChecksum), + }, + { + RelationKey: bundle.RelationKeyFileVariantOptions.String(), + Condition: model.BlockContentDataviewFilter_Equal, + Value: pbtypes.String(options), + }, + }, + Limit: 1, + }) + if err != nil { + return "", nil, err + } + if len(recs) == 0 { + return "", nil, fmt.Errorf("variant not found") + } + + infos := getFileInfosFromDetails(recs[0].Details) + for _, info := range infos { + if info.Mill == mill && info.Opts == options { + return domain.FileId(pbtypes.GetString(recs[0].Details, bundle.RelationKeyFileId.String())), info, nil + } + } + // Should never happen + return "", nil, fmt.Errorf("variant with specified mill not found") +} + +func (s *service) getFileVariantByChecksum(mill string, variantChecksum string) (domain.FileId, *storage.FileInfo, error) { + recs, err := s.objectStore.QueryCrossSpace(database.Query{ + Filters: []*model.BlockContentDataviewFilter{ + { + RelationKey: bundle.RelationKeyFileVariantMills.String(), + Condition: model.BlockContentDataviewFilter_Equal, + Value: pbtypes.String(mill), + }, + { + RelationKey: bundle.RelationKeyFileVariantChecksums.String(), + Condition: model.BlockContentDataviewFilter_Equal, + Value: pbtypes.String(variantChecksum), + }, + }, + Limit: 1, + }) + if err != nil { + return "", nil, err + } + if len(recs) == 0 { + return "", nil, fmt.Errorf("variant not found") + } + + infos := getFileInfosFromDetails(recs[0].Details) + for _, info := range infos { + if info.Mill == mill && info.Checksum == variantChecksum { + return domain.FileId(pbtypes.GetString(recs[0].Details, bundle.RelationKeyFileId.String())), info, nil + } + } + // Should never happen + return "", nil, fmt.Errorf("variant with specified mill not found") +} diff --git a/pkg/lib/bundle/relation.gen.go b/pkg/lib/bundle/relation.gen.go index 04b36de24b..5706a81c89 100644 --- a/pkg/lib/bundle/relation.gen.go +++ b/pkg/lib/bundle/relation.gen.go @@ -9,7 +9,7 @@ import ( "github.com/anyproto/anytype-heart/pkg/lib/pb/model" ) -const RelationChecksum = "a1a9658be3a273287ef31208987aeb19a724955011b3f567aa7b8eccd95f33e2" +const RelationChecksum = "f402a8788a9eb5cb3c58e0850101e561f3870969e07ba6bf568fed9efd256606" const ( RelationKeyTag domain.RelationKey = "tag" RelationKeyCamera domain.RelationKey = "camera" @@ -149,6 +149,7 @@ const ( RelationKeyFileVariantWidths domain.RelationKey = "fileVariantWidths" RelationKeyFileVariantChecksums domain.RelationKey = "fileVariantChecksums" RelationKeyFileVariantMills domain.RelationKey = "fileVariantMills" + RelationKeyFileVariantOptions domain.RelationKey = "fileVariantOptions" RelationKeyFileSourceChecksum domain.RelationKey = "fileSourceChecksum" ) @@ 
-691,6 +692,19 @@ var ( ReadOnlyRelation: true, Scope: model.Relation_type, }, + RelationKeyFileVariantOptions: { + + DataSource: model.Relation_details, + Description: "", + Format: model.RelationFormat_shorttext, + Hidden: true, + Id: "_brfileVariantOptions", + Key: "fileVariantOptions", + Name: "Underlying file id", + ReadOnly: true, + ReadOnlyRelation: true, + Scope: model.Relation_type, + }, RelationKeyFileVariantWidths: { DataSource: model.Relation_details, diff --git a/pkg/lib/bundle/relations.json b/pkg/lib/bundle/relations.json index 3f4caa4fb2..99ae8aea81 100644 --- a/pkg/lib/bundle/relations.json +++ b/pkg/lib/bundle/relations.json @@ -1406,6 +1406,15 @@ "readonly": true, "source": "details" }, + { + "format": "shorttext", + "hidden": true, + "key": "fileVariantOptions", + "maxCount": 0, + "name": "Underlying file id", + "readonly": true, + "source": "details" + }, { "format": "shorttext", "hidden": true, diff --git a/pkg/lib/localstore/filestore/files.go b/pkg/lib/localstore/filestore/files.go index d643c0ec3e..edeb5507e3 100644 --- a/pkg/lib/localstore/filestore/files.go +++ b/pkg/lib/localstore/filestore/files.go @@ -89,9 +89,6 @@ type FileStore interface { app.ComponentRunnable localstore.Indexable - AddFileVariant(file *storage.FileInfo) error - GetFileVariantBySource(mill string, source string, opts string) (*storage.FileInfo, error) - GetFileVariantByChecksum(mill string, checksum string) (*storage.FileInfo, error) DeleteFileVariants(variantIds []domain.FileContentId) error ListFileIds() ([]domain.FileId, error) From 9db84efdfad32b6f15778d8d0500059afe8802bd Mon Sep 17 00:00:00 2001 From: Sergey Date: Wed, 13 Nov 2024 18:57:42 +0100 Subject: [PATCH 04/10] GO-4486: Fix indexing --- core/block/editor/files.go | 47 ++++--------------- core/files/fileobject/fileindex.go | 26 +++++----- .../fileobject/filemodels/filerequest.go | 37 +++++++++++++++ core/files/fileobject/service.go | 3 +- core/files/files.go | 29 ++++++++---- core/files/images.go | 22 +++++++-- 6 files changed, 101 insertions(+), 63 deletions(-) diff --git a/core/block/editor/files.go b/core/block/editor/files.go index e93c451326..e4d9359729 100644 --- a/core/block/editor/files.go +++ b/core/block/editor/files.go @@ -13,11 +13,11 @@ import ( "github.com/anyproto/anytype-heart/core/domain" "github.com/anyproto/anytype-heart/core/files" "github.com/anyproto/anytype-heart/core/files/fileobject" + "github.com/anyproto/anytype-heart/core/files/fileobject/filemodels" "github.com/anyproto/anytype-heart/core/files/reconciler" "github.com/anyproto/anytype-heart/core/filestorage" "github.com/anyproto/anytype-heart/pkg/lib/bundle" coresb "github.com/anyproto/anytype-heart/pkg/lib/core/smartblock" - "github.com/anyproto/anytype-heart/pkg/lib/pb/storage" "github.com/anyproto/anytype-heart/util/pbtypes" ) @@ -89,10 +89,11 @@ func (f *File) Init(ctx *smartblock.InitContext) error { return err } - f.SmartBlock.AddHook(f.reconciler.FileObjectHook(domain.FullID{SpaceID: f.SpaceID(), ObjectID: f.Id()}), smartblock.HookBeforeApply) + fullId := domain.FullID{SpaceID: f.SpaceID(), ObjectID: f.Id()} + + f.SmartBlock.AddHook(f.reconciler.FileObjectHook(fullId), smartblock.HookBeforeApply) if !ctx.IsNewObject { - fullId := domain.FullID{ObjectID: f.Id(), SpaceID: f.SpaceID()} err = f.fileObjectService.EnsureFileAddedToSyncQueue(fullId, ctx.State.Details()) if err != nil { log.Errorf("failed to ensure file added to sync queue: %v", err) @@ -102,46 +103,16 @@ func (f *File) Init(ctx *smartblock.InitContext) error { }, 
smartblock.HookOnStateRebuild) } - infos, err := f.fileService.IndexFile(ctx.Ctx, ctx.State.Details()) + infos, err := f.fileService.IndexFile(ctx.Ctx, domain.FullFileId{ + FileId: domain.FileId(pbtypes.GetString(ctx.State.Details(), bundle.RelationKeyFileId.String())), + SpaceId: f.SpaceID(), + }, ctx.State.Details()) if err != nil { return fmt.Errorf("get infos for indexing: %w", err) } if len(infos) > 0 { - fileInfosToDetails(infos, ctx.State) + filemodels.FileInfosToDetails(infos, ctx.State) } return nil } - -func fileInfosToDetails(infos []*storage.FileInfo, st *state.State) error { - if len(infos) == 0 { - return fmt.Errorf("empty info list") - } - var ( - variantIds []string - keys []string // fill in smartblock? - widths []int - checksums []string - mills []string - options []string - ) - - keysInfo := st.GetFileInfo().EncryptionKeys - - st.SetDetailAndBundledRelation(bundle.RelationKeyFileSourceChecksum, pbtypes.String(infos[0].Source)) - for _, info := range infos { - variantIds = append(variantIds, info.Hash) - checksums = append(checksums, info.Checksum) - mills = append(mills, info.Mill) - widths = append(widths, int(pbtypes.GetInt64(info.Meta, "width"))) - keys = append(keys, keysInfo[info.Path]) - options = append(options, info.Opts) - } - st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantIds, pbtypes.StringList(variantIds)) - st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantChecksums, pbtypes.StringList(checksums)) - st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantMills, pbtypes.StringList(mills)) - st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantWidths, pbtypes.IntList(widths...)) - st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantKeys, pbtypes.StringList(keys)) - st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantOptions, pbtypes.StringList(options)) - return nil -} diff --git a/core/files/fileobject/fileindex.go b/core/files/fileobject/fileindex.go index 22f64dac93..523619e775 100644 --- a/core/files/fileobject/fileindex.go +++ b/core/files/fileobject/fileindex.go @@ -20,12 +20,14 @@ import ( fileblock "github.com/anyproto/anytype-heart/core/block/simple/file" "github.com/anyproto/anytype-heart/core/domain" "github.com/anyproto/anytype-heart/core/files" + "github.com/anyproto/anytype-heart/core/files/fileobject/filemodels" "github.com/anyproto/anytype-heart/core/filestorage/rpcstore" "github.com/anyproto/anytype-heart/pkg/lib/bundle" "github.com/anyproto/anytype-heart/pkg/lib/database" "github.com/anyproto/anytype-heart/pkg/lib/localstore/objectstore" "github.com/anyproto/anytype-heart/pkg/lib/mill" "github.com/anyproto/anytype-heart/pkg/lib/pb/model" + "github.com/anyproto/anytype-heart/pkg/lib/pb/storage" "github.com/anyproto/anytype-heart/space" "github.com/anyproto/anytype-heart/util/pbtypes" ) @@ -235,13 +237,21 @@ func (ind *indexer) indexFile(ctx context.Context, id domain.FullID, fileId doma } func (ind *indexer) injectMetadataToState(ctx context.Context, st *state.State, fileId domain.FullFileId, id domain.FullID) error { - details, typeKey, err := ind.buildDetails(ctx, fileId) + infos, err := ind.fileService.IndexFile(ctx, fileId, st.Details()) + if err != nil { + return fmt.Errorf("get infos for indexing: %w", err) + } + if len(infos) > 0 { + filemodels.FileInfosToDetails(infos, st) + } + + prevDetails := st.CombinedDetails() + details, typeKey, err := ind.buildDetails(ctx, fileId, infos) if err != nil { return fmt.Errorf("build details: %w", err) } st.SetObjectTypeKey(typeKey) - prevDetails := 
st.CombinedDetails() keys := make([]domain.RelationKey, 0, len(details.Fields)) for k := range details.Fields { @@ -259,11 +269,8 @@ func (ind *indexer) injectMetadataToState(ctx context.Context, st *state.State, return nil } -func (ind *indexer) buildDetails(ctx context.Context, id domain.FullFileId) (details *types.Struct, typeKey domain.TypeKey, err error) { - file, err := ind.fileService.FileByHash(ctx, id) - if err != nil { - return nil, "", err - } +func (ind *indexer) buildDetails(ctx context.Context, id domain.FullFileId, infos []*storage.FileInfo) (details *types.Struct, typeKey domain.TypeKey, err error) { + file := ind.fileService.FileFromInfos(id, infos) if file.Mill() == mill.BlobId { details, typeKey, err = file.Details(ctx) @@ -271,10 +278,7 @@ func (ind *indexer) buildDetails(ctx context.Context, id domain.FullFileId) (det return nil, "", err } } else { - image, err := ind.fileService.ImageByHash(ctx, id) - if err != nil { - return nil, "", err - } + image := ind.fileService.ImageFromInfos(id, infos) details, err = image.Details(ctx) if err != nil { return nil, "", err diff --git a/core/files/fileobject/filemodels/filerequest.go b/core/files/fileobject/filemodels/filerequest.go index 97082c8db8..edec33e119 100644 --- a/core/files/fileobject/filemodels/filerequest.go +++ b/core/files/fileobject/filemodels/filerequest.go @@ -5,9 +5,13 @@ import ( "github.com/gogo/protobuf/types" + "github.com/anyproto/anytype-heart/core/block/editor/state" "github.com/anyproto/anytype-heart/core/domain" "github.com/anyproto/anytype-heart/core/domain/objectorigin" + "github.com/anyproto/anytype-heart/pkg/lib/bundle" "github.com/anyproto/anytype-heart/pkg/lib/pb/model" + "github.com/anyproto/anytype-heart/pkg/lib/pb/storage" + "github.com/anyproto/anytype-heart/util/pbtypes" ) type CreateRequest struct { @@ -23,3 +27,36 @@ var ( ErrObjectNotFound = fmt.Errorf("file object not found") ErrEmptyFileId = fmt.Errorf("empty file id") ) + +func FileInfosToDetails(infos []*storage.FileInfo, st *state.State) error { + if len(infos) == 0 { + return fmt.Errorf("empty info list") + } + var ( + variantIds []string + keys []string // fill in smartblock? 
+ widths []int + checksums []string + mills []string + options []string + ) + + keysInfo := st.GetFileInfo().EncryptionKeys + + st.SetDetailAndBundledRelation(bundle.RelationKeyFileSourceChecksum, pbtypes.String(infos[0].Source)) + for _, info := range infos { + variantIds = append(variantIds, info.Hash) + checksums = append(checksums, info.Checksum) + mills = append(mills, info.Mill) + widths = append(widths, int(pbtypes.GetInt64(info.Meta, "width"))) + keys = append(keys, keysInfo[info.Path]) + options = append(options, info.Opts) + } + st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantIds, pbtypes.StringList(variantIds)) + st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantChecksums, pbtypes.StringList(checksums)) + st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantMills, pbtypes.StringList(mills)) + st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantWidths, pbtypes.IntList(widths...)) + st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantKeys, pbtypes.StringList(keys)) + st.SetDetailAndBundledRelation(bundle.RelationKeyFileVariantOptions, pbtypes.StringList(options)) + return nil +} diff --git a/core/files/fileobject/service.go b/core/files/fileobject/service.go index b1f1429705..0d832a949c 100644 --- a/core/files/fileobject/service.go +++ b/core/files/fileobject/service.go @@ -315,7 +315,8 @@ func (s *service) createInSpace(ctx context.Context, space clientspace.Space, re s.InitEmptyFileState(createState) fullFileId := domain.FullFileId{SpaceId: space.Id(), FileId: req.FileId} fullObjectId := domain.FullID{SpaceID: space.Id(), ObjectID: payload.RootRawChange.Id} - err := s.indexer.injectMetadataToState(ctx, createState, fullFileId, fullObjectId) + + err = s.indexer.injectMetadataToState(ctx, createState, fullFileId, fullObjectId) if err != nil { return "", nil, fmt.Errorf("inject metadata to state: %w", err) } diff --git a/core/files/files.go b/core/files/files.go index 0cd0990d4f..3472a30be6 100644 --- a/core/files/files.go +++ b/core/files/files.go @@ -55,6 +55,7 @@ type Service interface { FileAdd(ctx context.Context, spaceID string, options ...AddOption) (*AddResult, error) // buildDetails (fileindex.go), gateway, export, DownloadFile FileByHash(ctx context.Context, id domain.FullFileId) (File, error) + FileFromInfos(fileId domain.FullFileId, infos []*storage.FileInfo) File FileGetKeys(id domain.FileId) (*domain.FileEncryptionKeys, error) GetSpaceUsage(ctx context.Context, spaceID string) (*pb.RpcFileSpaceUsageResponseUsage, error) GetNodeUsage(ctx context.Context) (*NodeUsageResponse, error) @@ -62,8 +63,9 @@ type Service interface { ImageAdd(ctx context.Context, spaceID string, options ...AddOption) (*AddResult, error) // buildDetails (fileindex.go), gateway, export, DownloadFile, html converter (clipboard) ImageByHash(ctx context.Context, id domain.FullFileId) (Image, error) + ImageFromInfos(fileId domain.FullFileId, infos []*storage.FileInfo) Image - IndexFile(ctx context.Context, details *types.Struct) ([]*storage.FileInfo, error) + IndexFile(ctx context.Context, fileId domain.FullFileId, details *types.Struct) ([]*storage.FileInfo, error) app.Component } @@ -612,11 +614,7 @@ func getEncryptorDecryptor(key symmetric.Key) (symmetric.EncryptorDecryptor, err return cfb.New(key, [aes.BlockSize]byte{}), nil } -func (s *service) IndexFile(ctx context.Context, details *types.Struct) ([]*storage.FileInfo, error) { - id := domain.FullFileId{ - SpaceId: pbtypes.GetString(details, bundle.RelationKeySpaceId.String()), - FileId: 
domain.FileId(pbtypes.GetString(details, bundle.RelationKeyFileId.String())), - } +func (s *service) IndexFile(ctx context.Context, id domain.FullFileId, details *types.Struct) ([]*storage.FileInfo, error) { variantsList := pbtypes.GetStringList(details, bundle.RelationKeyFileVariantIds.String()) if true || len(variantsList) == 0 { // info from ipfs @@ -661,18 +659,31 @@ func (s *service) FileByHash(ctx context.Context, id domain.FullFileId) (File, e } fileRec := recs[0] - variantsList := pbtypes.GetStringList(fileRec.Details, bundle.RelationKeyFileVariantIds.String()) + return s.fileFromDetails(fileRec.Details) +} + +func (s *service) fileFromDetails(details *types.Struct) (File, error) { + variantsList := pbtypes.GetStringList(details, bundle.RelationKeyFileVariantIds.String()) if len(variantsList) == 0 { return nil, fmt.Errorf("not indexed") } - infos := getFileInfosFromDetails(fileRec.Details) + infos := getFileInfosFromDetails(details) + return &file{ + spaceID: pbtypes.GetString(details, bundle.RelationKeySpaceId.String()), + fileId: domain.FileId(pbtypes.GetString(details, bundle.RelationKeyFileId.String())), + info: infos[0], + node: s, + }, nil +} + +func (s *service) FileFromInfos(id domain.FullFileId, infos []*storage.FileInfo) File { return &file{ spaceID: id.SpaceId, fileId: id.FileId, info: infos[0], node: s, - }, nil + } } func getFileInfosFromDetails(details *types.Struct) []*storage.FileInfo { diff --git a/core/files/images.go b/core/files/images.go index 434033bca7..f045ac0768 100644 --- a/core/files/images.go +++ b/core/files/images.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" + "github.com/gogo/protobuf/types" uio "github.com/ipfs/boxo/ipld/unixfs/io" ipld "github.com/ipfs/go-ipld-format" @@ -37,20 +38,33 @@ func (s *service) ImageByHash(ctx context.Context, id domain.FullFileId) (Image, } fileRec := recs[0] - variantsList := pbtypes.GetStringList(fileRec.Details, bundle.RelationKeyFileVariantIds.String()) + return s.imageFromDetails(fileRec.Details) +} + +func (s *service) imageFromDetails(details *types.Struct) (Image, error) { + variantsList := pbtypes.GetStringList(details, bundle.RelationKeyFileVariantIds.String()) if len(variantsList) == 0 { return nil, fmt.Errorf("not indexed") } - infos := getFileInfosFromDetails(fileRec.Details) + infos := getFileInfosFromDetails(details) return &image{ - spaceID: id.SpaceId, - fileId: id.FileId, + spaceID: pbtypes.GetString(details, bundle.RelationKeySpaceId.String()), + fileId: domain.FileId(pbtypes.GetString(details, bundle.RelationKeyFileId.String())), onlyResizeVariants: selectAndSortResizeVariants(infos), service: s, }, nil } +func (s *service) ImageFromInfos(fileId domain.FullFileId, infos []*storage.FileInfo) Image { + return &image{ + spaceID: fileId.SpaceId, + fileId: fileId.FileId, + onlyResizeVariants: selectAndSortResizeVariants(infos), + service: s, + } +} + func (s *service) ImageAdd(ctx context.Context, spaceId string, options ...AddOption) (*AddResult, error) { opts := AddOptions{} for _, opt := range options { From 62b33a5cdc5ccf4ed22d829f4f983173f10d6360 Mon Sep 17 00:00:00 2001 From: Sergey Date: Wed, 13 Nov 2024 20:15:38 +0100 Subject: [PATCH 05/10] GO-4389: Remove file store methods --- core/files/fileobject/service.go | 3 -- core/files/fileoffloader/offloader.go | 4 -- core/files/files.go | 33 +++++++------- core/files/image.go | 62 +++++++++++---------------- core/files/images.go | 36 +++++++--------- core/files/queries.go | 20 +++------ pkg/lib/localstore/filestore/files.go | 8 ---- 7 files changed, 62 
insertions(+), 104 deletions(-) diff --git a/core/files/fileobject/service.go b/core/files/fileobject/service.go index 0d832a949c..8365079c94 100644 --- a/core/files/fileobject/service.go +++ b/core/files/fileobject/service.go @@ -531,9 +531,6 @@ func (s *service) DeleteFileData(spaceId string, objectId string) error { return fmt.Errorf("list objects that use file id: %w", err) } if len(records) == 0 { - if err := s.fileStore.DeleteFile(fullId.FileId); err != nil { - return err - } if err := s.fileSync.DeleteFile(objectId, fullId); err != nil { return fmt.Errorf("failed to remove file from sync: %w", err) } diff --git a/core/files/fileoffloader/offloader.go b/core/files/fileoffloader/offloader.go index b32d595291..e0fa5b7782 100644 --- a/core/files/fileoffloader/offloader.go +++ b/core/files/fileoffloader/offloader.go @@ -176,10 +176,6 @@ func (s *service) FileSpaceOffload(ctx context.Context, spaceId string, includeN } if size > 0 { filesOffloaded++ - err = s.fileStore.DeleteFile(domain.FileId(fileId)) - if err != nil { - return 0, 0, fmt.Errorf("failed to delete file from store: %w", err) - } } totalSize += size } diff --git a/core/files/files.go b/core/files/files.go index 3472a30be6..ce9e88b886 100644 --- a/core/files/files.go +++ b/core/files/files.go @@ -146,7 +146,7 @@ func (s *service) FileAdd(ctx context.Context, spaceId string, options ...AddOpt return nil, err } if addNodeResult.isExisting { - res, err := s.newExistingFileResult(addLock, addNodeResult.fileId) + res, err := s.newExistingFileResult(addLock, addNodeResult.fileId, addNodeResult.existingVariants) if err != nil { addLock.Unlock() return nil, err @@ -182,15 +182,11 @@ func (s *service) FileAdd(ctx context.Context, spaceId string, options ...AddOpt }, nil } -func (s *service) newExistingFileResult(lock *sync.Mutex, fileId domain.FileId) (*AddResult, error) { +func (s *service) newExistingFileResult(lock *sync.Mutex, fileId domain.FileId, variants []*storage.FileInfo) (*AddResult, error) { keys, err := s.FileGetKeys(fileId) if err != nil { return nil, fmt.Errorf("get keys: %w", err) } - variants, err := s.fileStore.ListFileVariants(fileId) - if err != nil { - return nil, fmt.Errorf("list variants: %w", err) - } if len(variants) == 0 { return nil, fmt.Errorf("variants not found") } @@ -335,17 +331,20 @@ func (s *service) getContentReader(ctx context.Context, spaceID string, rawCid s } type addFileNodeResult struct { - isExisting bool - fileId domain.FileId - variant *storage.FileInfo + isExisting bool + existingVariants []*storage.FileInfo + + fileId domain.FileId + variant *storage.FileInfo // filePairNode is the root node for meta + content file nodes filePairNode ipld.Node } -func newExistingFileResult(fileId domain.FileId, variant *storage.FileInfo) (*addFileNodeResult, error) { +func newExistingFileResult(fileId domain.FileId, variants []*storage.FileInfo) (*addFileNodeResult, error) { return &addFileNodeResult{ - isExisting: true, - fileId: fileId, + isExisting: true, + existingVariants: variants, + fileId: fileId, }, nil } @@ -370,9 +369,8 @@ func (s *service) addFileNode(ctx context.Context, spaceID string, mill m.Mill, return nil, err } - // TODO USE SPACE ID? 
- if existingFileId, variant, err := s.getFileVariantBySourceChecksum(mill.ID(), conf.checksum, opts); err == nil { - existingRes, err := newExistingFileResult(existingFileId, variant) + if existingFileId, variants, err := s.getFileVariantBySourceChecksum(mill.ID(), conf.checksum, opts); err == nil { + existingRes, err := newExistingFileResult(existingFileId, variants) if err == nil { return existingRes, nil } @@ -390,13 +388,12 @@ func (s *service) addFileNode(ctx context.Context, spaceID string, mill m.Mill, return nil, err } - // TODO USE SPACE ID? - if existingFileId, variant, err := s.getFileVariantByChecksum(mill.ID(), variantChecksum); err == nil { + if existingFileId, variant, variants, err := s.getFileVariantByChecksum(mill.ID(), variantChecksum); err == nil { if variant.Source == conf.checksum { // we may have same variant checksum for different files // e.g. empty image exif with the same resolution // reuse the whole file only in case the checksum of the original file is the same - existingRes, err := newExistingFileResult(existingFileId, variant) + existingRes, err := newExistingFileResult(existingFileId, variants) if err == nil { return existingRes, nil } diff --git a/core/files/image.go b/core/files/image.go index 61ed1232dd..7959f5c743 100644 --- a/core/files/image.go +++ b/core/files/image.go @@ -34,9 +34,26 @@ type image struct { fileId domain.FileId spaceID string onlyResizeVariants []*storage.FileInfo + exifVariant *storage.FileInfo service *service } +func newImage(service *service, id domain.FullFileId, variants []*storage.FileInfo) Image { + var exifVariant *storage.FileInfo + for _, variant := range variants { + if variant.Mill == mill.ImageExifId { + exifVariant = variant + } + } + return &image{ + service: service, + fileId: id.FileId, + spaceID: id.SpaceId, + onlyResizeVariants: selectAndSortResizeVariants(variants), + exifVariant: exifVariant, + } +} + func selectAndSortResizeVariants(variants []*storage.FileInfo) []*storage.FileInfo { onlyResizeVariants := variants[:0] for _, variant := range variants { @@ -52,46 +69,25 @@ func selectAndSortResizeVariants(variants []*storage.FileInfo) []*storage.FileIn return onlyResizeVariants } -func (i *image) listResizeVariants() ([]*storage.FileInfo, error) { - if i.onlyResizeVariants != nil { - return i.onlyResizeVariants, nil - } - variants, err := i.service.fileStore.ListFileVariants(i.fileId) - if err != nil { - return nil, fmt.Errorf("get variants: %w", err) - } - i.onlyResizeVariants = selectAndSortResizeVariants(variants) - return i.onlyResizeVariants, nil -} - func (i *image) getLargestVariant() (*storage.FileInfo, error) { - onlyResizeVariants, err := i.listResizeVariants() - if err != nil { - return nil, fmt.Errorf("list resize variants: %w", err) - } - if len(onlyResizeVariants) == 0 { + if len(i.onlyResizeVariants) == 0 { return nil, errors.New("no resize variants") } - return onlyResizeVariants[len(onlyResizeVariants)-1], nil + return i.onlyResizeVariants[len(i.onlyResizeVariants)-1], nil } func (i *image) getVariantForWidth(wantWidth int) (*storage.FileInfo, error) { - onlyResizeVariants, err := i.listResizeVariants() - if err != nil { - return nil, fmt.Errorf("list resize variants: %w", err) - } - - if len(onlyResizeVariants) == 0 { + if len(i.onlyResizeVariants) == 0 { return nil, errors.New("no resize variants") } - for _, variant := range onlyResizeVariants { + for _, variant := range i.onlyResizeVariants { if getVariantWidth(variant) >= wantWidth { return variant, nil } } // return largest if no more 
suitable variant found - return onlyResizeVariants[len(onlyResizeVariants)-1], nil + return i.onlyResizeVariants[len(i.onlyResizeVariants)-1], nil } func getVariantWidth(variantInfo *storage.FileInfo) int { @@ -130,24 +126,14 @@ func (i *image) FileId() domain.FileId { } func (i *image) getExif(ctx context.Context) (*mill.ImageExifSchema, error) { - variants, err := i.service.fileStore.ListFileVariants(i.fileId) - if err != nil { - return nil, fmt.Errorf("get variants: %w", err) - } - var variant *storage.FileInfo - for _, v := range variants { - if v.Mill == mill.ImageExifId { - variant = v - } - } - if variant == nil { + if i.exifVariant == nil { return nil, fmt.Errorf("exif variant not found") } f := &file{ spaceID: i.spaceID, fileId: i.fileId, - info: variant, + info: i.exifVariant, node: i.service, } r, err := f.Reader(ctx) diff --git a/core/files/images.go b/core/files/images.go index f045ac0768..fb584f7cab 100644 --- a/core/files/images.go +++ b/core/files/images.go @@ -48,21 +48,15 @@ func (s *service) imageFromDetails(details *types.Struct) (Image, error) { } infos := getFileInfosFromDetails(details) - return &image{ - spaceID: pbtypes.GetString(details, bundle.RelationKeySpaceId.String()), - fileId: domain.FileId(pbtypes.GetString(details, bundle.RelationKeyFileId.String())), - onlyResizeVariants: selectAndSortResizeVariants(infos), - service: s, - }, nil + id := domain.FullFileId{ + SpaceId: pbtypes.GetString(details, bundle.RelationKeySpaceId.String()), + FileId: domain.FileId(pbtypes.GetString(details, bundle.RelationKeyFileId.String())), + } + return newImage(s, id, infos), nil } func (s *service) ImageFromInfos(fileId domain.FullFileId, infos []*storage.FileInfo) Image { - return &image{ - spaceID: fileId.SpaceId, - fileId: fileId.FileId, - onlyResizeVariants: selectAndSortResizeVariants(infos), - service: s, - } + return newImage(s, fileId, infos) } func (s *service) ImageAdd(ctx context.Context, spaceId string, options ...AddOption) (*AddResult, error) { @@ -83,7 +77,7 @@ func (s *service) ImageAdd(ctx context.Context, spaceId string, options ...AddOp return nil, err } if addNodesResult.isExisting { - res, err := s.newExistingFileResult(addLock, addNodesResult.fileId) + res, err := s.newExistingFileResult(addLock, addNodesResult.fileId, addNodesResult.existingVariants) if err != nil { addLock.Unlock() return nil, err @@ -130,15 +124,17 @@ func (s *service) ImageAdd(ctx context.Context, spaceId string, options ...AddOp } type addImageNodesResult struct { - isExisting bool - fileId domain.FileId - dirEntries []dirEntry + isExisting bool + fileId domain.FileId + dirEntries []dirEntry + existingVariants []*storage.FileInfo } -func newExistingImageResult(fileId domain.FileId) *addImageNodesResult { +func newExistingImageResult(fileId domain.FileId, variants []*storage.FileInfo) *addImageNodesResult { return &addImageNodesResult{ - isExisting: true, - fileId: fileId, + isExisting: true, + fileId: fileId, + existingVariants: variants, } } @@ -175,7 +171,7 @@ func (s *service) addImageNodes(ctx context.Context, spaceID string, addOpts Add return nil, err } if addNodeResult.isExisting { - return newExistingImageResult(addNodeResult.fileId), nil + return newExistingImageResult(addNodeResult.fileId, addNodeResult.existingVariants), nil } dirEntries = append(dirEntries, dirEntry{ name: link.Name, diff --git a/core/files/queries.go b/core/files/queries.go index 102a10f0d2..42576bc200 100644 --- a/core/files/queries.go +++ b/core/files/queries.go @@ -11,7 +11,7 @@ import ( 
"github.com/anyproto/anytype-heart/util/pbtypes" ) -func (s *service) getFileVariantBySourceChecksum(mill string, sourceChecksum string, options string) (domain.FileId, *storage.FileInfo, error) { +func (s *service) getFileVariantBySourceChecksum(mill string, sourceChecksum string, options string) (domain.FileId, []*storage.FileInfo, error) { recs, err := s.objectStore.QueryCrossSpace(database.Query{ Filters: []*model.BlockContentDataviewFilter{ { @@ -40,16 +40,10 @@ func (s *service) getFileVariantBySourceChecksum(mill string, sourceChecksum str } infos := getFileInfosFromDetails(recs[0].Details) - for _, info := range infos { - if info.Mill == mill && info.Opts == options { - return domain.FileId(pbtypes.GetString(recs[0].Details, bundle.RelationKeyFileId.String())), info, nil - } - } - // Should never happen - return "", nil, fmt.Errorf("variant with specified mill not found") + return domain.FileId(pbtypes.GetString(recs[0].Details, bundle.RelationKeyFileId.String())), infos, nil } -func (s *service) getFileVariantByChecksum(mill string, variantChecksum string) (domain.FileId, *storage.FileInfo, error) { +func (s *service) getFileVariantByChecksum(mill string, variantChecksum string) (domain.FileId, *storage.FileInfo, []*storage.FileInfo, error) { recs, err := s.objectStore.QueryCrossSpace(database.Query{ Filters: []*model.BlockContentDataviewFilter{ { @@ -66,18 +60,18 @@ func (s *service) getFileVariantByChecksum(mill string, variantChecksum string) Limit: 1, }) if err != nil { - return "", nil, err + return "", nil, nil, err } if len(recs) == 0 { - return "", nil, fmt.Errorf("variant not found") + return "", nil, nil, fmt.Errorf("variant not found") } infos := getFileInfosFromDetails(recs[0].Details) for _, info := range infos { if info.Mill == mill && info.Checksum == variantChecksum { - return domain.FileId(pbtypes.GetString(recs[0].Details, bundle.RelationKeyFileId.String())), info, nil + return domain.FileId(pbtypes.GetString(recs[0].Details, bundle.RelationKeyFileId.String())), info, infos, nil } } // Should never happen - return "", nil, fmt.Errorf("variant with specified mill not found") + return "", nil, nil, fmt.Errorf("variant with specified mill not found") } diff --git a/pkg/lib/localstore/filestore/files.go b/pkg/lib/localstore/filestore/files.go index edeb5507e3..6cdecea647 100644 --- a/pkg/lib/localstore/filestore/files.go +++ b/pkg/lib/localstore/filestore/files.go @@ -89,14 +89,6 @@ type FileStore interface { app.ComponentRunnable localstore.Indexable - DeleteFileVariants(variantIds []domain.FileContentId) error - - ListFileIds() ([]domain.FileId, error) - ListFileVariants(fileId domain.FileId) ([]*storage.FileInfo, error) - ListAllFileVariants() ([]*storage.FileInfo, error) - - DeleteFile(fileId domain.FileId) error - AddFileKeys(fileKeys ...domain.FileEncryptionKeys) error GetFileKeys(fileId domain.FileId) (map[string]string, error) From 34be3600dcaa401e678f4506d8391ab8debbe813 Mon Sep 17 00:00:00 2001 From: Sergey Date: Wed, 13 Nov 2024 20:20:49 +0100 Subject: [PATCH 06/10] GO-4389: Temporarily solution --- core/anytype/account/service.go | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/core/anytype/account/service.go b/core/anytype/account/service.go index 66a4659e25..5c3231b2fc 100644 --- a/core/anytype/account/service.go +++ b/core/anytype/account/service.go @@ -182,10 +182,11 @@ func (s *service) getAnalyticsId(ctx context.Context, techSpace techspace.TechSp return nil }) if analyticsId == "" { - err = 
s.spaceService.WaitPersonalSpaceMigration(ctx) - if err != nil { - return - } + // TODO Temporarily commented + // err = s.spaceService.WaitPersonalSpaceMigration(ctx) + // if err != nil { + // return + // } } else { return analyticsId, nil } From 32f04bc1980530f73984bba1f72cc741e346482a Mon Sep 17 00:00:00 2001 From: Sergey Date: Wed, 13 Nov 2024 20:35:18 +0100 Subject: [PATCH 07/10] GO-4389: Pass encryption keys directly --- core/block/editor/files.go | 2 +- core/files/fileobject/fileindex.go | 2 +- core/files/files.go | 15 ++++----------- 3 files changed, 6 insertions(+), 13 deletions(-) diff --git a/core/block/editor/files.go b/core/block/editor/files.go index e4d9359729..7bfacc9745 100644 --- a/core/block/editor/files.go +++ b/core/block/editor/files.go @@ -106,7 +106,7 @@ func (f *File) Init(ctx *smartblock.InitContext) error { infos, err := f.fileService.IndexFile(ctx.Ctx, domain.FullFileId{ FileId: domain.FileId(pbtypes.GetString(ctx.State.Details(), bundle.RelationKeyFileId.String())), SpaceId: f.SpaceID(), - }, ctx.State.Details()) + }, ctx.State.Details(), ctx.State.GetFileInfo().EncryptionKeys) if err != nil { return fmt.Errorf("get infos for indexing: %w", err) } diff --git a/core/files/fileobject/fileindex.go b/core/files/fileobject/fileindex.go index 523619e775..102b653848 100644 --- a/core/files/fileobject/fileindex.go +++ b/core/files/fileobject/fileindex.go @@ -237,7 +237,7 @@ func (ind *indexer) indexFile(ctx context.Context, id domain.FullID, fileId doma } func (ind *indexer) injectMetadataToState(ctx context.Context, st *state.State, fileId domain.FullFileId, id domain.FullID) error { - infos, err := ind.fileService.IndexFile(ctx, fileId, st.Details()) + infos, err := ind.fileService.IndexFile(ctx, fileId, st.Details(), st.GetFileInfo().EncryptionKeys) if err != nil { return fmt.Errorf("get infos for indexing: %w", err) } diff --git a/core/files/files.go b/core/files/files.go index ce9e88b886..531edbe44c 100644 --- a/core/files/files.go +++ b/core/files/files.go @@ -65,7 +65,7 @@ type Service interface { ImageByHash(ctx context.Context, id domain.FullFileId) (Image, error) ImageFromInfos(fileId domain.FullFileId, infos []*storage.FileInfo) Image - IndexFile(ctx context.Context, fileId domain.FullFileId, details *types.Struct) ([]*storage.FileInfo, error) + IndexFile(ctx context.Context, fileId domain.FullFileId, details *types.Struct, keys map[string]string) ([]*storage.FileInfo, error) app.Component } @@ -522,7 +522,7 @@ type dirEntry struct { fileNode ipld.Node } -func (s *service) fileIndexInfo(ctx context.Context, id domain.FullFileId, updateIfExists bool) ([]*storage.FileInfo, error) { +func (s *service) fileIndexInfo(ctx context.Context, id domain.FullFileId, keys map[string]string) ([]*storage.FileInfo, error) { dagService := s.dagServiceForSpace(id.SpaceId) dirLinks, err := helpers.LinksAtCid(ctx, dagService, id.FileId.String()) if err != nil { @@ -533,13 +533,6 @@ func (s *service) fileIndexInfo(ctx context.Context, id domain.FullFileId, updat return nil, fmt.Errorf("get inner dir node: %w", err) } - // File keys should be available at this moment - keys, err := s.fileStore.GetFileKeys(id.FileId) - if err != nil { - // no keys means file is not encrypted or keys are missing - log.Debugf("failed to get file keys from filestore %s: %s", id.FileId.String(), err) - } - var files []*storage.FileInfo if looksLikeFileNode(dirNode) { path := encryptionKeyPath(schema.LinkFile) @@ -611,11 +604,11 @@ func getEncryptorDecryptor(key symmetric.Key) 
(symmetric.EncryptorDecryptor, err return cfb.New(key, [aes.BlockSize]byte{}), nil } -func (s *service) IndexFile(ctx context.Context, id domain.FullFileId, details *types.Struct) ([]*storage.FileInfo, error) { +func (s *service) IndexFile(ctx context.Context, id domain.FullFileId, details *types.Struct, keys map[string]string) ([]*storage.FileInfo, error) { variantsList := pbtypes.GetStringList(details, bundle.RelationKeyFileVariantIds.String()) if true || len(variantsList) == 0 { // info from ipfs - fileList, err := s.fileIndexInfo(ctx, id, false) + fileList, err := s.fileIndexInfo(ctx, id, keys) if err != nil { return nil, err } From b175ddbf2b624c05371995f3fc841f01bc91055b Mon Sep 17 00:00:00 2001 From: Sergey Date: Thu, 14 Nov 2024 11:05:19 +0100 Subject: [PATCH 08/10] GO-4389: Remove useless methods from filestore --- core/files/fileobject/migration.go | 4 -- core/files/files.go | 12 ---- core/filestorage/filesync/filesync.go | 1 - core/filestorage/filesync/stats.go | 86 --------------------------- pkg/lib/localstore/filestore/files.go | 10 ---- 5 files changed, 113 deletions(-) diff --git a/core/files/fileobject/migration.go b/core/files/fileobject/migration.go index 2af42dc1be..743232df01 100644 --- a/core/files/fileobject/migration.go +++ b/core/files/fileobject/migration.go @@ -108,10 +108,6 @@ func (s *service) migrateFile(space clientspace.Space, origin objectorigin.Objec if !fileId.Valid() { return nil } - storedOrigin, err := s.fileStore.GetFileOrigin(fileId) - if err == nil { - origin = storedOrigin - } // Add fileId as uniqueKey to avoid migration of the same file uniqueKey, err := domain.NewUniqueKey(coresb.SmartBlockTypeFileObject, fileId.String()) diff --git a/core/files/files.go b/core/files/files.go index 531edbe44c..5a6c120dc4 100644 --- a/core/files/files.go +++ b/core/files/files.go @@ -612,18 +612,6 @@ func (s *service) IndexFile(ctx context.Context, id domain.FullFileId, details * if err != nil { return nil, err } - ok, err := s.fileStore.IsFileImported(id.FileId) - if err != nil { - return nil, fmt.Errorf("check if file is imported: %w", err) - } - if ok { - log.With("fileId", id.FileId.String()).Warn("file is imported, push it to uploading queue") - // If file is imported we have to sync it, so we don't set sync status to synced - err = s.fileStore.SetIsFileImported(id.FileId, false) - if err != nil { - return nil, fmt.Errorf("set is file imported: %w", err) - } - } return fileList, nil } return nil, nil diff --git a/core/filestorage/filesync/filesync.go b/core/filestorage/filesync/filesync.go index 1852184317..72c82f0469 100644 --- a/core/filestorage/filesync/filesync.go +++ b/core/filestorage/filesync/filesync.go @@ -50,7 +50,6 @@ type FileSync interface { UpdateNodeUsage(ctx context.Context) error NodeUsage(ctx context.Context) (usage NodeUsage, err error) SpaceStat(ctx context.Context, spaceId string) (ss SpaceStat, err error) - FileListStats(ctx context.Context, spaceId string, hashes []domain.FileId) ([]FileStat, error) DebugQueue(*http.Request) (*QueueInfo, error) SendImportEvents() ClearImportEvents() diff --git a/core/filestorage/filesync/stats.go b/core/filestorage/filesync/stats.go index 49ff75455f..f2594ffd2f 100644 --- a/core/filestorage/filesync/stats.go +++ b/core/filestorage/filesync/stats.go @@ -7,16 +7,10 @@ import ( "net/http" "time" - "github.com/anyproto/any-sync/commonfile/fileproto" "github.com/dgraph-io/badger/v4" - "github.com/ipfs/go-cid" - ipld "github.com/ipfs/go-ipld-format" - "github.com/samber/lo" "go.uber.org/zap" - 
"github.com/anyproto/anytype-heart/core/domain" "github.com/anyproto/anytype-heart/pb" - "github.com/anyproto/anytype-heart/util/conc" ) type NodeUsage struct { @@ -261,86 +255,6 @@ func makeLimitUpdatedEvent(limit uint64) *pb.Event { } } -func (s *fileSync) FileListStats(ctx context.Context, spaceID string, hashes []domain.FileId) ([]FileStat, error) { - filesInfo, err := s.fetchFilesInfo(ctx, spaceID, hashes) - if err != nil { - return nil, err - } - return conc.MapErr(filesInfo, func(fileInfo *fileproto.FileInfo) (FileStat, error) { - return s.fileInfoToStat(ctx, spaceID, fileInfo) - }) -} - -func (s *fileSync) fetchFilesInfo(ctx context.Context, spaceId string, hashes []domain.FileId) ([]*fileproto.FileInfo, error) { - requests := lo.Chunk(hashes, 50) - responses, err := conc.MapErr(requests, func(chunk []domain.FileId) ([]*fileproto.FileInfo, error) { - return s.rpcStore.FilesInfo(ctx, spaceId, chunk...) - }) - if err != nil { - return nil, err - } - return lo.Flatten(responses), nil -} - -func (s *fileSync) fileInfoToStat(ctx context.Context, spaceId string, file *fileproto.FileInfo) (FileStat, error) { - totalChunks, err := s.countChunks(ctx, spaceId, domain.FileId(file.FileId)) - if err != nil { - return FileStat{}, fmt.Errorf("count chunks: %w", err) - } - - return FileStat{ - SpaceId: spaceId, - FileId: file.FileId, - TotalChunksCount: totalChunks, - UploadedChunksCount: int(file.CidsCount), - BytesUsage: int(file.UsageBytes), - }, nil -} - -func (s *fileSync) countChunks(ctx context.Context, spaceID string, fileId domain.FileId) (int, error) { - chunksCount, err := s.fileStore.GetChunksCount(fileId) - if err == nil { - return chunksCount, nil - } - - chunksCount, err = s.fetchChunksCount(ctx, spaceID, fileId) - if err != nil { - return -1, fmt.Errorf("count chunks in IPFS: %w", err) - } - - err = s.fileStore.SetChunksCount(fileId, chunksCount) - - return chunksCount, err -} - -func (s *fileSync) fetchChunksCount(ctx context.Context, spaceID string, fileId domain.FileId) (int, error) { - fileCid, err := cid.Parse(fileId.String()) - if err != nil { - return -1, err - } - dagService := s.dagServiceForSpace(spaceID) - node, err := dagService.Get(ctx, fileCid) - if err != nil { - return -1, err - } - - var count int - visited := map[string]struct{}{} - walker := ipld.NewWalker(ctx, ipld.NewNavigableIPLDNode(node, dagService)) - err = walker.Iterate(func(node ipld.NavigableNode) error { - id := node.GetIPLDNode().Cid().String() - if _, ok := visited[id]; !ok { - visited[id] = struct{}{} - count++ - } - return nil - }) - if err == ipld.EndOfDag { - err = nil - } - return count, err -} - func (s *fileSync) DebugQueue(_ *http.Request) (*QueueInfo, error) { var info QueueInfo info.UploadingQueue = s.uploadingQueue.ListKeys() diff --git a/pkg/lib/localstore/filestore/files.go b/pkg/lib/localstore/filestore/files.go index 6cdecea647..7fe243bc61 100644 --- a/pkg/lib/localstore/filestore/files.go +++ b/pkg/lib/localstore/filestore/files.go @@ -87,19 +87,9 @@ const CName = "filestore" type FileStore interface { app.ComponentRunnable - localstore.Indexable AddFileKeys(fileKeys ...domain.FileEncryptionKeys) error GetFileKeys(fileId domain.FileId) (map[string]string, error) - - GetChunksCount(fileId domain.FileId) (int, error) - SetChunksCount(fileId domain.FileId, chunksCount int) error - IsFileImported(fileId domain.FileId) (bool, error) - SetIsFileImported(fileId domain.FileId, isImported bool) error - SetFileSize(fileId domain.FileId, size int) error - GetFileSize(fileId domain.FileId) (int, 
error) - GetFileOrigin(fileId domain.FileId) (objectorigin.ObjectOrigin, error) - SetFileOrigin(fileId domain.FileId, origin objectorigin.ObjectOrigin) error } func New() FileStore { From 8a7b42b56286cc7148f5fad7d6aba5a3f6259b18 Mon Sep 17 00:00:00 2001 From: Sergey Date: Thu, 14 Nov 2024 11:20:24 +0100 Subject: [PATCH 09/10] GO-4389: Move file keys methods to objectstore --- core/block/editor/smartblock/smartblock.go | 4 +- core/block/import/common/objectid/oldfile.go | 6 +-- core/block/import/common/objectid/provider.go | 2 +- core/block/import/importer.go | 11 ++-- core/files/fileacl/service.go | 8 +-- core/files/fileobject/service.go | 2 +- core/files/files.go | 4 +- core/files/images.go | 2 +- pkg/lib/localstore/filestore/files.go | 3 -- pkg/lib/localstore/objectstore/filekeys.go | 50 +++++++++++++++++++ pkg/lib/localstore/objectstore/service.go | 4 ++ .../objectstore/spaceindex/invalid.go | 8 +++ 12 files changed, 82 insertions(+), 22 deletions(-) create mode 100644 pkg/lib/localstore/objectstore/filekeys.go diff --git a/core/block/editor/smartblock/smartblock.go b/core/block/editor/smartblock/smartblock.go index 77fe2414b8..0334564c5a 100644 --- a/core/block/editor/smartblock/smartblock.go +++ b/core/block/editor/smartblock/smartblock.go @@ -1272,7 +1272,7 @@ func (sb *smartBlock) storeFileKeys(doc state.Doc) { EncryptionKeys: k.Keys, } } - if err := sb.fileStore.AddFileKeys(fileKeys...); err != nil { + if err := sb.objectStore.AddFileKeys(fileKeys...); err != nil { log.Warnf("can't store file keys: %v", err) } } @@ -1468,7 +1468,7 @@ func (sb *smartBlock) injectDerivedDetails(s *state.State, spaceID string, sbt s } if info := s.GetFileInfo(); info.FileId != "" { - err := sb.fileStore.AddFileKeys(domain.FileEncryptionKeys{ + err := sb.objectStore.AddFileKeys(domain.FileEncryptionKeys{ FileId: info.FileId, EncryptionKeys: info.EncryptionKeys, }) diff --git a/core/block/import/common/objectid/oldfile.go b/core/block/import/common/objectid/oldfile.go index 16b44afd79..8da86938a9 100644 --- a/core/block/import/common/objectid/oldfile.go +++ b/core/block/import/common/objectid/oldfile.go @@ -17,15 +17,15 @@ import ( "github.com/anyproto/anytype-heart/core/files/fileobject" "github.com/anyproto/anytype-heart/pb" "github.com/anyproto/anytype-heart/pkg/lib/bundle" - "github.com/anyproto/anytype-heart/pkg/lib/localstore/filestore" + "github.com/anyproto/anytype-heart/pkg/lib/localstore/objectstore" "github.com/anyproto/anytype-heart/util/pbtypes" ) // oldFile represents file in pre Files-as-Objects format type oldFile struct { blockService *block.Service - fileStore filestore.FileStore fileObjectService fileobject.Service + objectStore objectstore.ObjectStore } func (f *oldFile) GetIDAndPayload(ctx context.Context, spaceId string, sn *common.Snapshot, _ time.Time, _ bool, origin objectorigin.ObjectOrigin) (string, treestorage.TreeStorageCreatePayload, error) { @@ -50,7 +50,7 @@ func (f *oldFile) GetIDAndPayload(ctx context.Context, spaceId string, sn *commo } } - err := f.fileStore.AddFileKeys(domain.FileEncryptionKeys{ + err := f.objectStore.AddFileKeys(domain.FileEncryptionKeys{ FileId: domain.FileId(fileId), EncryptionKeys: filesKeys, }) diff --git a/core/block/import/common/objectid/provider.go b/core/block/import/common/objectid/provider.go index 3c16b0fcc9..9d6fd1c7eb 100644 --- a/core/block/import/common/objectid/provider.go +++ b/core/block/import/common/objectid/provider.go @@ -56,7 +56,7 @@ func NewIDProvider( } oldFile := &oldFile{ blockService: blockService, - fileStore: 
fileStore, + objectStore: objectStore, fileObjectService: fileObjectService, } p.idProviderBySmartBlockType[sb.SmartBlockTypeWorkspace] = newWorkspace(spaceService) diff --git a/core/block/import/importer.go b/core/block/import/importer.go index 12241df328..615f362462 100644 --- a/core/block/import/importer.go +++ b/core/block/import/importer.go @@ -69,6 +69,7 @@ type Import struct { fileSync filesync.FileSync notificationService notifications.Notifications eventSender event.Sender + objectStore objectstore.ObjectStore importCtx context.Context importCtxCancel context.CancelFunc @@ -98,15 +99,15 @@ func (i *Import) Init(a *app.App) (err error) { for _, c := range converters { i.converters[c.Name()] = c } - store := app.MustComponent[objectstore.ObjectStore](a) + i.objectStore = app.MustComponent[objectstore.ObjectStore](a) i.fileStore = app.MustComponent[filestore.FileStore](a) fileObjectService := app.MustComponent[fileobject.Service](a) - i.idProvider = objectid.NewIDProvider(store, spaceService, i.s, i.fileStore, fileObjectService) + i.idProvider = objectid.NewIDProvider(i.objectStore, spaceService, i.s, i.fileStore, fileObjectService) factory := syncer.New(syncer.NewFileSyncer(i.s, fileObjectService), syncer.NewBookmarkSyncer(i.s), syncer.NewIconSyncer(i.s, fileObjectService)) relationSyncer := syncer.NewFileRelationSyncer(i.s, fileObjectService) objectCreator := app.MustComponent[objectcreator.Service](a) detailsService := app.MustComponent[detailservice.Service](a) - i.oc = creator.New(detailsService, factory, store, relationSyncer, spaceService, objectCreator, i.s) + i.oc = creator.New(detailsService, factory, i.objectStore, relationSyncer, spaceService, objectCreator, i.s) i.fileSync = app.MustComponent[filesync.FileSync](a) i.notificationService = app.MustComponent[notifications.Notifications](a) i.eventSender = app.MustComponent[event.Sender](a) @@ -437,7 +438,7 @@ func (i *Import) getObjectID( // Preload file keys for _, fileKeys := range snapshot.Snapshot.GetFileKeys() { - err := i.fileStore.AddFileKeys(domain.FileEncryptionKeys{ + err := i.objectStore.AddFileKeys(domain.FileEncryptionKeys{ FileId: domain.FileId(fileKeys.Hash), EncryptionKeys: fileKeys.Keys, }) @@ -450,7 +451,7 @@ func (i *Import) getObjectID( for _, key := range fileInfo.EncryptionKeys { keys[key.Path] = key.Key } - err := i.fileStore.AddFileKeys(domain.FileEncryptionKeys{ + err := i.objectStore.AddFileKeys(domain.FileEncryptionKeys{ FileId: domain.FileId(fileInfo.FileId), EncryptionKeys: keys, }) diff --git a/core/files/fileacl/service.go b/core/files/fileacl/service.go index 053fe8a356..7dfd6f9896 100644 --- a/core/files/fileacl/service.go +++ b/core/files/fileacl/service.go @@ -9,7 +9,7 @@ import ( "github.com/anyproto/anytype-heart/core/domain" "github.com/anyproto/anytype-heart/core/files" "github.com/anyproto/anytype-heart/core/files/fileobject" - "github.com/anyproto/anytype-heart/pkg/lib/localstore/filestore" + "github.com/anyproto/anytype-heart/pkg/lib/localstore/objectstore" "github.com/anyproto/anytype-heart/pkg/lib/pb/model" ) @@ -25,7 +25,7 @@ type Service interface { type service struct { fileService files.Service fileObjectService fileobject.Service - fileStore filestore.FileStore + objectStore objectstore.ObjectStore } func New() Service { @@ -35,7 +35,7 @@ func New() Service { func (s *service) Init(a *app.App) error { s.fileService = app.MustComponent[files.Service](a) s.fileObjectService = app.MustComponent[fileobject.Service](a) - s.fileStore = app.MustComponent[filestore.FileStore](a) + 
s.objectStore = app.MustComponent[objectstore.ObjectStore](a) return nil } @@ -76,7 +76,7 @@ func (s *service) StoreFileKeys(fileId domain.FileId, fileKeys []*model.FileEncr for _, key := range fileKeys { keys.EncryptionKeys[key.Path] = key.Key } - err := s.fileStore.AddFileKeys(keys) + err := s.objectStore.AddFileKeys(keys) if err != nil { return fmt.Errorf("store file encryption keys: %w", err) } diff --git a/core/files/fileobject/service.go b/core/files/fileobject/service.go index d52ca6a30e..6ea6a96410 100644 --- a/core/files/fileobject/service.go +++ b/core/files/fileobject/service.go @@ -390,7 +390,7 @@ func (s *service) CreateFromImport(fileId domain.FullFileId, origin objectorigin if err == nil { return fileObjectId, nil } - keys, err := s.fileStore.GetFileKeys(fileId.FileId) + keys, err := s.objectStore.GetFileKeys(fileId.FileId) if err != nil { return "", fmt.Errorf("get file keys: %w", err) } diff --git a/core/files/files.go b/core/files/files.go index 5a6c120dc4..2dc9b735fe 100644 --- a/core/files/files.go +++ b/core/files/files.go @@ -167,7 +167,7 @@ func (s *service) FileAdd(ctx context.Context, spaceId string, options ...AddOpt FileId: fileId, EncryptionKeys: keys.KeysByPath, } - err = s.fileStore.AddFileKeys(fileKeys) + err = s.objectStore.AddFileKeys(fileKeys) if err != nil { addLock.Unlock() return nil, fmt.Errorf("failed to save file keys: %w", err) @@ -244,7 +244,7 @@ func (s *service) addFileRootNode(ctx context.Context, spaceID string, fileInfo } func (s *service) FileGetKeys(id domain.FileId) (*domain.FileEncryptionKeys, error) { - keys, err := s.fileStore.GetFileKeys(id) + keys, err := s.objectStore.GetFileKeys(id) if err != nil { return nil, err } diff --git a/core/files/images.go b/core/files/images.go index fb584f7cab..0eceedafe8 100644 --- a/core/files/images.go +++ b/core/files/images.go @@ -99,7 +99,7 @@ func (s *service) ImageAdd(ctx context.Context, spaceId string, options ...AddOp FileId: fileId, EncryptionKeys: keys.KeysByPath, } - err = s.fileStore.AddFileKeys(fileKeys) + err = s.objectStore.AddFileKeys(fileKeys) if err != nil { addLock.Unlock() return nil, fmt.Errorf("failed to save file keys: %w", err) diff --git a/pkg/lib/localstore/filestore/files.go b/pkg/lib/localstore/filestore/files.go index 7fe243bc61..5d5d7b1eef 100644 --- a/pkg/lib/localstore/filestore/files.go +++ b/pkg/lib/localstore/filestore/files.go @@ -87,9 +87,6 @@ const CName = "filestore" type FileStore interface { app.ComponentRunnable - - AddFileKeys(fileKeys ...domain.FileEncryptionKeys) error - GetFileKeys(fileId domain.FileId) (map[string]string, error) } func New() FileStore { diff --git a/pkg/lib/localstore/objectstore/filekeys.go b/pkg/lib/localstore/objectstore/filekeys.go new file mode 100644 index 0000000000..65f752c9c3 --- /dev/null +++ b/pkg/lib/localstore/objectstore/filekeys.go @@ -0,0 +1,50 @@ +package objectstore + +import ( + "encoding/json" + "errors" + "fmt" + + "github.com/anyproto/anytype-heart/core/domain" +) + +func fileKeysKey(fileId domain.FileId) string { + return fmt.Sprintf("fileKeys/%s", fileId) +} + +func (s *dsObjectStore) AddFileKeys(fileKeys ...domain.FileEncryptionKeys) error { + arena := s.arenaPool.Get() + defer func() { + arena.Reset() + s.arenaPool.Put(arena) + }() + + txn, err := s.system.WriteTx(s.componentCtx) + if err != nil { + return fmt.Errorf("start transaction: %w", err) + } + defer txn.Commit() + + for _, fk := range fileKeys { + it, err := keyValueItem(arena, fileKeysKey(fk.FileId), fk.EncryptionKeys) + if err != nil { + return 
errors.Join(txn.Rollback(), fmt.Errorf("create item: %w", err)) + } + err = s.system.UpsertOne(s.componentCtx, it) + if err != nil { + return errors.Join(txn.Rollback(), fmt.Errorf("upsert: %w", err)) + } + } + return err +} + +func (s *dsObjectStore) GetFileKeys(fileId domain.FileId) (map[string]string, error) { + doc, err := s.system.FindId(s.componentCtx, fileKeysKey(fileId)) + if err != nil { + return nil, fmt.Errorf("find file keys: %w", err) + } + val := doc.Value().GetStringBytes("value") + keys := map[string]string{} + err = json.Unmarshal(val, keys) + return keys, err +} diff --git a/pkg/lib/localstore/objectstore/service.go b/pkg/lib/localstore/objectstore/service.go index 6e943045f4..a552fc3250 100644 --- a/pkg/lib/localstore/objectstore/service.go +++ b/pkg/lib/localstore/objectstore/service.go @@ -13,6 +13,7 @@ import ( "github.com/anyproto/any-sync/app" "github.com/anyproto/any-sync/coordinator/coordinatorproto" + "github.com/anyproto/anytype-heart/core/domain" "github.com/anyproto/anytype-heart/core/wallet" "github.com/anyproto/anytype-heart/pkg/lib/database" "github.com/anyproto/anytype-heart/pkg/lib/localstore/ftsearch" @@ -53,6 +54,9 @@ type ObjectStore interface { SpaceNameGetter CrossSpace + + AddFileKeys(fileKeys ...domain.FileEncryptionKeys) error + GetFileKeys(fileId domain.FileId) (map[string]string, error) } type IndexerStore interface { diff --git a/pkg/lib/localstore/objectstore/spaceindex/invalid.go b/pkg/lib/localstore/objectstore/spaceindex/invalid.go index 7d892aad92..c73ca67c0a 100644 --- a/pkg/lib/localstore/objectstore/spaceindex/invalid.go +++ b/pkg/lib/localstore/objectstore/spaceindex/invalid.go @@ -190,3 +190,11 @@ func (s *invalidStore) WriteTx(ctx context.Context) (anystore.WriteTx, error) { func (s *invalidStore) SubscribeForAll(callback func(rec database.Record)) { } + +func (s *invalidStore) AddFileKeys(fileKeys ...domain.FileEncryptionKeys) error { + return s.err +} + +func (s *invalidStore) GetFileKeys(fileId domain.FileId) (map[string]string, error) { + return nil, s.err +} From b47c3d50b30eb17f3b3e284cf4a2f2c2d1879e95 Mon Sep 17 00:00:00 2001 From: Sergey Date: Thu, 14 Nov 2024 11:44:40 +0100 Subject: [PATCH 10/10] GO-4389: Fix file keys methods --- core/anytype/account/service.go | 9 ++++----- pkg/lib/localstore/objectstore/filekeys.go | 4 ++-- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/core/anytype/account/service.go b/core/anytype/account/service.go index 5c3231b2fc..66a4659e25 100644 --- a/core/anytype/account/service.go +++ b/core/anytype/account/service.go @@ -182,11 +182,10 @@ func (s *service) getAnalyticsId(ctx context.Context, techSpace techspace.TechSp return nil }) if analyticsId == "" { - // TODO Temporarily commented - // err = s.spaceService.WaitPersonalSpaceMigration(ctx) - // if err != nil { - // return - // } + err = s.spaceService.WaitPersonalSpaceMigration(ctx) + if err != nil { + return + } } else { return analyticsId, nil } diff --git a/pkg/lib/localstore/objectstore/filekeys.go b/pkg/lib/localstore/objectstore/filekeys.go index 65f752c9c3..07b3700deb 100644 --- a/pkg/lib/localstore/objectstore/filekeys.go +++ b/pkg/lib/localstore/objectstore/filekeys.go @@ -30,7 +30,7 @@ func (s *dsObjectStore) AddFileKeys(fileKeys ...domain.FileEncryptionKeys) error if err != nil { return errors.Join(txn.Rollback(), fmt.Errorf("create item: %w", err)) } - err = s.system.UpsertOne(s.componentCtx, it) + err = s.system.UpsertOne(txn.Context(), it) if err != nil { return errors.Join(txn.Rollback(), fmt.Errorf("upsert: 
%w", err)) } @@ -45,6 +45,6 @@ func (s *dsObjectStore) GetFileKeys(fileId domain.FileId) (map[string]string, er } val := doc.Value().GetStringBytes("value") keys := map[string]string{} - err = json.Unmarshal(val, keys) + err = json.Unmarshal(val, &keys) return keys, err }
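
After patch 09 the FileStore interface is reduced to just app.ComponentRunnable: variant metadata is carried on file objects as details (patches 02-04), and the per-file encryption keys move into objectstore as key-value documents addressed by "fileKeys/<fileId>". The sketch below is a minimal, self-contained illustration of that key/value layout and of the two method shapes taken from the diffs (AddFileKeys(...domain.FileEncryptionKeys) error and GetFileKeys(domain.FileId) (map[string]string, error)); the memKeyStore type, its in-memory map, and the example path/key values are stand-ins, not the real anystore-backed dsObjectStore.

package main

import (
	"encoding/json"
	"fmt"
)

// Stand-ins for domain.FileId and domain.FileEncryptionKeys as they appear in the diffs.
type FileId string

type FileEncryptionKeys struct {
	FileId         FileId
	EncryptionKeys map[string]string // encryption key per file-tree path
}

// memKeyStore is a hypothetical in-memory stand-in for dsObjectStore:
// one JSON document per "fileKeys/<fileId>" key, mirroring filekeys.go.
type memKeyStore struct {
	docs map[string][]byte
}

func fileKeysKey(fileId FileId) string {
	return fmt.Sprintf("fileKeys/%s", fileId)
}

func (s *memKeyStore) AddFileKeys(fileKeys ...FileEncryptionKeys) error {
	for _, fk := range fileKeys {
		val, err := json.Marshal(fk.EncryptionKeys)
		if err != nil {
			return fmt.Errorf("marshal keys: %w", err)
		}
		s.docs[fileKeysKey(fk.FileId)] = val // upsert by key
	}
	return nil
}

func (s *memKeyStore) GetFileKeys(fileId FileId) (map[string]string, error) {
	val, ok := s.docs[fileKeysKey(fileId)]
	if !ok {
		return nil, fmt.Errorf("find file keys: not found")
	}
	keys := map[string]string{}
	// Unmarshal into a pointer, matching the fix in patch 10.
	err := json.Unmarshal(val, &keys)
	return keys, err
}

func main() {
	store := &memKeyStore{docs: map[string][]byte{}}
	if err := store.AddFileKeys(FileEncryptionKeys{
		FileId:         "exampleFileId",
		EncryptionKeys: map[string]string{"/0/original": "example-symmetric-key"},
	}); err != nil {
		panic(err)
	}
	keys, err := store.GetFileKeys("exampleFileId")
	fmt.Println(keys, err) // map[/0/original:example-symmetric-key] <nil>
}

GetFileKeys here deliberately unmarshals into &keys, matching the fix in patch 10: passing the map by value (as patch 09 did) makes json.Unmarshal return an error and leave the map empty.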