bug: Fixes loading a large number of data files in Metadata Editor
Enables batched fetching of member models and updates save button control toggling

- Adds `fetchMemberModels` method to `DataPackage` to fetch member models in batches.
- Updates `fetch` method in `DataPackage` to use `fetchMemberModels`.
- Adds listener for `numLoadingFileMetadata` change in `EML211EditorView`.
- Updates `toggleEnableControls` in `EML211EditorView` to handle `numLoadingFileMetadata`.
- Adds `batchModeValue` configuration to `AppModel` to control the batch size for fetching member models.

Closes NCEAS#2547
vchendrix committed Dec 13, 2024
1 parent 1cff0d9 commit c487822
Showing 3 changed files with 99 additions and 54 deletions.
119 changes: 69 additions & 50 deletions src/js/collections/DataPackage.js
@@ -415,6 +415,72 @@ define([
}
},


/**
* Fetches member models in batches to avoid fetching all members simultaneously.
*
* @param {Array} models - The array of member models to fetch.
* @param {number} [index=0] - The current index of the model being fetched.
* @param {number} [batchSize=0] - The number of models to fetch in each batch. A value of 0 fetches all remaining models in a single batch.
*/
fetchMemberModels(models, index = 0, batchSize = 0) {
// Update the number of file metadata items being loaded
this.packageModel.set("numLoadingFileMetadata", models.length - index);
// If the index is greater than or equal to the length of the models array, stop fetching
if (index >= models.length) {
return;
}
if (batchSize == 0) batchSize = models.length;

const collection = this;
// Slice the models array to get the current batch
const batch = models.slice(index, index + batchSize);

// Create an array of promises for fetching each model in the batch
const fetchPromises = batch.map((memberModel) => {
return new Promise((resolve) => {
// Fetch the member model
memberModel.fetch();
// Once the model is synced, handle the response
memberModel.once("sync", (oldModel) => {
const newModel = collection.getMember(oldModel);

// If the type of the old model is different from the new model
if (oldModel.type != newModel.type) {
if (newModel.type == "DataPackage") {
// If the new model is a DataPackage, replace the old model with the new one
oldModel.trigger("replace", newModel);
resolve();
} else {
// Otherwise, fetch the new model and replace the old model with the new one
newModel.set("synced", false);
newModel.fetch();
newModel.once("sync", (fetchedModel) => {
fetchedModel.set("synced", true);
collection.remove(oldModel);
collection.add(fetchedModel);
oldModel.trigger("replace", newModel);
if (newModel.type == "EML") collection.trigger("add:EML");
resolve();
});
}
} else {
// If the type of the old model is the same as the new model, merge the new model into the collection
newModel.set("synced", true);
collection.add(newModel, { merge: true });
if (newModel.type == "EML") collection.trigger("add:EML");
resolve();
}
});
});
});

// Once all fetch promises are resolved, fetch the next batch
Promise.all(fetchPromises).then(() => {
this.fetchMemberModels.call(collection, models, index + batchSize, batchSize);
});
},

/**
* Overload fetch calls for a DataPackage
* @param {object} [options] - Optional options for this fetch that get sent with the XHR request
@@ -688,56 +754,8 @@ define([

// Don't fetch each member model if the fetchModels property on this Collection is set to false
if (this.fetchModels !== false) {
// Add the models to the collection now, silently
// this.add(models, {silent: true});

// Retrieve the model for each member
_.each(
models,
function (memberModel) {
const collection = this;

memberModel.fetch();
memberModel.once("sync", (oldModel) => {
// Get the right model type based on the model values
const newModel = collection.getMember(oldModel);

// If the model type has changed, then mark the model as unsynced, since there may be custom fetch() options for the new model
if (oldModel.type != newModel.type) {
// DataPackages shouldn't be fetched until we support nested packages better in the UI
if (newModel.type == "DataPackage") {
// Trigger a replace event so other parts of the app know when a model has been replaced with a different type
oldModel.trigger("replace", newModel);
} else {
newModel.set("synced", false);

newModel.fetch();
newModel.once("sync", (fetchedModel) => {
fetchedModel.set("synced", true);

// Remove the model from the collection and add it back
collection.remove(oldModel);
collection.add(fetchedModel);

// Trigger a replace event so other parts of the app know when a model has been replaced with a different type
oldModel.trigger("replace", newModel);

if (newModel.type == "EML")
collection.trigger("add:EML");
});
}
} else {
newModel.set("synced", true);
collection.add(newModel, {
merge: true,
});

if (newModel.type == "EML") collection.trigger("add:EML");
}
});
},
this,
);
// Start fetching member models
this.fetchMemberModels.call(this, models, 0, MetacatUI.appModel.get("batchModeValue"));
}
} catch (error) {
console.log(error);
@@ -3661,6 +3679,7 @@ define([
this.packageModel.updateSysMeta();
},


/**
* Tracks the upload status of DataONEObject models in this collection. If they are
* `loading` into the DOM or `in progress` of an upload to the server, they will be considered as "loading" files.
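A minimal usage sketch of the new batching entry point (illustrative only: `dataPackage` and the batch size of 10 are hypothetical; the real call site above passes `MetacatUI.appModel.get("batchModeValue")`):

```js
// Fetch member models ten at a time. The next batch is requested only after
// every fetch in the current batch has resolved its "sync" handler.
// A batch size of 0 (the default) fetches all remaining models at once.
dataPackage.fetchMemberModels(models, 0, 10);
```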
16 changes: 16 additions & 0 deletions src/js/models/AppModel.js
@@ -2413,6 +2413,22 @@ define(["jquery", "underscore", "backbone"], function ($, _, Backbone) {
* @example application%2Fbagit-097
*/
packageFormat: "application%2Fbagit-1.0",
/**
* Whether to batch requests to the DataONE API. This is an experimental feature
* and should be used with caution. If set to a number greater than 0, MetacatUI will
* batch requests to the DataONE API and send them in groups of this size. This can
* improve performance when making many requests to the DataONE API, but can also
* cause issues if the requests are too large or if the DataONE API is not able to
* handle the batched requests.
*
* Currently, this feature is only used by the DataPackage collection when
* fetching its member models.
*
* @type {number}
* @default 0
* @example 20
*/
batchModeValue: 0,
},
MetacatUI.AppConfig,
),
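Since the new `batchModeValue` default is merged with `MetacatUI.AppConfig` (the extend at the end of the hunk above), a deployment can opt in through its configuration. A minimal sketch, assuming the usual pattern of defining `MetacatUI.AppConfig` before the app initializes:

```js
// Hypothetical config override: fetch member models in groups of 20
// instead of all at once (the shipped default of 0).
MetacatUI.AppConfig = Object.assign({}, MetacatUI.AppConfig, {
  batchModeValue: 20,
});
```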
18 changes: 14 additions & 4 deletions src/js/views/metadata/EML211EditorView.js
@@ -552,6 +552,12 @@ define([
"change:numLoadingFiles",
this.toggleEnableControls,
);

this.listenTo(
MetacatUI.rootDataPackage.packageModel,
"change:numLoadingFileMetadata",
this.toggleEnableControls,
);
},

renderChildren: function (model, options) {},
@@ -1215,15 +1221,19 @@
toggleEnableControls: function () {
if (MetacatUI.rootDataPackage.packageModel.get("isLoadingFiles")) {
let noun =
  MetacatUI.rootDataPackage.packageModel.get("numLoadingFiles") > 1
    ? " files"
    : " file";
this.disableControls(
  "Waiting for " +
    MetacatUI.rootDataPackage.packageModel.get("numLoadingFiles") +
    noun +
    " to upload...",
);
} else if (MetacatUI.rootDataPackage.packageModel.get("numLoadingFileMetadata") > 0) {
  this.disableControls(
    "Waiting for " +
      MetacatUI.rootDataPackage.packageModel.get("numLoadingFileMetadata") +
      " file metadata items to load...",
  );
} else {
this.enableControls();
}
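For illustration, a sketch of how the new `numLoadingFileMetadata` attribute drives the editor controls (standalone snippet with hypothetical values; the real wiring is the `listenTo` call and the `toggleEnableControls` branch shown above):

```js
// While fetchMemberModels is still working through batches, the package model
// reports how many file metadata items remain; the change event re-runs
// toggleEnableControls and the save controls stay disabled.
MetacatUI.rootDataPackage.packageModel.set("numLoadingFileMetadata", 5);

// Once the final batch resolves, the count drops to 0 and the controls are
// enabled again.
MetacatUI.rootDataPackage.packageModel.set("numLoadingFileMetadata", 0);
```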
