test: Adds unit tests for DataItemView, DataPackage
- Adds a new test spec for DataItemView. Specifically tests
  uploadFilesInBatch and addFiles in relation to NCEAS#2224
- Adds to the test spec for DataPackage. Specifically tests
  fetchMemberModels functionality in relation to NCEAS#2547
- Ensures comments provide context and purpose for each action in the
  tests
vchendrix committed Dec 20, 2024
1 parent 361fa73 commit 4fd253e
Showing 5 changed files with 235 additions and 16 deletions.
2 changes: 1 addition & 1 deletion src/js/collections/DataPackage.js
@@ -454,7 +454,7 @@ define([

// If batchSize is 0, set it to the total number of models
let batchSizeAdjust = batchSize;
if (batchSizeAdjust === 0) batchSizeAdjust = models.length;
if (batchSizeAdjust === 0 && index === 0) batchSizeAdjust = models.length;

const collection = this;
// Slice the models array to get the current batch
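For context on the DataPackage.js change: the `index === 0` guard keeps a batchSize of 0 (meaning "fetch everything at once") from being re-applied on recursive calls. A minimal sketch of that batching pattern, with assumed internals rather than the actual fetchMemberModels body:

// Illustrative only: a simplified batch fetcher with the same parameter list as
// DataPackage.fetchMemberModels(models, index, batchSize, timeout, maxRetries).
// Timeout handling and retry bookkeeping are omitted; names are assumptions.
function fetchInBatches(models, index, batchSize, timeout, maxRetries) {
  let batchSizeAdjust = batchSize;
  // Only expand a batchSize of 0 on the first call, so recursive calls do not
  // repeatedly reinterpret 0 as "all remaining models".
  if (batchSizeAdjust === 0 && index === 0) batchSizeAdjust = models.length;

  // Fetch the current slice of models.
  const batch = models.slice(index, index + batchSizeAdjust);
  batch.forEach(function (model) {
    model.fetch({
      success: function () { /* mark complete; trigger "complete" when all are done */ },
      error: function () { /* re-queue the model up to maxRetries times */ },
    });
  });

  // Recurse into the next batch if any models remain.
  if (index + batchSizeAdjust < models.length) {
    fetchInBatches(models, index + batchSizeAdjust, batchSizeAdjust, timeout, maxRetries);
  }
}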
20 changes: 10 additions & 10 deletions src/js/views/DataItemView.js
@@ -792,7 +792,7 @@ define([
let activeUploads = 0; // Counter for the number of active uploads

// If batchSize is 0, set it to the total number of files
if (batchSize == 0) batchSize = fileList.length;
if (batchSize === 0) batchSize = fileList.length;

/**
* Function to upload the next file in the list.
@@ -1128,7 +1128,7 @@ define([
this.render();
}


},

/**
@@ -1236,7 +1236,7 @@ define([
// Is this a Data or Metadata model?
if (eventModel.get && eventModel.get("type") === "Metadata") {
return eventModel;
}
}
// It's data, get the parent scimeta
parentMetadata = MetacatUI.rootDataPackage.where({
id: Array.isArray(eventModel.get("isDocumentedBy"))
@@ -1247,15 +1247,15 @@ define([
if (parentMetadata.length > 0) {
parentSciMeta = parentMetadata[0];
return parentSciMeta;
}
}
// If there is only one metadata model in the root data package, then use that metadata model
const metadataModels = MetacatUI.rootDataPackage.where({
type: "Metadata",
});

if (metadataModels.length == 1) return metadataModels[0];


}
},

@@ -1296,11 +1296,11 @@ define([
return MetacatUI.rootDataPackage;

// A nested package
}
}
return MetacatUI.rootDataPackage.where({
id: parentResourceMapId,
})[0];

}
},

@@ -1536,12 +1536,12 @@ define([

// Only return true if we can share both
return canShareMetadata && canShareResourceMap;
}
}
return (
this.model.get("accessPolicy") &&
this.model.get("accessPolicy").isAuthorized("changePermission")
);

}
}
},
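The uploadFilesInBatch hunk above (== tightened to ===) sits inside a concurrency-limited upload queue that the new DataItemView spec exercises. A rough sketch of that general pattern, where startUpload is a hypothetical stand-in for the view's checksum-then-save sequence rather than a real MetacatUI helper:

// Illustrative sketch of a concurrency-limited upload queue; the real
// DataItemView.uploadFilesInBatch differs in detail (checksums, upload status, events).
function uploadInBatches(fileList, batchSize) {
  let currentIndex = 0;
  let activeUploads = 0; // Counter for the number of active uploads
  if (batchSize === 0) batchSize = fileList.length;

  function uploadNext() {
    if (currentIndex >= fileList.length) return;
    const file = fileList[currentIndex];
    currentIndex += 1;
    activeUploads += 1;
    // startUpload is hypothetical: start the upload, invoke the callback on completion.
    startUpload(file, function onDone() {
      activeUploads -= 1;
      uploadNext(); // keep the pipeline full as uploads finish
    });
  }

  // Prime the queue with at most batchSize concurrent uploads.
  while (activeUploads < batchSize && currentIndex < fileList.length) {
    uploadNext();
  }
}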
3 changes: 2 additions & 1 deletion test/config/tests.json
@@ -91,7 +91,8 @@
"./js/specs/unit/collections/BioontologyResults.spec.js",
"./js/specs/unit/models/ontologies/BioontologyOntology.spec.js",
"./js/specs/unit/models/accordion/Accordion.spec.js",
"./js/specs/unit/models/accordion/AccordionItem.spec.js"
"./js/specs/unit/models/accordion/AccordionItem.spec.js",
"./js/specs/unit/views/DataItemView.spec.js"
],
"integration": [
"./js/specs/integration/collections/SolrResults.spec.js",
83 changes: 79 additions & 4 deletions test/js/specs/unit/collections/DataPackage.spec.js
@@ -1,6 +1,7 @@
define(["../../../../../../../../src/js/collections/DataPackage"], function (
DataPackage,
) {
define([
"../../../../../../../../src/js/collections/DataPackage",
"../../../../../../../../src/js/models/DataONEObject",
], function (DataPackage, DataONEObject) {
var expect = chai.expect;

describe("DataPackage Test Suite", function () {
@@ -39,5 +40,79 @@ define(["../../../../../../../../src/js/collections/DataPackage"], function (
expect(result).to.equal("folder1/folder2/file.txt");
});
});

describe("fetchMemberModels", function () {
this.timeout(30000); // Increase timeout to 30 seconds

it("should fetch member models successfully", function (done) {
const models = [new DataONEObject(), new DataONEObject()];
const originalFetch = DataONEObject.prototype.fetch;
let fetchCallCount = 0;

DataONEObject.prototype.fetch = function (options) {
fetchCallCount++;
options.success();
};

dataPackage.fetchMemberModels.call(dataPackage, models, 0, 2, 5000, 3);

setTimeout(function () {
expect(fetchCallCount).to.equal(2);
DataONEObject.prototype.fetch = originalFetch;
done();
}, 100);
});

it("should retry fetching member models on failure", function (done) {
const models = [new DataONEObject(), new DataONEObject()];
const originalFetch = DataONEObject.prototype.fetch;
let fetchCallCount = 0;
let maxRetries = 3;

DataONEObject.prototype.fetch = function (options) {
fetchCallCount++;
options.error({ statusText: "Internal Server Error" });
};

dataPackage.fetchMemberModels(models, 0, 2, 5000, maxRetries);

setTimeout(function () {
console.log("[should retry fetching member models on failure] "+ fetchCallCount + " fetch calls");
expect(fetchCallCount).to.equal(models.length * (maxRetries + 1)); // 2 models * (1 initial attempt + 3 retries) = 8 fetch calls
DataONEObject.prototype.fetch = originalFetch;
done();
}, 100);
});

it("should trigger complete event after fetching all models", function (done) {
const models = [new DataONEObject({identifier: "1"}), new DataONEObject({identifier: "2"})];
const originalFetch = DataONEObject.prototype.fetch;
let fetchCallCount = 0;
let completeEventTriggered = false;
let maxRetries = 3;

DataONEObject.prototype.fetch = function (options) {
console.log("[should trigger complete event after fetching all models] fetching model: " + this.get("identifier"));
fetchCallCount++;
options.success();
};

dataPackage.triggerComplete = function () {
completeEventTriggered = true;
console.log("[should trigger complete event after fetching all models] complete event triggered");
};

dataPackage.fetchMemberModels(models, 0, 2, 100, maxRetries);

setTimeout(function () {
console.log("[should trigger complete event after fetching all models] "+ fetchCallCount + " fetch calls");
console.log("[should trigger complete event after fetching all models] "+ completeEventTriggered);
expect(fetchCallCount).to.equal(models.length * (maxRetries + 1));
expect(completeEventTriggered).to.be.true;
DataONEObject.prototype.fetch = originalFetch;
done();
}, 1000);
});
});
});
});
});
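These DataPackage tests replace DataONEObject.prototype.fetch by hand and restore it after each assertion. If sinon is available in this suite, as it already is for the DataItemView spec below, the same override could be written as a self-restoring stub, for example:

// Sketch: the same fetch override expressed as a sinon stub (assumes sinon is
// loaded for this suite); restore() puts the original prototype method back.
const fetchStub = sinon
  .stub(DataONEObject.prototype, "fetch")
  .callsFake(function (options) {
    options.success(); // or: options.error({ statusText: "Internal Server Error" })
  });

dataPackage.fetchMemberModels(models, 0, 2, 5000, 3);

// ...assert on fetchStub.callCount instead of a hand-rolled counter...

fetchStub.restore();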
143 changes: 143 additions & 0 deletions test/js/specs/unit/views/DataItemView.spec.js
@@ -0,0 +1,143 @@
define([
"jquery",
"underscore",
"backbone",
"models/DataONEObject",
"views/DataItemView",
], function ($, _, Backbone, DataONEObject, DataItemView) {
var expect = chai.expect;

describe("DataItemView Test Suite", function () {
let dataItemView, model, collection;

// Set up the test environment before each test
beforeEach(function () {
// Create a new DataONEObject model with a test identifier
model = new DataONEObject({ identifier: "test-id" });
// Create a new Backbone collection
collection = new Backbone.Collection();
// Initialize the DataItemView with the model and collection
dataItemView = new DataItemView({
model: model,
collection: collection
});

// Stub the getParentScienceMetadata function to return a mock object
sinon.stub(dataItemView, "getParentScienceMetadata").returns({
id: "mock-sci-meta-id"
});

// Stub the getParentDataPackage function to return a mock object with a spy on the add method
sinon.stub(dataItemView, "getParentDataPackage").returns({
packageModel: { id: "mock-package-id" },
add: sinon.spy()
});
});

// Clean up the test environment after each test
afterEach(function () {
// Restore the stubbed methods to their original implementations
dataItemView.getParentScienceMetadata.restore();
dataItemView.getParentDataPackage.restore();
dataItemView.remove();
});

describe("uploadFilesInBatch", function () {
it("should upload files in batches", function (done) {
// Create a list of DataONEObject models with initial upload status
const fileList = [
new DataONEObject({ uploadFile: true, uploadStatus: "l" }),
new DataONEObject({ uploadFile: true, uploadStatus: "l" }),
new DataONEObject({ uploadFile: true, uploadStatus: "l" })
];

// Define the batch size for the upload
const batchSize = 2;
// Spy on the uploadFilesInBatch method to verify its call
const uploadSpy = sinon.spy(dataItemView, "uploadFilesInBatch");
// Stub the save method to simulate setting the upload status to "p"
const saveStub = sinon.stub(DataONEObject.prototype, "save").callsFake(function () {
this.set("uploadStatus", "p");
});
// Stub the calculateChecksum method to simulate setting checksum attributes
const checksumStub = sinon.stub(DataONEObject.prototype, "calculateChecksum").callsFake(function () {
this.set("checksum", "fakeChecksum");
this.set("checksumAlgorithm", "fakeAlgorithm");
this.trigger("checksumCalculated", this.attributes);
});

// Call the method to be tested
dataItemView.uploadFilesInBatch(fileList, batchSize);

// Simulate the completion of the upload by setting the upload status to "c"
fileList.forEach(function (file) {
file.set("uploadStatus", "c");
});

// Use setTimeout to allow asynchronous operations to complete
setTimeout(function () {
// Log the call counts for debugging purposes
console.log("[should upload files in batches] uploadSpy.callCount: ", uploadSpy.callCount);
console.log("[should upload files in batches] checksumSpy.callCount: ", checksumStub.callCount);

// Verify that the method was called once with the correct arguments
expect(uploadSpy.calledOnce).to.be.true;
expect(uploadSpy.calledWith(fileList, batchSize)).to.be.true;
// Verify that the calculateChecksum method was called the expected number of times
console.log("[should upload files in batches] fileList.length: ", fileList.length);
console.log("[should upload files in batches] saveSpy.callCount: ", saveStub.callCount);
expect(checksumStub.callCount).to.equal(fileList.length);
expect(saveStub.callCount).to.equal(fileList.length);
// Restore the spies and stubs
uploadSpy.restore();
checksumStub.restore();
saveStub.restore();
// Indicate that the test is complete
done();
}, 0);
});
});

describe("addFiles", function () {
it("should add files to the collection", function (done) {
// Create a fake file object to simulate a file upload
const fakeFile = new Blob(["fake file content"], { type: "text/plain" });
fakeFile.name = "fakeFile.txt";

// Create a mock event object with the necessary properties
const event = {
stopPropagation: sinon.spy(),
preventDefault: sinon.spy(),
target: { files: [fakeFile] },
originalEvent: { dataTransfer: { files: [fakeFile] } },
delegateTarget: { dataset: { id: "test-id" } }
};

// Stub the methods to simulate their behavior
const uploadStub = sinon.stub(dataItemView, "uploadFilesInBatch").returns(true);
const d1ObjectStub = sinon.stub(DataONEObject.prototype, "initialize").returns(true);

// Call the method to be tested
dataItemView.addFiles.call(dataItemView, event);

// Use setTimeout to allow asynchronous operations to complete
setTimeout(function () {
// Verify that the event methods were called
expect(event.stopPropagation.calledOnce).to.be.true;
expect(event.preventDefault.calledOnce).to.be.true;
// Verify that the DataONEObject initialize method was called
console.log("[should add files to the collection] d1ObjectStub.callCount: ", d1ObjectStub.callCount);
expect(d1ObjectStub.calledOnce).to.be.true;
// Verify that the uploadFilesInBatch method was called
console.log("[should add files to the collection] uploadStub.callCount: ", uploadStub.callCount);
expect(uploadStub.calledOnce).to.be.true;
// Restore the stubs
uploadStub.restore();
d1ObjectStub.restore();
// Indicate that the test is complete
done();
}, 0);
});
});
});
});
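Both new specs wait on real setTimeout delays (100 ms to 1 s) before asserting. If those waits ever prove flaky, sinon fake timers are one option for making them deterministic; a sketch, assuming nothing else in the test depends on real timers:

// Sketch: drive pending timeouts with sinon fake timers instead of real waits.
const clock = sinon.useFakeTimers();

dataItemView.uploadFilesInBatch(fileList, batchSize);
clock.tick(100); // fires any setTimeout callbacks scheduled so far

// ...assertions that previously sat inside a real setTimeout callback...

clock.restore();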
