
Commit

more incremental changes #10977
landreev committed Nov 18, 2024
1 parent 1d2d776 commit eef0d22
Showing 2 changed files with 23 additions and 5 deletions.
First changed file:
@@ -215,7 +215,7 @@ public long retrieveSizeFromMedia() {
 JsonArray dataArray = responseJson.getJsonArray("DATA");
 if (dataArray != null && dataArray.size() != 0) {
 //File found
-return (long) responseJson.getJsonArray("DATA").getJsonObject(0).getJsonNumber("size").longValueExact();
+return (long) dataArray.getJsonObject(0).getJsonNumber("size").longValueExact();
 }
 } else {
 logger.warning("Response from " + get.getURI().toString() + " was "
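A note on the jakarta.json call chain in this hunk (not part of the commit): getJsonNumber("size").longValueExact() returns the value as a long and throws if it cannot be represented exactly, whereas longValue() would silently round or truncate. A minimal, self-contained illustration with made-up values:

import jakarta.json.Json;
import jakarta.json.JsonNumber;
import jakarta.json.JsonObject;
import java.io.StringReader;

public class LongValueExactDemo {
    public static void main(String[] args) {
        // Made-up document; the real one is the Globus response handled above.
        JsonObject obj = Json.createReader(
                new StringReader("{\"size\": 1048576, \"ratio\": 0.75}")).readObject();

        System.out.println(obj.getJsonNumber("size").longValueExact()); // 1048576

        JsonNumber ratio = obj.getJsonNumber("ratio");
        System.out.println(ratio.longValue()); // 0 -- fractional part silently dropped
        try {
            ratio.longValueExact(); // throws: not representable exactly as a long
        } catch (ArithmeticException e) {
            System.out.println("not representable exactly as a long");
        }
    }
}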
Second changed file:
@@ -74,6 +74,7 @@
 import edu.harvard.iq.dataverse.util.URLTokenUtil;
 import edu.harvard.iq.dataverse.util.UrlSignerUtil;
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
+import jakarta.json.JsonNumber;
 import jakarta.json.JsonReader;
 import jakarta.persistence.EntityManager;
 import jakarta.persistence.PersistenceContext;
@@ -980,9 +981,16 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, Aut

 inputList.add(fileId + "IDsplit" + fullPath + "IDsplit" + fileName);
 }
+
+// Look up the sizes of all the files in the dataset folder, to avoid
+// looking them up one by one later:
+// @todo: we should only be doing this if this is a managed store, probably?
+GlobusEndpoint endpoint = getGlobusEndpoint(dataset);
+Map<String, Long> fileSizeMap = lookupFileSizes(endpoint, endpoint.getBasePath());
+
 // calculateMissingMetadataFields: checksum, mimetype
 JsonObject newfilesJsonObject = calculateMissingMetadataFields(inputList, myLogger);
 
 JsonArray newfilesJsonArray = newfilesJsonObject.getJsonArray("files");
 logger.fine("Size: " + newfilesJsonArray.size());
 logger.fine("Val: " + JsonUtil.prettyPrint(newfilesJsonArray.getJsonObject(0)));
@@ -1006,13 +1014,23 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, Aut
 if (newfileJsonObject != null) {
 logger.fine("List Size: " + newfileJsonObject.size());
 // if (!newfileJsonObject.get(0).getString("hash").equalsIgnoreCase("null")) {
-JsonPatch path = Json.createPatchBuilder()
+JsonPatch patch = Json.createPatchBuilder()
 .add("/md5Hash", newfileJsonObject.get(0).getString("hash")).build();
-fileJsonObject = path.apply(fileJsonObject);
-path = Json.createPatchBuilder()
+fileJsonObject = patch.apply(fileJsonObject);
+patch = Json.createPatchBuilder()
 .add("/mimeType", newfileJsonObject.get(0).getString("mime")).build();
-fileJsonObject = path.apply(fileJsonObject);
+fileJsonObject = patch.apply(fileJsonObject);
 addFilesJsonData.add(fileJsonObject);
+// If we already know the size of this file on the Globus end,
+// we'll pass it to /addFiles, to avoid looking up file sizes
+// one by one:
+if (fileSizeMap != null && fileSizeMap.get(fileId) != null) {
+Long uploadedFileSize = fileSizeMap.get(fileId);
+myLogger.fine("Found size for file " + fileId + ": " + uploadedFileSize + " bytes");
+patch = Json.createPatchBuilder()
+.add("/fileSize", Json.createValue(uploadedFileSize)).build();
+fileJsonObject = patch.apply(fileJsonObject);
+}
 countSuccess++;
 } else {
 myLogger.info(fileName
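The patch-and-apply pattern above is standard jakarta.json usage: JsonObject is immutable, so each JsonPatch.apply() returns a new object with the extra field. A small, self-contained sketch of the /fileSize step (not from the commit; field names and values are made up, and the real entry is whatever /addFiles expects):

import jakarta.json.Json;
import jakarta.json.JsonObject;
import jakarta.json.JsonPatch;

public class FileSizePatchDemo {
    public static void main(String[] args) {
        // Made-up file entry standing in for one element of addFilesJsonData.
        JsonObject fileJsonObject = Json.createObjectBuilder()
                .add("fileName", "data.csv")
                .build();

        Long uploadedFileSize = 1048576L; // in the real code: fileSizeMap.get(fileId)

        JsonPatch patch = Json.createPatchBuilder()
                .add("/fileSize", Json.createValue(uploadedFileSize))
                .build();
        fileJsonObject = patch.apply(fileJsonObject); // returns a new, extended JsonObject

        System.out.println(fileJsonObject); // {"fileName":"data.csv","fileSize":1048576}
    }
}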
