Commit
preliminary #10977
landreev committed Nov 15, 2024
1 parent f838e76 commit 1d2d776
Showing 5 changed files with 101 additions and 31 deletions.
@@ -138,7 +138,8 @@ public class AddReplaceFileHelper{
private String newStorageIdentifier; // step 30
private String newCheckSum; // step 30
private ChecksumType newCheckSumType; //step 30

private Long suppliedFileSize = null;

// -- Optional
private DataFile fileToReplace; // step 25

@@ -610,11 +611,14 @@ private boolean runAddReplacePhase1(Dataset owner,
return false;

}
if(optionalFileParams != null) {
if(optionalFileParams.hasCheckSum()) {
newCheckSum = optionalFileParams.getCheckSum();
newCheckSumType = optionalFileParams.getCheckSumType();
}
if (optionalFileParams != null) {
if (optionalFileParams.hasCheckSum()) {
newCheckSum = optionalFileParams.getCheckSum();
newCheckSumType = optionalFileParams.getCheckSumType();
}
if (optionalFileParams.hasFileSize()) {
suppliedFileSize = optionalFileParams.getFileSize();
}
}

msgt("step_030_createNewFilesViaIngest");
@@ -1204,20 +1208,11 @@ private boolean step_030_createNewFilesViaIngest(){
clone = workingVersion.cloneDatasetVersion();
}
try {
/*CreateDataFileResult result = FileUtil.createDataFiles(workingVersion,
this.newFileInputStream,
this.newFileName,
this.newFileContentType,
this.newStorageIdentifier,
this.newCheckSum,
this.newCheckSumType,
this.systemConfig);*/

UploadSessionQuotaLimit quota = null;
if (systemConfig.isStorageQuotasEnforced()) {
quota = fileService.getUploadSessionQuotaLimit(dataset);
}
Command<CreateDataFileResult> cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType);
Command<CreateDataFileResult> cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType, suppliedFileSize);
CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd);
initialFileList = createDataFilesResult.getDataFiles();

@@ -76,6 +76,8 @@ public class OptionalFileParams {
public static final String MIME_TYPE_ATTR_NAME = "mimeType";
private String checkSumValue;
private ChecksumType checkSumType;
public static final String FILE_SIZE_ATTR_NAME = "fileSize";
private Long fileSize;
public static final String LEGACY_CHECKSUM_ATTR_NAME = "md5Hash";
public static final String CHECKSUM_OBJECT_NAME = "checksum";
public static final String CHECKSUM_OBJECT_TYPE = "@type";
@@ -268,6 +270,18 @@ public String getCheckSum() {
public ChecksumType getCheckSumType() {
return checkSumType;
}

public boolean hasFileSize() {
return fileSize != null;
}

public Long getFileSize() {
return fileSize;
}

public void setFileSize(long fileSize) {
this.fileSize = fileSize;
}

/**
* Set tags
@@ -416,7 +430,13 @@ else if ((jsonObj.has(CHECKSUM_OBJECT_NAME)) && (!jsonObj.get(CHECKSUM_OBJECT_NA
this.checkSumType = ChecksumType.fromString(((JsonObject) jsonObj.get(CHECKSUM_OBJECT_NAME)).get(CHECKSUM_OBJECT_TYPE).getAsString());

}

// -------------------------------
// get file size as a Long, if supplied
// -------------------------------
if ((jsonObj.has(FILE_SIZE_ATTR_NAME)) && (!jsonObj.get(FILE_SIZE_ATTR_NAME).isJsonNull())){

this.fileSize = jsonObj.get(FILE_SIZE_ATTR_NAME).getAsLong();
}
// -------------------------------
// get tags
// -------------------------------
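For orientation (this paragraph and the snippet below are an editorial sketch, not part of the commit): OptionalFileParams parses these attributes from the per-file JSON supplied to the add/replace file APIs, so a client that already knows a file's size can now pass it along. Only the "fileSize" and "mimeType" attribute names come from the diff above; the other keys and all example values are assumptions for illustration.

import com.google.gson.JsonObject;

public class FileEntrySketch {
    public static void main(String[] args) {
        JsonObject fileEntry = new JsonObject();
        // assumed example values for the usual per-file attributes
        fileEntry.addProperty("storageIdentifier", "globus://1234-abcd/data.tab");
        fileEntry.addProperty("fileName", "data.tab");
        fileEntry.addProperty("mimeType", "text/tab-separated-values");
        // new: the pre-determined size, in bytes, picked up via FILE_SIZE_ATTR_NAME
        fileEntry.addProperty("fileSize", 1234567L);
        System.out.println(fileEntry);
    }
}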
@@ -93,6 +93,10 @@ public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion versi
this(aRequest, version, inputStream, fileName, suppliedContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType, null, null);
}

public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, UploadSessionQuotaLimit quota, String newCheckSum, DataFile.ChecksumType newCheckSumType, Long newFileSize) {
this(aRequest, version, inputStream, fileName, suppliedContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType, newFileSize, null);
}

// This version of the command must be used when files are created in the
// context of creating a brand new dataset (from the Add Dataset page):

@@ -636,6 +640,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException
createIngestFailureReport(datafile, warningMessage);
datafile.SetIngestProblem();
}
logger.info("datafile size: " + datafile.getFilesize());
if (datafile.getFilesize() < 0) {
datafile.setFilesize(fileSize);
}
@@ -654,6 +659,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException
quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + fileSize);
}

logger.info("datafile size (again): " + datafile.getFilesize());
return CreateDataFileResult.success(fileName, finalType, datafiles);
}

@@ -284,6 +284,48 @@ private int makeDir(GlobusEndpoint endpoint, String dir) {
return result.status;
}

private Map<String, Long> lookupFileSizes(GlobusEndpoint endpoint, String dir) {
Map<String, Long> ret = new HashMap<>();

MakeRequestResponse result;

try {
URL url = new URL(
"https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint.getId()
+ "/ls?path=" + dir);
result = makeRequest(url, "Bearer", endpoint.getClientToken(), "GET", null);

switch (result.status) {
case 200:
logger.fine("Looked up directory " + dir + " successfully.");
break;
default:
logger.warning("Status " + result.status + " received when looking up dir " + dir);
logger.fine("Response: " + result.jsonResponse);
}
} catch (MalformedURLException ex) {
// Misconfiguration
logger.warning("Failed to create dir on " + endpoint.getId());
return null;
}

JsonObject listObject = JsonUtil.getJsonObject(result.jsonResponse);
JsonArray dataArray = listObject.getJsonArray("DATA");

if (dataArray != null && !dataArray.isEmpty()) {
for (int i = 0; i < dataArray.size(); i++) {
String dataType = dataArray.getJsonObject(i).getString("DATA_TYPE", null);
if (dataType != null && dataType.equals("file")) {
String fileName = dataArray.getJsonObject(i).getString("name");
long fileSize = dataArray.getJsonObject(i).getJsonNumber("size").longValueExact();
ret.put(fileName, fileSize);
}
}
}

return ret;
}
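An aside on how the map returned by lookupFileSizes() might be consumed (a hedged sketch, not code from this commit; the helper name and the idea of enriching the per-file JSON entry are assumptions): the caller can attach the known size to each file entry so the downstream add-files code does not need a separate size lookup per file.

import jakarta.json.Json;
import jakarta.json.JsonObject;
import java.util.Map;

class FileSizeLookupUsageSketch {
    // fileSizes is the map returned by lookupFileSizes(endpoint, dir); it may be
    // null if the Globus "ls" request failed, so the caller falls back gracefully.
    static JsonObject withKnownSize(JsonObject fileJsonObject, String fileName, Map<String, Long> fileSizes) {
        if (fileSizes == null || !fileSizes.containsKey(fileName)) {
            return fileJsonObject; // size unknown; it will be confirmed later, per file
        }
        return Json.createObjectBuilder(fileJsonObject)
                .add("fileSize", fileSizes.get(fileName))
                .build();
    }
}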

private int requestPermission(GlobusEndpoint endpoint, Dataset dataset, Permissions permissions) {
Gson gson = new GsonBuilder().create();
MakeRequestResponse result = null;
@@ -972,12 +1014,6 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, Aut
fileJsonObject = path.apply(fileJsonObject);
addFilesJsonData.add(fileJsonObject);
countSuccess++;
// } else {
// globusLogger.info(fileName
// + " will be skipped from adding to dataset by second API due to missing
// values ");
// countError++;
// }
} else {
myLogger.info(fileName
+ " will be skipped from adding to dataset in the final AddReplaceFileHelper.addFiles() call. ");
@@ -1211,7 +1247,7 @@ private GlobusTaskState globusStatusCheck(GlobusEndpoint endpoint, String taskId
return task;
}

public JsonObject calculateMissingMetadataFields(List<String> inputList, Logger globusLogger)
private JsonObject calculateMissingMetadataFields(List<String> inputList, Logger globusLogger)
throws InterruptedException, ExecutionException, IOException {

List<CompletableFuture<FileDetailsHolder>> hashvalueCompletableFutures = inputList.stream()
@@ -1230,7 +1266,7 @@ public JsonObject calculateMissingMetadataFields(List<String> inputList, Logger
});

JsonArrayBuilder filesObject = (JsonArrayBuilder) completableFuture.get();

JsonObject output = Json.createObjectBuilder().add("files", filesObject).build();

return output;
@@ -344,10 +344,20 @@ public List<DataFile> saveAndAddFilesToDataset(DatasetVersion version,
try {
StorageIO<DvObject> dataAccess = DataAccess.getStorageIO(dataFile);
//Populate metadata
dataAccess.open(DataAccessOption.READ_ACCESS);
// (the .open() above makes a remote call to check if
// the file exists and obtains its size)
confirmedFileSize = dataAccess.getSize();

// There are direct upload sub-cases where the file size
// is already known at this point. For example, direct uploads
// to S3 that go through the jsf dataset page. Or the Globus
// uploads, where the file sizes are looked up in bulk on
// the completion of the remote upload task.
if (dataFile.getFilesize() > 0) {
confirmedFileSize = dataFile.getFilesize();
} else {
dataAccess.open(DataAccessOption.READ_ACCESS);
// (the .open() above makes a remote call to check if
// the file exists and obtains its size)
confirmedFileSize = dataAccess.getSize();
}

// For directly-uploaded files, we will perform the file size
// limit and quota checks here. Perform them *again*, in
@@ -362,13 +372,16 @@
if (fileSizeLimit == null || confirmedFileSize < fileSizeLimit) {

//set file size
logger.fine("Setting file size: " + confirmedFileSize);
dataFile.setFilesize(confirmedFileSize);
if (dataFile.getFilesize() < 1) {
logger.fine("Setting file size: " + confirmedFileSize);
dataFile.setFilesize(confirmedFileSize);
}

if (dataAccess instanceof S3AccessIO) {
((S3AccessIO<DvObject>) dataAccess).removeTempTag();
}
savedSuccess = true;
logger.info("directly uploaded file successfully saved. file size: "+dataFile.getFilesize());
}
} catch (IOException ioex) {
logger.warning("Failed to get file size, storage id, or failed to remove the temp tag on the saved S3 object" + dataFile.getStorageIdentifier() + " ("
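The comment in the hunk above captures the key decision: when a direct upload already carries a known size (S3 uploads via the dataset page, or Globus uploads whose sizes were looked up in bulk), the remote .open() call can be skipped and the stored size used for the file-size-limit and quota checks. A standalone sketch of that decision, for illustration only (not code from this commit; the class and package names follow the Dataverse sources referenced above, but treat them as assumptions):

import java.io.IOException;
import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.DvObject;
import edu.harvard.iq.dataverse.dataaccess.DataAccessOption;
import edu.harvard.iq.dataverse.dataaccess.StorageIO;

class ConfirmedSizeSketch {
    // Returns the size to use for the file-size-limit and quota checks.
    static long confirmFileSize(DataFile dataFile, StorageIO<DvObject> dataAccess) throws IOException {
        if (dataFile.getFilesize() > 0) {
            // size already known up front (e.g. S3 direct upload via the page,
            // or a Globus upload where sizes were looked up in bulk)
            return dataFile.getFilesize();
        }
        // otherwise make the remote call; it confirms the object exists and
        // obtains its size from the storage driver
        dataAccess.open(DataAccessOption.READ_ACCESS);
        return dataAccess.getSize();
    }
}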
