Skip to content

Commit

Permalink
Merge branch 'develop' into 9913-template-preview
Browse files Browse the repository at this point in the history
  • Loading branch information
sekmiller committed Oct 2, 2023
2 parents 97aeed6 + c26e1e7 commit 973e471
Show file tree
Hide file tree
Showing 13 changed files with 1,002 additions and 547 deletions.
61 changes: 61 additions & 0 deletions src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
Expand Up @@ -10,6 +10,7 @@
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.FileSortFieldAndOrder;
import edu.harvard.iq.dataverse.util.FileUtil;
import edu.harvard.iq.dataverse.util.SystemConfig;
import java.io.IOException;
import java.sql.Timestamp;
import java.util.ArrayList;
Expand Down Expand Up @@ -63,6 +64,8 @@ public class DataFileServiceBean implements java.io.Serializable {

@EJB EmbargoServiceBean embargoService;

@EJB SystemConfig systemConfig;

@PersistenceContext(unitName = "VDCNet-ejbPU")
private EntityManager em;

Expand Down Expand Up @@ -136,6 +139,39 @@ public class DataFileServiceBean implements java.io.Serializable {
*/
public static final String MIME_TYPE_PACKAGE_FILE = "application/vnd.dataverse.file-package";

/**
 * Simple mutable value holder describing the state of a user's storage
 * quota: the total number of bytes allocated to them and the number of
 * bytes they have already used. Instances are produced by
 * getUserStorageQuota() on the enclosing service bean.
 */
public class UserStorageQuota {
    // Total number of bytes this user is allowed to store.
    private Long totalAllocatedInBytes = 0L;
    // Number of bytes this user has already consumed.
    private Long totalUsageInBytes = 0L;

    public UserStorageQuota(Long allocated, Long used) {
        totalAllocatedInBytes = allocated;
        totalUsageInBytes = used;
    }

    /** @return the total allocation for the user, in bytes */
    public Long getTotalAllocatedInBytes() {
        return this.totalAllocatedInBytes;
    }

    public void setTotalAllocatedInBytes(Long totalAllocatedInBytes) {
        this.totalAllocatedInBytes = totalAllocatedInBytes;
    }

    /** @return the number of bytes already used by the user */
    public Long getTotalUsageInBytes() {
        return this.totalUsageInBytes;
    }

    public void setTotalUsageInBytes(Long totalUsageInBytes) {
        this.totalUsageInBytes = totalUsageInBytes;
    }

    /**
     * @return how many bytes of the allocation are still unused; never
     *         negative — clamped to 0 when usage meets or exceeds the
     *         allocation.
     */
    public Long getRemainingQuotaInBytes() {
        long remaining = totalAllocatedInBytes - totalUsageInBytes;
        return remaining > 0L ? remaining : 0L;
    }
}

public DataFile find(Object pk) {
return em.find(DataFile.class, pk);
}
Expand Down Expand Up @@ -1359,4 +1395,29 @@ public Embargo findEmbargo(Long id) {
DataFile d = find(id);
return d.getEmbargo();
}

/**
 * Calculates the combined size, in bytes, of all the DataFiles created by
 * the given user, via a SUM aggregate query on DataFile.filesize.
 *
 * @param user the AuthenticatedUser whose files are being totalled
 * @return the total size in bytes of every DataFile whose creator is this
 *         user; 0L when the user has created no files (SUM returns null)
 */
public Long getStorageUsageByCreator(AuthenticatedUser user) {
    Query query = em.createQuery("SELECT SUM(o.filesize) FROM DataFile o WHERE o.creator.id=:creatorId");

    try {
        Long totalSize = (Long)query.setParameter("creatorId", user.getId()).getSingleResult();
        // FINE, not INFO: this can run on every upload-page init, and
        // per-request INFO logging is too noisy (matches the logger.fine
        // convention used elsewhere in this codebase).
        logger.fine("total size for user: "+totalSize);
        return totalSize == null ? 0L : totalSize;
    } catch (NoResultException nre) {
        // Defensive only: an aggregate SELECT SUM(...) normally returns a
        // single row (whose value is null when no rows match), so this
        // exception is not expected in practice.
        logger.fine("NoResultException, returning 0L");
        return 0L;
    }
}

/**
 * Looks up the storage quota state for the given user (the dataset
 * argument is accepted for future per-dataset/per-collection quota
 * calculations but is not used by the current implementation).
 *
 * @param user the user whose quota is being calculated
 * @param dataset the dataset being uploaded into (currently unused)
 * @return a UserStorageQuota combining the configured allocation and the
 *         user's current usage
 */
public UserStorageQuota getUserStorageQuota(AuthenticatedUser user, Dataset dataset) {
    // For now ("testing only") a single pre-set, installation-wide limit
    // applies to everybody, rather than per-user or per-collection quotas:
    Long allocated = systemConfig.getTestStorageQuotaLimit();
    // Likewise, usage is simply the combined size of every file this user
    // has created; a real deployment will likely need a much more complex
    // calculation.
    Long used = getStorageUsageByCreator(user);
    return new UserStorageQuota(allocated, used);
}
}
Expand Up @@ -1126,5 +1126,5 @@ public void deleteHarvestedDataset(Dataset dataset, DataverseRequest request, Lo
hdLogger.warning("Failed to destroy the dataset");
}
}

}
101 changes: 86 additions & 15 deletions src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
Expand Up @@ -2,6 +2,7 @@

import edu.harvard.iq.dataverse.provenance.ProvPopupFragmentBean;
import edu.harvard.iq.dataverse.DataFile.ChecksumType;
import edu.harvard.iq.dataverse.DataFileServiceBean.UserStorageQuota;
import edu.harvard.iq.dataverse.api.AbstractApiBean;
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
import edu.harvard.iq.dataverse.authorization.Permission;
Expand All @@ -28,6 +29,7 @@
import edu.harvard.iq.dataverse.engine.command.impl.RequestRsyncScriptCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetThumbnailCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDataFilesCommand;
import edu.harvard.iq.dataverse.ingest.IngestRequest;
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
import edu.harvard.iq.dataverse.ingest.IngestUtil;
Expand Down Expand Up @@ -186,7 +188,13 @@ public enum Referrer {
// Used to store results of permissions checks
private final Map<String, Boolean> datasetPermissionMap = new HashMap<>(); // { Permission human_name : Boolean }

// Size limit of an individual file: (set for the storage volume used)
private Long maxFileUploadSizeInBytes = null;
// Total amount of data that the user should be allowed to upload.
// Will be calculated in real time based on various level quotas -
// for this user and/or this collection/dataset, etc. We should
// assume that it may change during the user session.
private Long maxTotalUploadSizeInBytes = null;
private Long maxIngestSizeInBytes = null;
// CSV: 4.8 MB, DTA: 976.6 KB, XLSX: 5.7 MB, etc.
private String humanPerFormatTabularLimits = null;
Expand All @@ -198,6 +206,7 @@ public enum Referrer {
private final int NUMBER_OF_SCROLL_ROWS = 25;

private DataFile singleFile = null;
private UserStorageQuota userStorageQuota = null;

public DataFile getSingleFile() {
return singleFile;
Expand Down Expand Up @@ -340,6 +349,18 @@ public boolean isUnlimitedUploadFileSize() {

return this.maxFileUploadSizeInBytes == null;
}

/**
 * The total amount of data, in bytes, that this user is still allowed to
 * upload (derived from their remaining storage quota during page init),
 * or null when no total-size quota is being enforced for this session.
 */
public Long getMaxTotalUploadSizeInBytes() {
return maxTotalUploadSizeInBytes;
}

/**
 * Human-readable rendering (e.g. "4.8 MB") of the remaining total upload
 * allowance, for display in the UI.
 * NOTE(review): maxTotalUploadSizeInBytes is null when quotas are not
 * enforced — confirm FileSizeChecker.bytesToHumanReadable handles null,
 * or guard this call behind isStorageQuotaEnforced() in the page.
 */
public String getHumanMaxTotalUploadSizeInBytes() {
return FileSizeChecker.bytesToHumanReadable(maxTotalUploadSizeInBytes);
}

/**
 * Whether a storage quota is in effect for this session: true when a
 * UserStorageQuota was populated during page init (which only happens
 * when systemConfig.isStorageQuotasEnforced() was true at that time).
 */
public boolean isStorageQuotaEnforced() {
return userStorageQuota != null;
}

public Long getMaxIngestSizeInBytes() {
return maxIngestSizeInBytes;
Expand Down Expand Up @@ -508,15 +529,26 @@ public String initCreateMode(String modeToken, DatasetVersion version, MutableBo
selectedFiles = selectedFileMetadatasList;

this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId());
if (systemConfig.isStorageQuotasEnforced()) {
this.userStorageQuota = datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset);
this.maxTotalUploadSizeInBytes = userStorageQuota.getRemainingQuotaInBytes();
} else {
this.maxTotalUploadSizeInBytes = null;
}
this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit();
this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits();
this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit();

logger.fine("done");

saveEnabled = true;

return null;
}

/**
 * Whether the user has fully exhausted their storage quota: quotas must be
 * enforced installation-wide, a quota must have been calculated for this
 * session, and the remaining allowance must be exactly zero (remaining is
 * clamped to 0L even when usage exceeds the allocation).
 */
public boolean isQuotaExceeded() {
return systemConfig.isStorageQuotasEnforced() && userStorageQuota != null && userStorageQuota.getRemainingQuotaInBytes() == 0;
}

public String init() {
// default mode should be EDIT
Expand Down Expand Up @@ -559,10 +591,13 @@ public String init() {

clone = workingVersion.cloneDatasetVersion();
this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId());
if (systemConfig.isStorageQuotasEnforced()) {
this.userStorageQuota = datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset);
this.maxTotalUploadSizeInBytes = userStorageQuota.getRemainingQuotaInBytes();
}
this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit();
this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits();
this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit();
this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId());

hasValidTermsOfAccess = isHasValidTermsOfAccess();
if (!hasValidTermsOfAccess) {
Expand Down Expand Up @@ -656,7 +691,7 @@ public String init() {
if (isHasPublicStore()){
JH.addMessage(FacesMessage.SEVERITY_WARN, getBundleString("dataset.message.label.fileAccess"), getBundleString("dataset.message.publicInstall"));
}

return null;
}

Expand Down Expand Up @@ -1493,14 +1528,16 @@ public void handleDropBoxUpload(ActionEvent event) {
// for example, multiple files can be extracted from an uncompressed
// zip file.
//datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream");
CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream", null, null, systemConfig);
//CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream", null, null, systemConfig);
Command<CreateDataFileResult> cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, dropBoxStream, fileName, "application/octet-stream", null, userStorageQuota, null);
CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd);
datafiles = createDataFilesResult.getDataFiles();
Optional.ofNullable(editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult)).ifPresent(errorMessage -> errorMessages.add(errorMessage));

} catch (IOException ex) {
} catch (CommandException ex) {
this.logger.log(Level.SEVERE, "Error during ingest of DropBox file {0} from link {1}", new Object[]{fileName, fileLink});
continue;
}/*catch (FileExceedsMaxSizeException ex){
} /*catch (FileExceedsMaxSizeException ex){
this.logger.log(Level.SEVERE, "Error during ingest of DropBox file {0} from link {1}: {2}", new Object[]{fileName, fileLink, ex.getMessage()});
continue;
}*/ finally {
Expand Down Expand Up @@ -2023,7 +2060,21 @@ public void handleFileUpload(FileUploadEvent event) throws IOException {
// Note: A single uploaded file may produce multiple datafiles -
// for example, multiple files can be extracted from an uncompressed
// zip file.
CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, systemConfig);
///CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, systemConfig);

Command<CreateDataFileResult> cmd;
if (mode == FileEditMode.CREATE) {
// This is a file upload in the context of creating a brand new
// dataset that does not yet exist in the database. We must
// use the version of the Create New Files constructor that takes
// the parent Dataverse as the extra argument:
cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, userStorageQuota, null, null, null, workingVersion.getDataset().getOwner());
} else {
cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, userStorageQuota, null);
}
CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd);


dFileList = createDataFilesResult.getDataFiles();
String createDataFilesError = editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult);
if(createDataFilesError != null) {
Expand All @@ -2032,8 +2083,14 @@ public void handleFileUpload(FileUploadEvent event) throws IOException {
}

} catch (IOException ioex) {
// shouldn't we try and communicate to the user what happened?
logger.warning("Failed to process and/or save the file " + uFile.getFileName() + "; " + ioex.getMessage());
return;
} catch (CommandException cex) {
// shouldn't we try and communicate to the user what happened?
errorMessages.add(cex.getMessage());
uploadComponentId = event.getComponent().getClientId();
return;
}
/*catch (FileExceedsMaxSizeException ex) {
logger.warning("Failed to process and/or save the file " + uFile.getFileName() + "; " + ex.getMessage());
Expand Down Expand Up @@ -2111,6 +2168,11 @@ public void handleExternalUpload() {
- Max size NOT specified in db: default is unlimited
- Max size specified in db: check to make sure file is within limits
// ---------------------------- */
/**
* @todo: this size check is probably redundant here, since the new
* CreateNewFilesCommand is going to perform it (and the quota
* checks too, if enabled)
*/
if ((!this.isUnlimitedUploadFileSize()) && (fileSize > this.getMaxFileUploadSizeInBytes())) {
String warningMessage = "Uploaded file \"" + fileName + "\" exceeded the limit of " + fileSize + " bytes and was not uploaded.";
sio.delete();
Expand All @@ -2130,18 +2192,27 @@ public void handleExternalUpload() {
List<DataFile> datafiles = new ArrayList<>();

// -----------------------------------------------------------
// Send it through the ingest service
// Execute the CreateNewDataFiles command:
// -----------------------------------------------------------

Dataverse parent = null;

if (mode == FileEditMode.CREATE) {
// This is a file upload in the context of creating a brand new
// dataset that does not yet exist in the database. We must
// pass the parent Dataverse to the CreateNewFiles command
// constructor. The RequiredPermission on the command in this
// scenario = Permission.AddDataset on the parent dataverse.
parent = workingVersion.getDataset().getOwner();
}

try {

// Note: A single uploaded file may produce multiple datafiles -
// for example, multiple files can be extracted from an uncompressed
// zip file.
//datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream");
CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType, systemConfig);

Command<CreateDataFileResult> cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, null, fileName, contentType, fullStorageIdentifier, userStorageQuota, checksumValue, checksumType, fileSize, parent);
CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd);
datafiles = createDataFilesResult.getDataFiles();
Optional.ofNullable(editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult)).ifPresent(errorMessage -> errorMessages.add(errorMessage));
} catch (IOException ex) {
} catch (CommandException ex) {
logger.log(Level.SEVERE, "Error during ingest of file {0}", new Object[]{fileName});
}

Expand Down

0 comments on commit 973e471

Please sign in to comment.