Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

6634 primefaces8 #6804

Merged
merged 17 commits into from Apr 17, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
3 changes: 3 additions & 0 deletions doc/release-notes/6634-primefaces-8.md
@@ -0,0 +1,3 @@
## PrimeFaces 8

PrimeFaces, the open-source UI framework upon which the Dataverse front end is built, has been updated to the most recent version. This provides security updates and bug fixes, and will also allow Dataverse developers to take advantage of new features and enhancements.
3 changes: 0 additions & 3 deletions doc/release-notes/6650-export-import-mismatch

This file was deleted.

2 changes: 1 addition & 1 deletion pom.xml
Expand Up @@ -303,7 +303,7 @@
<dependency>
<groupId>org.primefaces</groupId>
<artifactId>primefaces</artifactId>
<version>7.0</version>
<version>8.0</version>
</dependency>
<dependency>
<groupId>org.primefaces.themes</groupId>
Expand Down
13 changes: 2 additions & 11 deletions src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -1,7 +1,6 @@
package edu.harvard.iq.dataverse;

import edu.harvard.iq.dataverse.provenance.ProvPopupFragmentBean;
import edu.harvard.iq.dataverse.PackagePopupFragmentBean;
import edu.harvard.iq.dataverse.api.AbstractApiBean;
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
import edu.harvard.iq.dataverse.authorization.Permission;
Expand Down Expand Up @@ -79,7 +78,7 @@
import javax.inject.Named;

import org.primefaces.event.FileUploadEvent;
import org.primefaces.model.UploadedFile;
import org.primefaces.model.file.UploadedFile;
import javax.validation.ConstraintViolation;
import org.apache.commons.httpclient.HttpClient;
//import org.primefaces.context.RequestContext;
Expand All @@ -88,11 +87,9 @@
import javax.faces.model.SelectItem;
import java.util.logging.Level;
import edu.harvard.iq.dataverse.datasetutility.WorldMapPermissionHelper;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
import edu.harvard.iq.dataverse.engine.command.impl.AbstractSubmitToArchiveCommand;
import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDatasetCommand;
import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataFileCommand;
import edu.harvard.iq.dataverse.engine.command.impl.GetLatestPublishedDatasetVersionCommand;
import edu.harvard.iq.dataverse.engine.command.impl.RequestRsyncScriptCommand;
import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetResult;
Expand Down Expand Up @@ -121,19 +118,13 @@
import org.primefaces.event.TabChangeEvent;
import org.primefaces.event.data.PageEvent;

import edu.harvard.iq.dataverse.makedatacount.MakeDataCountUtil;
import edu.harvard.iq.dataverse.search.FacetCategory;
import edu.harvard.iq.dataverse.search.FacetLabel;
import edu.harvard.iq.dataverse.search.SearchConstants;
import edu.harvard.iq.dataverse.search.SearchFields;
import edu.harvard.iq.dataverse.search.SearchServiceBean;
import edu.harvard.iq.dataverse.search.SearchUtil;
import edu.harvard.iq.dataverse.search.SolrClientService;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.TimeZone;
import javax.servlet.http.HttpServletRequest;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.FacetField;
Expand Down Expand Up @@ -4541,7 +4532,7 @@ public void handleLabelsFileUpload(FileUploadEvent event) {

InputStream uploadStream = null;
try {
uploadStream = file.getInputstream();
uploadStream = file.getInputStream();
} catch (IOException ioex) {
logger.log(Level.WARNING, ioex, ()->"the file "+file.getFileName()+" failed to upload!");
List<String> args = Arrays.asList(file.getFileName());
Expand Down
Expand Up @@ -19,7 +19,7 @@
import javax.inject.Inject;
import javax.inject.Named;
import org.primefaces.event.FileUploadEvent;
import org.primefaces.model.UploadedFile;
import org.primefaces.model.file.UploadedFile;

@ViewScoped
@Named("DatasetWidgetsPage")
Expand Down Expand Up @@ -132,7 +132,7 @@ public void handleImageFileUpload(FileUploadEvent event) {
logger.fine("handleImageFileUpload clicked");
UploadedFile uploadedFile = event.getFile();
try {
updateDatasetThumbnailCommand = new UpdateDatasetThumbnailCommand(dvRequestService.getDataverseRequest(), dataset, UpdateDatasetThumbnailCommand.UserIntent.setNonDatasetFileAsThumbnail, null, uploadedFile.getInputstream());
updateDatasetThumbnailCommand = new UpdateDatasetThumbnailCommand(dvRequestService.getDataverseRequest(), dataset, UpdateDatasetThumbnailCommand.UserIntent.setNonDatasetFileAsThumbnail, null, uploadedFile.getInputStream());
} catch (IOException ex) {
String error = "Unexpected error while uploading file.";
logger.warning("Problem uploading dataset thumbnail to dataset id " + dataset.getId() + ". " + error + " . Exception: " + ex);
Expand All @@ -141,7 +141,7 @@ public void handleImageFileUpload(FileUploadEvent event) {
}
File file = null;
try {
file = FileUtil.inputStreamToFile(uploadedFile.getInputstream());
file = FileUtil.inputStreamToFile(uploadedFile.getInputStream());
} catch (IOException ex) {
Logger.getLogger(DatasetWidgetsPage.class.getName()).log(Level.SEVERE, null, ex);
return;
Expand Down
22 changes: 4 additions & 18 deletions src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
Expand Up @@ -22,7 +22,6 @@
import edu.harvard.iq.dataverse.engine.command.CommandContext;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataFileCommand;
import edu.harvard.iq.dataverse.engine.command.impl.RequestRsyncScriptCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetThumbnailCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
Expand All @@ -48,9 +47,7 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
Expand All @@ -65,7 +62,7 @@
import javax.inject.Inject;
import javax.inject.Named;
import org.primefaces.event.FileUploadEvent;
import org.primefaces.model.UploadedFile;
import org.primefaces.model.file.UploadedFile;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonArray;
Expand All @@ -75,25 +72,14 @@
import org.apache.commons.httpclient.methods.GetMethod;
import java.text.DateFormat;
import java.util.Arrays;
import java.util.HashSet;
import java.util.ResourceBundle;
import java.util.Set;
import java.util.logging.Level;
import javax.faces.component.UIComponent;
import javax.faces.component.UIInput;
import javax.faces.event.AjaxBehaviorEvent;
import javax.faces.event.FacesEvent;
import javax.faces.event.ValueChangeEvent;
import javax.faces.validator.ValidatorException;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import javax.validation.ConstraintViolation;
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.ValidatorFactory;
import org.apache.commons.lang.StringUtils;
import org.primefaces.PrimeFaces;
//import org.primefaces.context.RequestContext;

/**
*
Expand Down Expand Up @@ -1984,7 +1970,7 @@ public void handleFileUpload(FileUploadEvent event) throws IOException {
*/
if (isFileReplaceOperation()){

handleReplaceFileUpload(event, uFile.getInputstream(),
handleReplaceFileUpload(event, uFile.getInputStream(),
uFile.getFileName(),
uFile.getContentType(),
event,
Expand All @@ -2007,7 +1993,7 @@ public void handleFileUpload(FileUploadEvent event) throws IOException {
// Note: A single uploaded file may produce multiple datafiles -
// for example, multiple files can be extracted from an uncompressed
// zip file.
dFileList = FileUtil.createDataFiles(workingVersion, uFile.getInputstream(), uFile.getFileName(), uFile.getContentType(), null, null, systemConfig);
dFileList = FileUtil.createDataFiles(workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, systemConfig);

} catch (IOException ioex) {
logger.warning("Failed to process and/or save the file " + uFile.getFileName() + "; " + ioex.getMessage());
Expand Down Expand Up @@ -2865,7 +2851,7 @@ public void handleLabelsFileUpload(FileUploadEvent event) {

InputStream uploadStream = null;
try {
uploadStream = file.getInputstream();
uploadStream = file.getInputStream();
} catch (IOException ioex) {
logger.info("the file " + file.getFileName() + " failed to upload!");
List<String> args = Arrays.asList(file.getFileName());
Expand Down
Expand Up @@ -5,10 +5,9 @@
*/
package edu.harvard.iq.dataverse;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.ejb.EJB;
import org.primefaces.model.FilterMeta;
import org.primefaces.model.LazyDataModel;
import org.primefaces.model.SortOrder;

Expand All @@ -29,7 +28,7 @@ public LazyFileMetadataDataModel(Long datasetVersionId, DataFileServiceBean file

@Override
public List<FileMetadata> load(int first, int pageSize, String sortField,
SortOrder sortOrder, Map<String, Object> filters) {
SortOrder sortOrder, Map<String, FilterMeta> filters) {

List<FileMetadata> listFileMetadata = null; //fileServiceBean.findFileMetadataByDatasetVersionIdLazy(datasetVersionId, pageSize, sortField, sortField, first);
//this.setRowCount(fileServiceBean.findCountByDatasetVersionId(datasetVersionId).intValue());
Expand Down
Expand Up @@ -5,12 +5,10 @@
*/
package edu.harvard.iq.dataverse;

import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseThemeCommand;
import edu.harvard.iq.dataverse.util.BundleUtil;
import edu.harvard.iq.dataverse.util.JsfHelper;
import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
Expand All @@ -24,7 +22,6 @@
import javax.ejb.EJB;
import javax.faces.application.FacesMessage;
import javax.faces.component.UIComponent;
import javax.faces.component.UIInput;
import javax.faces.component.html.HtmlInputText;
import javax.faces.context.FacesContext;
import javax.faces.validator.ValidatorException;
Expand All @@ -37,7 +34,7 @@
//import org.primefaces.context.RequestContext;

import org.primefaces.event.FileUploadEvent;
import org.primefaces.model.UploadedFile;
import org.primefaces.model.file.UploadedFile;

/**
*
Expand Down Expand Up @@ -232,7 +229,7 @@ public void handleImageFooterFileUpload(FileUploadEvent event) {
uploadedFileFooter.createNewFile();
}
logger.finer("created file");
Files.copy(uFile.getInputstream(), uploadedFileFooter.toPath(), StandardCopyOption.REPLACE_EXISTING);
Files.copy(uFile.getInputStream(), uploadedFileFooter.toPath(), StandardCopyOption.REPLACE_EXISTING);
logger.finer("copied inputstream to file");
editDv.getDataverseTheme().setLogoFooter(uFile.getFileName());

Expand All @@ -259,7 +256,7 @@ public void handleImageFileUpload(FileUploadEvent event) {
uploadedFile.createNewFile();
}
logger.finer("created file");
Files.copy(uFile.getInputstream(), uploadedFile.toPath(),StandardCopyOption.REPLACE_EXISTING);
Files.copy(uFile.getInputStream(), uploadedFile.toPath(),StandardCopyOption.REPLACE_EXISTING);
logger.finer("copied inputstream to file");
editDv.getDataverseTheme().setLogo(uFile.getFileName());

Expand Down
Expand Up @@ -8,13 +8,9 @@
import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.FileMetadata;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.primefaces.event.FileUploadEvent;
import org.primefaces.model.UploadedFile;


/**
* Adding single file replace to the EditDatafilesPage.
Expand Down
Expand Up @@ -6,8 +6,6 @@
package edu.harvard.iq.dataverse.mydata;

import edu.harvard.iq.dataverse.DvObject;
import static edu.harvard.iq.dataverse.DvObject.DATASET_DTYPE_STRING;
import static edu.harvard.iq.dataverse.DvObject.DATAVERSE_DTYPE_STRING;
import edu.harvard.iq.dataverse.search.IndexServiceBean;
import edu.harvard.iq.dataverse.authorization.DataverseRolePermissionHelper;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
Expand All @@ -18,14 +16,11 @@
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
import javax.json.Json;
import javax.json.JsonArrayBuilder;
import javax.json.JsonObjectBuilder;
import org.apache.commons.lang.StringUtils;
import org.primefaces.json.JSONException;
import org.primefaces.json.JSONObject;

/**
*
Expand Down
Expand Up @@ -10,11 +10,9 @@
import edu.harvard.iq.dataverse.FilePage;
import edu.harvard.iq.dataverse.api.AbstractApiBean;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
import java.io.IOException;
import java.util.logging.Logger;
import org.primefaces.event.FileUploadEvent;
import org.primefaces.model.UploadedFile;
import edu.harvard.iq.dataverse.engine.command.impl.PersistProvJsonCommand;
import edu.harvard.iq.dataverse.engine.command.impl.DeleteProvJsonCommand;
import edu.harvard.iq.dataverse.engine.command.impl.GetProvJsonCommand;
Expand All @@ -39,6 +37,7 @@
import javax.faces.context.ExternalContext;
import javax.faces.context.FacesContext;
import javax.json.JsonObject;
import org.primefaces.model.file.UploadedFile;

/**
* This bean contains functionality for the provenance json pop up
Expand Down Expand Up @@ -92,7 +91,7 @@ public class ProvPopupFragmentBean extends AbstractApiBean implements java.io.Se
public void handleFileUpload(FileUploadEvent event) throws IOException {
jsonUploadedTempFile = event.getFile();
provJsonParsedEntities = new HashMap<>();
provJsonState = IOUtils.toString(jsonUploadedTempFile.getInputstream());
provJsonState = IOUtils.toString(jsonUploadedTempFile.getInputStream());


if(!provUtil.isProvValid(provJsonState)) { //if uploaded prov-json does not comply with schema
Expand Down Expand Up @@ -214,7 +213,7 @@ public String stagePopupChanges(boolean saveInPopup) throws IOException{
popupDataFile.setProvEntityName(null);
}
if(null != jsonUploadedTempFile && "application/json".equalsIgnoreCase(jsonUploadedTempFile.getContentType())) { //delete and create again can both happen at once
stagingEntry.provJson = IOUtils.toString(jsonUploadedTempFile.getInputstream());
stagingEntry.provJson = IOUtils.toString(jsonUploadedTempFile.getInputStream());
stagingEntry.deleteJson = false;

jsonUploadedTempFile = null;
Expand Down