diff --git a/doc/release-notes/10116-incomplete-metadata-label-setting.md b/doc/release-notes/10116-incomplete-metadata-label-setting.md
new file mode 100644
index 00000000000..769100c3804
--- /dev/null
+++ b/doc/release-notes/10116-incomplete-metadata-label-setting.md
@@ -0,0 +1 @@
+Fixed a bug that caused the ``incomplete metadata`` label to be shown for published datasets with incomplete metadata in certain scenarios. This label is now shown for draft versions of such datasets and for published datasets that the user can edit. The label can also be hidden for published datasets (regardless of edit rights) by setting the new option ``dataverse.ui.show-validity-label-when-published`` to ``false``.
diff --git a/doc/release-notes/10477-metadatablocks-api-extension-input-levels.md b/doc/release-notes/10477-metadatablocks-api-extension-input-levels.md
new file mode 100644
index 00000000000..77cc7f59773
--- /dev/null
+++ b/doc/release-notes/10477-metadatablocks-api-extension-input-levels.md
@@ -0,0 +1,3 @@
+Changed ``api/dataverses/{id}/metadatablocks`` so that setting the query parameter ``onlyDisplayedOnCreate=true`` also returns metadata blocks with dataset field type input levels configured as required on the General Information page of the collection, in addition to the metadata blocks and their fields with the property ``displayOnCreate=true`` (which was the original behavior).
+
+A new endpoint ``api/dataverses/{id}/inputLevels`` has been created for updating the dataset field type input levels of a collection via API.
diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md
index f694703f0a6..0f076d32cf8 100644
--- a/doc/release-notes/6.2-release-notes.md
+++ b/doc/release-notes/6.2-release-notes.md
@@ -417,12 +417,16 @@ In the following commands we assume that Payara 6 is installed in `/usr/local/pa
 
 As noted above, deployment of the war file might take several minutes due a database migration script required for the new storage quotas feature.
 
-6\. Restart Payara
+6\. For installations with internationalization:
+
+- Please remember to update translations via [Dataverse language packs](https://github.com/GlobalDataverseCommunityConsortium/dataverse-language-packs).
+
+7\. Restart Payara
 
 - `service payara stop`
 - `service payara start`
 
-7\. Update the following Metadata Blocks to reflect the incremental improvements made to the handling of core metadata fields:
+8\. Update the following Metadata Blocks to reflect the incremental improvements made to the handling of core metadata fields:
 
 ```
 wget https://github.com/IQSS/dataverse/releases/download/v6.2/geospatial.tsv
@@ -442,7 +446,7 @@ wget https://github.com/IQSS/dataverse/releases/download/v6.2/biomedical.tsv
 curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/biomedical.tsv
 ```
 
-8\. For installations with custom or experimental metadata blocks:
+9\. For installations with custom or experimental metadata blocks:
 
 - Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/6.2/installation/prerequisites.html#solr-init-script))
 
@@ -455,7 +459,7 @@ curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/ta
 
 - Restart Solr instance (usually `service solr restart` depending on solr/OS)
 
-9\. Reindex Solr:
+10\. Reindex Solr:
 
 For details, see https://guides.dataverse.org/en/6.2/admin/solr-search-index.html but here is the reindex command:
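For reference, the reindex call the linked guide describes is the standard in-place reindex against the admin API; a minimal sketch, assuming a default localhost installation:

```
# Kick off an in-place reindex of all content (default localhost installation assumed)
curl http://localhost:8080/api/admin/index
```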
diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index bcc37d6db1c..f22f8727fb0 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -898,7 +898,46 @@ The following attributes are supported:
 * ``filePIDsEnabled`` ("true" or "false") Restricted to use by superusers and only when the :ref:`:AllowEnablingFilePIDsPerCollection <:AllowEnablingFilePIDsPerCollection>` setting is true. Enables or disables registration of file-level PIDs in datasets within the collection (overriding the instance-wide setting).
 
 .. _collection-storage-quotas:
-
+
+Update Collection Input Levels
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Updates the dataset field type input levels of a collection.
+
+Please note that this endpoint overwrites all the input levels of the collection, so if you want to keep the existing ones, you will need to include them in the JSON request body.
+
+If one of the input levels corresponds to a dataset field type belonging to a metadata block that is not assigned to the collection, the metadata block will be added to the collection.
+
+This endpoint expects a JSON array with the following format::
+
+    [
+        {
+            "datasetFieldTypeName": "datasetFieldTypeName1",
+            "required": true,
+            "include": true
+        },
+        {
+            "datasetFieldTypeName": "datasetFieldTypeName2",
+            "required": true,
+            "include": true
+        }
+    ]
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export ID=root
+  export JSON='[{"datasetFieldTypeName":"geographicCoverage", "required":true, "include":true}, {"datasetFieldTypeName":"country", "required":true, "include":true}]'
+
+  curl -X PUT -H "X-Dataverse-key: $API_TOKEN" -H "Content-Type:application/json" "$SERVER_URL/api/dataverses/$ID/inputLevels" -d "$JSON"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -X PUT -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Content-Type:application/json" "https://demo.dataverse.org/api/dataverses/root/inputLevels" -d '[{"datasetFieldTypeName":"geographicCoverage", "required":true, "include":true}, {"datasetFieldTypeName":"country", "required":true, "include":true}]'
+
 Collection Storage Quotas
 ~~~~~~~~~~~~~~~~~~~~~~~~~
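As a hedged illustration of the round trip (response shape inferred from the ``JsonPrinter`` changes and integration tests later in this diff; the surrounding attributes and message wording may differ), a successful call returns the updated collection JSON carrying the new ``inputLevels`` array:

```
curl -X PUT -H "X-Dataverse-key: $API_TOKEN" -H "Content-Type:application/json" \
  "$SERVER_URL/api/dataverses/$ID/inputLevels" -d "$JSON"
# Sketch of the response shape (other collection attributes omitted):
# {
#   "status": "OK",
#   "data": {
#     "alias": "root",
#     "inputLevels": [
#       {"datasetFieldTypeName": "geographicCoverage", "required": true, "include": true},
#       {"datasetFieldTypeName": "country", "required": true, "include": true}
#     ]
#   }
# }
```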
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index 4b35b2a37e6..c18e53cc6e1 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -2945,6 +2945,24 @@ Defaults to ``false``.
 
 Can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable ``DATAVERSE_API_ALLOW_INCOMPLETE_METADATA``. Will accept ``[tT][rR][uU][eE]|1|[oO][nN]`` as "true" expressions.
 
+.. _dataverse.ui.show-validity-label-when-published:
+
+dataverse.ui.show-validity-label-when-published
++++++++++++++++++++++++++++++++++++++++++++++++
+
+Even if you do not allow incomplete metadata to be saved, some datasets may still end up with incomplete metadata, e.g., after a metadata field has been made mandatory. Datasets where that field is not filled out become incomplete and can therefore be marked with the ``incomplete metadata`` label. Note that you need to reindex the datasets after changing the metadata definitions; reindexing updates the labels and other dataset information according to the new rules.
+
+When this option is enabled (the default), published datasets with incomplete metadata show the ``incomplete metadata`` label, but only to users who can edit the dataset. Draft versions with incomplete metadata always show the label. You can list such datasets, for example, with the metadata validity filter on the "My Data" page, which can be turned on by enabling the :ref:`dataverse.ui.show-validity-filter` option.
+
+When this option is set to ``false``, only draft datasets with incomplete metadata show the label; published datasets never do, regardless of edit rights.
+
+Defaults to ``true``.
+
+Can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable
+``DATAVERSE_UI_SHOW_VALIDITY_LABEL_WHEN_PUBLISHED``. Will accept ``[tT][rR][uU][eE]|1|[oO][nN]`` as "true" expressions.
+
 .. _dataverse.signposting.level1-author-limit:
 
 dataverse.signposting.level1-author-limit
@@ -3142,6 +3160,8 @@ Defaults to ``false``.
 
 Can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable ``DATAVERSE_UI_ALLOW_REVIEW_FOR_INCOMPLETE``. Will accept ``[tT][rR][uU][eE]|1|[oO][nN]`` as "true" expressions.
 
+.. _dataverse.ui.show-validity-filter:
+
 dataverse.ui.show-validity-filter
 +++++++++++++++++++++++++++++++++
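As a usage sketch for the new option (the ``asadmin`` path depends on your Payara installation), it can be set either as a JVM option or through any MicroProfile Config source:

```
# Hide the "incomplete metadata" label on all published datasets (sketch; path may differ)
./asadmin create-jvm-options '-Ddataverse.ui.show-validity-label-when-published=false'

# or via MicroProfile Config, e.g. an environment variable
export DATAVERSE_UI_SHOW_VALIDITY_LABEL_WHEN_PUBLISHED=false
```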
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index 6bf8547712e..d9cb10026a3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -2296,13 +2296,11 @@ private void displayPublishMessage(){
 
     public boolean isValid() {
         if (valid == null) {
-            DatasetVersion version = dataset.getLatestVersion();
-            if (!version.isDraft()) {
+            if (workingVersion.isDraft() || (canUpdateDataset() && JvmSettings.UI_SHOW_VALIDITY_LABEL_WHEN_PUBLISHED.lookupOptional(Boolean.class).orElse(true))) {
+                valid = workingVersion.isValid();
+            } else {
                 valid = true;
             }
-            DatasetVersion newVersion = version.cloneDatasetVersion();
-            newVersion.setDatasetFields(newVersion.initDatasetFields());
-            valid = newVersion.isValid();
         }
         return valid;
     }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
index 5fd963f3931..943693355a3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
@@ -1728,7 +1728,36 @@ public List<ConstraintViolation<DatasetField>> validateRequired() {
     }
 
     public boolean isValid() {
-        return validate().isEmpty();
+        // first clone to leave the original untouched
+        final DatasetVersion newVersion = this.cloneDatasetVersion();
+        // initDatasetFields
+        newVersion.setDatasetFields(newVersion.initDatasetFields());
+        // remove special "N/A" values and empty values
+        newVersion.removeEmptyValues();
+        // check validity of present fields and detect missing mandatory fields
+        return newVersion.validate().isEmpty();
+    }
+
+    private void removeEmptyValues() {
+        if (this.getDatasetFields() != null) {
+            for (DatasetField dsf : this.getDatasetFields()) {
+                removeEmptyValues(dsf);
+            }
+        }
+    }
+
+    private void removeEmptyValues(DatasetField dsf) {
+        if (dsf.getDatasetFieldType().isPrimitive()) { // primitive
+            final Iterator<DatasetFieldValue> i = dsf.getDatasetFieldValues().iterator();
+            while (i.hasNext()) {
+                final String v = i.next().getValue();
+                if (StringUtils.isBlank(v) || DatasetField.NA_VALUE.equals(v)) {
+                    i.remove();
+                }
+            }
+        } else {
+            dsf.getDatasetFieldCompoundValues().forEach(cv -> cv.getChildDatasetFields().forEach(v -> removeEmptyValues(v)));
+        }
     }
 
     public Set<ConstraintViolation> validate() {
diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
index 42db9c1392a..78b1827c798 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
@@ -411,6 +411,14 @@ public List<DataverseFieldTypeInputLevel> getDataverseFieldTypeInputLevels() {
         return dataverseFieldTypeInputLevels;
     }
 
+    public boolean isDatasetFieldTypeRequiredAsInputLevel(Long datasetFieldTypeId) {
+        for(DataverseFieldTypeInputLevel dataverseFieldTypeInputLevel : dataverseFieldTypeInputLevels) {
+            if (dataverseFieldTypeInputLevel.getDatasetFieldType().getId().equals(datasetFieldTypeId) && dataverseFieldTypeInputLevel.isRequired()) {
+                return true;
+            }
+        }
+        return false;
+    }
 
     public Template getDefaultTemplate() {
         return defaultTemplate;
diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java
index 3a87990d9cc..9889d23cf55 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java
@@ -34,6 +34,7 @@ import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean;
 import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.FileUtil;
@@ -314,13 +315,18 @@ private void displayPublishMessage(){
         }
     }
 
+    Boolean valid = null;
+
     public boolean isValid() {
-        if (!fileMetadata.getDatasetVersion().isDraft()) {
-            return true;
+        if (valid == null) {
+            final DatasetVersion workingVersion = fileMetadata.getDatasetVersion();
+            if (workingVersion.isDraft() || (canUpdateDataset() && JvmSettings.UI_SHOW_VALIDITY_LABEL_WHEN_PUBLISHED.lookupOptional(Boolean.class).orElse(true))) {
+                valid = workingVersion.isValid();
+            } else {
+                valid = true;
+            }
         }
-        DatasetVersion newVersion = fileMetadata.getDatasetVersion().cloneDatasetVersion();
-        newVersion.setDatasetFields(newVersion.initDatasetFields());
-        return newVersion.isValid();
+        return valid;
     }
 
     private boolean canViewUnpublishedDataset() {
diff --git a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java
index c4c95fae551..1e2a34f5472 100644
--- a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java
@@ -58,10 +58,18 @@ public List<MetadataBlock> listMetadataBlocksDisplayedOnCreate(Dataverse ownerDa
 
         if (ownerDataverse != null) {
             Root<Dataverse> dataverseRoot = criteriaQuery.from(Dataverse.class);
+            Join<Dataverse, DataverseFieldTypeInputLevel> datasetFieldTypeInputLevelJoin = dataverseRoot.join("dataverseFieldTypeInputLevels", JoinType.LEFT);
+
+            Predicate requiredPredicate = criteriaBuilder.and(
+                    datasetFieldTypeInputLevelJoin.get("datasetFieldType").in(metadataBlockRoot.get("datasetFieldTypes")),
+                    criteriaBuilder.isTrue(datasetFieldTypeInputLevelJoin.get("required")));
+
+            Predicate
unionPredicate = criteriaBuilder.or(displayOnCreatePredicate, requiredPredicate); + criteriaQuery.where(criteriaBuilder.and( criteriaBuilder.equal(dataverseRoot.get("id"), ownerDataverse.getId()), metadataBlockRoot.in(dataverseRoot.get("metadataBlocks")), - displayOnCreatePredicate + unionPredicate )); } else { criteriaQuery.where(displayOnCreatePredicate); diff --git a/src/main/java/edu/harvard/iq/dataverse/Shib.java b/src/main/java/edu/harvard/iq/dataverse/Shib.java index 24c0f9d7926..f9cf061e771 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Shib.java +++ b/src/main/java/edu/harvard/iq/dataverse/Shib.java @@ -59,6 +59,8 @@ public class Shib implements java.io.Serializable { SettingsServiceBean settingsService; @EJB SystemConfig systemConfig; + @EJB + UserServiceBean userService; HttpServletRequest request; @@ -259,6 +261,7 @@ else if (ShibAffiliationOrder.equals("firstAffiliation")) { state = State.REGULAR_LOGIN_INTO_EXISTING_SHIB_ACCOUNT; logger.fine("Found user based on " + userPersistentId + ". Logging in."); logger.fine("Updating display info for " + au.getName()); + userService.updateLastLogin(au); authSvc.updateAuthenticatedUser(au, displayInfo); logInUserAndSetShibAttributes(au); String prettyFacesHomePageString = getPrettyFacesHomePageString(false); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 7e5a5e8965c..02b60fdb32a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1,27 +1,10 @@ package edu.harvard.iq.dataverse.api; -import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetFieldType; -import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.DataverseFacet; -import edu.harvard.iq.dataverse.DataverseContact; -import edu.harvard.iq.dataverse.DataverseFeaturedDataverse; -import edu.harvard.iq.dataverse.DataverseLinkingServiceBean; -import edu.harvard.iq.dataverse.DataverseMetadataBlockFacet; -import edu.harvard.iq.dataverse.DataverseServiceBean; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.api.datadeposit.SwordServiceBean; import edu.harvard.iq.dataverse.api.dto.DataverseMetadataBlockFacetDTO; import edu.harvard.iq.dataverse.authorization.DataverseRole; -import edu.harvard.iq.dataverse.DvObject; -import edu.harvard.iq.dataverse.FeaturedDataverseServiceBean; -import edu.harvard.iq.dataverse.GlobalId; -import edu.harvard.iq.dataverse.GuestbookResponseServiceBean; -import edu.harvard.iq.dataverse.GuestbookServiceBean; -import edu.harvard.iq.dataverse.MetadataBlock; -import edu.harvard.iq.dataverse.RoleAssignment; import edu.harvard.iq.dataverse.api.dto.ExplicitGroupDTO; import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO; @@ -37,46 +20,7 @@ import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.dataverse.DataverseUtil; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; -import edu.harvard.iq.dataverse.engine.command.impl.AddRoleAssigneesToExplicitGroupCommand; -import edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand; -import edu.harvard.iq.dataverse.engine.command.impl.CreateDataverseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.CreateExplicitGroupCommand; -import 
edu.harvard.iq.dataverse.engine.command.impl.CreateNewDatasetCommand; -import edu.harvard.iq.dataverse.engine.command.impl.CreateRoleCommand; -import edu.harvard.iq.dataverse.engine.command.impl.DeleteCollectionQuotaCommand; -import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataverseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataverseLinkingDataverseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.DeleteExplicitGroupCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetDatasetSchemaCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetCollectionQuotaCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetCollectionStorageUseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateMetadataBlockFacetRootCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetDataverseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetDataverseStorageSizeCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetExplicitGroupCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ImportDatasetCommand; -import edu.harvard.iq.dataverse.engine.command.impl.LinkDataverseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ListDataverseContentCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ListExplicitGroupsCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ListFacetsCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ListFeaturedCollectionsCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ListMetadataBlockFacetsCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ListMetadataBlocksCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ListRoleAssignments; -import edu.harvard.iq.dataverse.engine.command.impl.ListRolesCommand; -import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetCommand; -import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetResult; -import edu.harvard.iq.dataverse.engine.command.impl.MoveDataverseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.RemoveRoleAssigneesFromExplicitGroupCommand; -import edu.harvard.iq.dataverse.engine.command.impl.RevokeRoleCommand; -import edu.harvard.iq.dataverse.engine.command.impl.SetCollectionQuotaCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseDefaultContributorRoleCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseMetadataBlocksCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateExplicitGroupCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateMetadataBlockFacetsCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ValidateDatasetJsonCommand; +import edu.harvard.iq.dataverse.engine.command.impl.*; import edu.harvard.iq.dataverse.pidproviders.PidProvider; import edu.harvard.iq.dataverse.pidproviders.PidUtil; import edu.harvard.iq.dataverse.settings.JvmSettings; @@ -91,23 +35,14 @@ import edu.harvard.iq.dataverse.util.json.JsonPrinter; import edu.harvard.iq.dataverse.util.json.JsonUtil; -import static edu.harvard.iq.dataverse.util.json.JsonPrinter.brief; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.TreeSet; +import java.io.StringReader; +import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; import 
jakarta.ejb.EJB;
 import jakarta.ejb.EJBException;
 import jakarta.ejb.Stateless;
-import jakarta.json.Json;
-import jakarta.json.JsonArrayBuilder;
-import jakarta.json.JsonNumber;
-import jakarta.json.JsonObject;
-import jakarta.json.JsonObjectBuilder;
-import jakarta.json.JsonString;
-import jakarta.json.JsonValue;
+import jakarta.json.*;
 import jakarta.json.JsonValue.ValueType;
 import jakarta.json.stream.JsonParsingException;
 import jakarta.validation.ConstraintViolationException;
@@ -131,16 +66,11 @@
 import java.io.OutputStream;
 import java.text.MessageFormat;
 import java.text.SimpleDateFormat;
-import java.util.Arrays;
-import java.util.Date;
-import java.util.Map;
-import java.util.Optional;
 import java.util.stream.Collectors;
 import jakarta.servlet.http.HttpServletResponse;
 import jakarta.ws.rs.WebApplicationException;
 import jakarta.ws.rs.core.Context;
 import jakarta.ws.rs.core.StreamingOutput;
-import java.util.ArrayList;
 import javax.xml.stream.XMLStreamException;
 
 /**
@@ -172,10 +102,10 @@ public class Dataverses extends AbstractApiBean {
 
     @EJB
     DataverseServiceBean dataverseService;
-    
+
     @EJB
     DataverseLinkingServiceBean linkingService;
-    
+
     @EJB
     FeaturedDataverseServiceBean featuredDataverseService;
 
@@ -707,6 +637,43 @@ public Response updateAttribute(@Context ContainerRequestContext crc, @PathParam
         }
     }
 
+    @PUT
+    @AuthRequired
+    @Path("{identifier}/inputLevels")
+    public Response updateInputLevels(@Context ContainerRequestContext crc, @PathParam("identifier") String identifier, String jsonBody) {
+        try {
+            Dataverse dataverse = findDataverseOrDie(identifier);
+            List<DataverseFieldTypeInputLevel> newInputLevels = parseInputLevels(jsonBody, dataverse);
+            execCommand(new UpdateDataverseInputLevelsCommand(dataverse, createDataverseRequest(getRequestUser(crc)), newInputLevels));
+            return ok(BundleUtil.getStringFromBundle("dataverse.update.success"), JsonPrinter.json(dataverse));
+        } catch (WrappedResponse e) {
+            return e.getResponse();
+        }
+    }
+
+    private List<DataverseFieldTypeInputLevel> parseInputLevels(String jsonBody, Dataverse dataverse) throws WrappedResponse {
+        JsonArray inputLevelsArray = Json.createReader(new StringReader(jsonBody)).readArray();
+
+        List<DataverseFieldTypeInputLevel> newInputLevels = new ArrayList<>();
+        for (JsonValue value : inputLevelsArray) {
+            JsonObject inputLevel = (JsonObject) value;
+            String datasetFieldTypeName = inputLevel.getString("datasetFieldTypeName");
+            DatasetFieldType datasetFieldType = datasetFieldSvc.findByName(datasetFieldTypeName);
+
+            if (datasetFieldType == null) {
+                String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.updateinputlevels.error.invalidfieldtypename"), datasetFieldTypeName);
+                throw new WrappedResponse(badRequest(errorMessage));
+            }
+
+            boolean required = inputLevel.getBoolean("required");
+            boolean include = inputLevel.getBoolean("include");
+
+            newInputLevels.add(new DataverseFieldTypeInputLevel(datasetFieldType, dataverse, required, include));
+        }
+
+        return newInputLevels;
+    }
+
     @DELETE
     @AuthRequired
     @Path("{linkingDataverseId}/deleteLink/{linkedDataverseId}")
@@ -726,14 +693,15 @@ public Response listMetadataBlocks(@Context ContainerRequestContext crc,
                                        @QueryParam("onlyDisplayedOnCreate") boolean onlyDisplayedOnCreate,
                                        @QueryParam("returnDatasetFieldTypes") boolean returnDatasetFieldTypes) {
         try {
+            Dataverse dataverse = findDataverseOrDie(dvIdtf);
             final List<MetadataBlock> metadataBlocks = execCommand(
                     new ListMetadataBlocksCommand(
                             createDataverseRequest(getRequestUser(crc)),
-                            findDataverseOrDie(dvIdtf),
+                            dataverse,
                             onlyDisplayedOnCreate
                     )
             );
-            return ok(json(metadataBlocks,
returnDatasetFieldTypes, onlyDisplayedOnCreate)); + return ok(json(metadataBlocks, returnDatasetFieldTypes, onlyDisplayedOnCreate, dataverse)); } catch (WrappedResponse we) { return we.getResponse(); } @@ -836,8 +804,8 @@ public Response listFacets(@Context ContainerRequestContext crc, @PathParam("ide return e.getResponse(); } } - - + + @GET @AuthRequired @Path("{identifier}/featured") @@ -860,19 +828,19 @@ public Response getFeaturedDataverses(@Context ContainerRequestContext crc, @Pat return e.getResponse(); } } - - + + @POST @AuthRequired @Path("{identifier}/featured") /** * Allows user to set featured dataverses - must have edit dataverse permission - * + * */ public Response setFeaturedDataverses(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, String dvAliases) { List dvsFromInput = new LinkedList<>(); - - + + try { for (JsonString dvAlias : Util.asJsonArray(dvAliases).getValuesAs(JsonString.class)) { @@ -886,7 +854,7 @@ public Response setFeaturedDataverses(@Context ContainerRequestContext crc, @Pat if (dvsFromInput.isEmpty()) { return error(Response.Status.BAD_REQUEST, "Please provide a valid Json array of dataverse collection aliases to be featured."); } - + Dataverse dataverse = findDataverseOrDie(dvIdtf); List featuredSource = new ArrayList<>(); List featuredTarget = new ArrayList<>(); @@ -919,7 +887,7 @@ public Response setFeaturedDataverses(@Context ContainerRequestContext crc, @Pat // by passing null for Facets and DataverseFieldTypeInputLevel, those are not changed execCommand(new UpdateDataverseCommand(dataverse, null, featuredTarget, createDataverseRequest(getRequestUser(crc)), null)); return ok("Featured Dataverses of dataverse " + dvIdtf + " updated."); - + } catch (WrappedResponse ex) { return ex.getResponse(); } catch (JsonParsingException jpe){ @@ -927,7 +895,7 @@ public Response setFeaturedDataverses(@Context ContainerRequestContext crc, @Pat } } - + @DELETE @AuthRequired @Path("{identifier}/featured") diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java index 1c0f5010059..4a8fb123fd4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java @@ -307,11 +307,9 @@ public AuthenticatedUser getUpdateAuthenticatedUser( String authenticationProvid if (user != null && !user.isDeactivated()) { user = userService.updateLastLogin(user); } - + if ( user == null ) { throw new IllegalStateException("Authenticated user does not exist. 
The functionality to support creating one at this point in authentication has been removed."); - //return createAuthenticatedUser( - // new UserRecordIdentifier(authenticationProviderId, resp.getUserId()), resp.getUserId(), resp.getUserDisplayInfo(), true ); } else { if (BuiltinAuthenticationProvider.PROVIDER_ID.equals(user.getAuthenticatedUserLookup().getAuthenticationProviderId())) { return user; diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java index 0fd0852b4df..8f3dc07fdea 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.authorization.providers.oauth2; import edu.harvard.iq.dataverse.DataverseSession; +import edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationProvider; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; @@ -65,6 +66,9 @@ public class OAuth2LoginBackingBean implements Serializable { @EJB SystemConfig systemConfig; + @EJB + UserServiceBean userService; + @Inject DataverseSession session; @@ -128,6 +132,7 @@ public void exchangeCodeForToken() throws IOException { } else { // login the user and redirect to HOME of intended page (if any). // setUser checks for deactivated users. + dvUser = userService.updateLastLogin(dvUser); session.setUser(dvUser); final OAuth2TokenData tokenData = oauthUser.getTokenData(); if (tokenData != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index fe9415f39f9..bdb69dc918f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -18,7 +18,6 @@ import java.util.ArrayList; import java.util.List; import java.util.logging.Logger; -import jakarta.persistence.TypedQuery; /** * Update an existing dataverse. 
@@ -30,10 +29,10 @@ public class UpdateDataverseCommand extends AbstractCommand<Dataverse> {
 
     private final Dataverse editedDv;
     private final List<DataverseFacet> facetList;
-    private final List<Dataverse> featuredDataverseList;
-    private final List<DataverseFieldTypeInputLevel> inputLevelList;
-    
-    private boolean datasetsReindexRequired = false; 
+    private final List<Dataverse> featuredDataverseList;
+    private final List<DataverseFieldTypeInputLevel> inputLevelList;
+
+    private boolean datasetsReindexRequired = false;
 
     public UpdateDataverseCommand(Dataverse editedDv, List<DataverseFacet> facetList, List<Dataverse> featuredDataverseList,
                 DataverseRequest aRequest, List<DataverseFieldTypeInputLevel> inputLevelList ) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseInputLevelsCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseInputLevelsCommand.java
new file mode 100644
index 00000000000..cf7b4a6f69c
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseInputLevelsCommand.java
@@ -0,0 +1,51 @@
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevel;
+import edu.harvard.iq.dataverse.MetadataBlock;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+
+import java.util.ArrayList;
+import java.util.List;
+
+@RequiredPermissions(Permission.EditDataverse)
+public class UpdateDataverseInputLevelsCommand extends AbstractCommand<Dataverse> {
+    private final Dataverse dataverse;
+    private final List<DataverseFieldTypeInputLevel> inputLevelList;
+
+    public UpdateDataverseInputLevelsCommand(Dataverse dataverse, DataverseRequest request, List<DataverseFieldTypeInputLevel> inputLevelList) {
+        super(request, dataverse);
+        this.dataverse = dataverse;
+        this.inputLevelList = new ArrayList<>(inputLevelList);
+    }
+
+    @Override
+    public Dataverse execute(CommandContext ctxt) throws CommandException {
+        if (inputLevelList == null || inputLevelList.isEmpty()) {
+            throw new CommandException("Error while updating dataverse input levels: Input level list cannot be null or empty", this);
+        }
+        addInputLevelMetadataBlocks();
+        dataverse.setMetadataBlockRoot(true);
+        return ctxt.engine().submit(new UpdateDataverseCommand(dataverse, null, null, getRequest(), inputLevelList));
+    }
+
+    private void addInputLevelMetadataBlocks() {
+        List<MetadataBlock> dataverseMetadataBlocks = dataverse.getMetadataBlocks();
+        for (DataverseFieldTypeInputLevel inputLevel : inputLevelList) {
+            MetadataBlock inputLevelMetadataBlock = inputLevel.getDatasetFieldType().getMetadataBlock();
+            if (!dataverseHasMetadataBlock(dataverseMetadataBlocks, inputLevelMetadataBlock)) {
+                dataverseMetadataBlocks.add(inputLevelMetadataBlock);
+            }
+        }
+        dataverse.setMetadataBlocks(dataverseMetadataBlocks);
+    }
+
+    private boolean dataverseHasMetadataBlock(List<MetadataBlock> dataverseMetadataBlocks, MetadataBlock metadataBlock) {
+        return dataverseMetadataBlocks.stream().anyMatch(block -> block.getId().equals(metadataBlock.getId()));
+    }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java b/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java
index 7a6553cfe74..6fccbe35e44 100644
--- a/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java
+++
b/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java @@ -3,6 +3,7 @@ */ package edu.harvard.iq.dataverse.mydata; +import edu.harvard.iq.dataverse.DatasetServiceBean; import edu.harvard.iq.dataverse.DataverseRoleServiceBean; import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.DataverseSession; @@ -63,7 +64,7 @@ public class DataRetrieverAPI extends AbstractApiBean { private static final String retrieveDataPartialAPIPath = "retrieve"; @Inject - DataverseSession session; + DataverseSession session; @EJB DataverseRoleServiceBean dataverseRoleService; @@ -81,6 +82,8 @@ public class DataRetrieverAPI extends AbstractApiBean { //MyDataQueryHelperServiceBean myDataQueryHelperServiceBean; @EJB GroupServiceBean groupService; + @EJB + DatasetServiceBean datasetService; private List roleList; private DataverseRolePermissionHelper rolePermissionHelper; @@ -491,7 +494,8 @@ private JsonArrayBuilder formatSolrDocs(SolrQueryResponse solrResponse, RoleTagR // ------------------------------------------- // (a) Get core card data from solr // ------------------------------------------- - myDataCardInfo = doc.getJsonForMyData(); + + myDataCardInfo = doc.getJsonForMyData(isValid(doc)); if (doc.getEntity() != null && !doc.getEntity().isInstanceofDataFile()){ String parentAlias = dataverseService.getParentAliasString(doc); @@ -514,4 +518,8 @@ private JsonArrayBuilder formatSolrDocs(SolrQueryResponse solrResponse, RoleTagR return jsonSolrDocsArrayBuilder; } + + private boolean isValid(SolrSearchResult result) { + return result.isValid(x -> true); + } } \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFilterParams.java b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFilterParams.java index 2ab248fcc0b..277fa9ee12f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFilterParams.java +++ b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFilterParams.java @@ -292,7 +292,7 @@ public String getSolrFragmentForPublicationStatus(){ } public String getSolrFragmentForDatasetValidity(){ - if ((this.datasetValidities == null) || (this.datasetValidities.isEmpty())){ + if ((this.datasetValidities == null) || (this.datasetValidities.isEmpty()) || (this.datasetValidities.size() > 1)){ return ""; } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 28a65a652b2..e3049fa210f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -839,16 +839,7 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set dataverses = new ArrayList<>(); dataverses.add(dataverse); solrQueryResponse = searchService.search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinal, sortField, sortOrder.toString(), paginationStart, onlyDataRelatedToMe, numRows, false, null, null, !isFacetsDisabled(), true); @@ -1489,9 +1488,14 @@ public boolean isRetentionExpired(SolrSearchResult result) { return false; } } + + private DataverseRequest getDataverseRequest() { + final HttpServletRequest httpServletRequest = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext().getRequest(); + return new DataverseRequest(session.getUser(), httpServletRequest); + } public boolean isValid(SolrSearchResult result) { - return result.isValid(); + return result.isValid(x -> 
permissionsWrapper.canUpdateDataset(getDataverseRequest(), datasetService.find(x.getEntityId())));
     }
 
     public enum SortOrder {
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
index 2bf6d079a4a..e84c8f133da 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
@@ -7,6 +7,7 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.function.Predicate;
 import java.util.logging.Logger;
 
 import edu.harvard.iq.dataverse.*;
@@ -19,6 +20,7 @@
 import edu.harvard.iq.dataverse.api.Util;
 import edu.harvard.iq.dataverse.dataset.DatasetThumbnail;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.util.DateUtil;
 import edu.harvard.iq.dataverse.util.json.JsonPrinter;
 import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
@@ -402,7 +404,7 @@ public JsonArrayBuilder getRelevance() {
      *
      * @return
      */
-    public JsonObjectBuilder getJsonForMyData() {
+    public JsonObjectBuilder getJsonForMyData(boolean isValid) {
 
         JsonObjectBuilder myDataJson = json(true, true, true);// boolean showRelevance, boolean showEntityIds, boolean showApiUrls)
@@ -410,7 +412,7 @@ public JsonObjectBuilder getJsonForMyData() {
                 .add("is_draft_state", this.isDraftState()).add("is_in_review_state", this.isInReviewState())
                 .add("is_unpublished_state", this.isUnpublishedState()).add("is_published", this.isPublishedState())
                 .add("is_deaccesioned", this.isDeaccessionedState())
-                .add("is_valid", this.isValid())
+                .add("is_valid", isValid)
                 .add("date_to_display_on_card", getDateToDisplayOnCard());
 
         // Add is_deaccessioned attribute, even though MyData currently screens any deaccessioned info out
@@ -1256,7 +1258,19 @@ public void setDatasetValid(Boolean datasetValid) {
         this.datasetValid = datasetValid == null || Boolean.valueOf(datasetValid);
     }
 
-    public boolean isValid() {
-        return datasetValid;
+    public boolean isValid(Predicate<SolrSearchResult> canUpdateDataset) {
+        if (this.datasetValid) {
+            return true;
+        }
+        if (!this.getType().equals("datasets")) {
+            return true;
+        }
+        if (this.isDraftState()) {
+            return false;
+        }
+        if (!JvmSettings.UI_SHOW_VALIDITY_LABEL_WHEN_PUBLISHED.lookupOptional(Boolean.class).orElse(true)) {
+            return true;
+        }
+        return !canUpdateDataset.test(this);
     }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java
index bd78a022dd3..9d13be005c9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java
+++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java
@@ -230,6 +230,7 @@ public enum JvmSettings {
     SCOPE_UI(PREFIX, "ui"),
     UI_ALLOW_REVIEW_INCOMPLETE(SCOPE_UI, "allow-review-for-incomplete"),
     UI_SHOW_VALIDITY_FILTER(SCOPE_UI, "show-validity-filter"),
+    UI_SHOW_VALIDITY_LABEL_WHEN_PUBLISHED(SCOPE_UI, "show-validity-label-when-published"),
 
     // NetCDF SETTINGS
     SCOPE_NETCDF(PREFIX, "netcdf"),
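The ``JsonPrinter`` changes below feed the ``metadatablocks`` endpoint; to see fields that became required via collection input levels appear in the create view, a request along these lines can be used (a sketch in the style of the Native API guide; server URL and token are illustrative):

```
# List metadata blocks shown on dataset creation, including fields required via input levels
curl -H "X-Dataverse-key:$API_TOKEN" \
  "$SERVER_URL/api/dataverses/root/metadatablocks?onlyDisplayedOnCreate=true&returnDatasetFieldTypes=true"
```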
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
index 6c314c4dc2d..95f14b79ece 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
@@ -273,7 +273,7 @@ public static JsonObjectBuilder json(Dataverse dv, Boolean hideEmail, Boolean re
         }
         if (returnOwners){
             bld.add("isPartOf", getOwnersFromDvObject(dv));
-        }        
+        }
         bld.add("permissionRoot", dv.isPermissionRoot())
                 .add("description", dv.getDescription())
                 .add("dataverseType", dv.getDataverseType().name());
@@ -294,6 +294,11 @@ public static JsonObjectBuilder json(Dataverse dv, Boolean hideEmail, Boolean re
         }
         bld.add("isReleased", dv.isReleased());
 
+        List<DataverseFieldTypeInputLevel> inputLevels = dv.getDataverseFieldTypeInputLevels();
+        if(!inputLevels.isEmpty()) {
+            bld.add("inputLevels", JsonPrinter.jsonDataverseFieldTypeInputLevels(inputLevels));
+        }
+
         return bld;
     }
 
@@ -589,9 +594,13 @@ public static JsonObjectBuilder json(MetadataBlock block, List<DatasetFieldType>
     }
 
     public static JsonArrayBuilder json(List<MetadataBlock> metadataBlocks, boolean returnDatasetFieldTypes, boolean printOnlyDisplayedOnCreateDatasetFieldTypes) {
+        return json(metadataBlocks, returnDatasetFieldTypes, printOnlyDisplayedOnCreateDatasetFieldTypes, null);
+    }
+
+    public static JsonArrayBuilder json(List<MetadataBlock> metadataBlocks, boolean returnDatasetFieldTypes, boolean printOnlyDisplayedOnCreateDatasetFieldTypes, Dataverse ownerDataverse) {
         JsonArrayBuilder arrayBuilder = Json.createArrayBuilder();
         for (MetadataBlock metadataBlock : metadataBlocks) {
-            arrayBuilder.add(returnDatasetFieldTypes ? json(metadataBlock, printOnlyDisplayedOnCreateDatasetFieldTypes) : brief.json(metadataBlock));
+            arrayBuilder.add(returnDatasetFieldTypes ? json(metadataBlock, printOnlyDisplayedOnCreateDatasetFieldTypes, ownerDataverse) : brief.json(metadataBlock));
         }
         return arrayBuilder;
     }
@@ -619,20 +628,25 @@ public static JsonObject json(DatasetField dfv) {
     }
 
     public static JsonObjectBuilder json(MetadataBlock metadataBlock) {
-        return json(metadataBlock, false);
+        return json(metadataBlock, false, null);
     }
 
-    public static JsonObjectBuilder json(MetadataBlock metadataBlock, boolean printOnlyDisplayedOnCreateDatasetFieldTypes) {
+    public static JsonObjectBuilder json(MetadataBlock metadataBlock, boolean printOnlyDisplayedOnCreateDatasetFieldTypes, Dataverse ownerDataverse) {
         JsonObjectBuilder jsonObjectBuilder = jsonObjectBuilder();
         jsonObjectBuilder.add("id", metadataBlock.getId());
         jsonObjectBuilder.add("name", metadataBlock.getName());
         jsonObjectBuilder.add("displayName", metadataBlock.getDisplayName());
         jsonObjectBuilder.add("displayOnCreate", metadataBlock.isDisplayOnCreate());
 
-        JsonObjectBuilder fieldsBuilder = jsonObjectBuilder();
-        for (DatasetFieldType datasetFieldType : new TreeSet<>(metadataBlock.getDatasetFieldTypes())) {
-            if (!printOnlyDisplayedOnCreateDatasetFieldTypes || datasetFieldType.isDisplayOnCreate()) {
-                fieldsBuilder.add(datasetFieldType.getName(), json(datasetFieldType));
+        JsonObjectBuilder fieldsBuilder = Json.createObjectBuilder();
+        Set<DatasetFieldType> datasetFieldTypes = new TreeSet<>(metadataBlock.getDatasetFieldTypes());
+        for (DatasetFieldType datasetFieldType : datasetFieldTypes) {
+            boolean requiredInOwnerDataverse = ownerDataverse != null && ownerDataverse.isDatasetFieldTypeRequiredAsInputLevel(datasetFieldType.getId());
+            boolean displayCondition = !printOnlyDisplayedOnCreateDatasetFieldTypes ||
+                    datasetFieldType.isDisplayOnCreate() ||
+                    requiredInOwnerDataverse;
+            if (displayCondition) {
+                fieldsBuilder.add(datasetFieldType.getName(), json(datasetFieldType, ownerDataverse));
             }
         }
 
@@ -642,6 +656,10 @@ public static JsonObjectBuilder json(MetadataBlock metadataBlock, boolean printO
     }
 
     public static JsonObjectBuilder json(DatasetFieldType fld) {
+        return json(fld, null);
+    }
+
+    public static JsonObjectBuilder json(DatasetFieldType fld, Dataverse ownerDataverse) {
        JsonObjectBuilder fieldsBld = jsonObjectBuilder();
         fieldsBld.add("name", fld.getName());
         fieldsBld.add("displayName", fld.getDisplayName());
@@ -654,8 +672,11 @@ public static JsonObjectBuilder json(DatasetFieldType fld) {
         fieldsBld.add("multiple", fld.isAllowMultiples());
         fieldsBld.add("isControlledVocabulary", fld.isControlledVocabulary());
         fieldsBld.add("displayFormat", fld.getDisplayFormat());
-        fieldsBld.add("isRequired", fld.isRequired());
         fieldsBld.add("displayOrder", fld.getDisplayOrder());
+
+        boolean requiredInOwnerDataverse = ownerDataverse != null && ownerDataverse.isDatasetFieldTypeRequiredAsInputLevel(fld.getId());
+        fieldsBld.add("isRequired", requiredInOwnerDataverse || fld.isRequired());
+
         if (fld.isControlledVocabulary()) {
             // If the field has a controlled vocabulary,
             // add all values to the resulting JSON
@@ -665,10 +686,11 @@ public static JsonObjectBuilder json(DatasetFieldType fld) {
             }
             fieldsBld.add("controlledVocabularyValues", jab);
         }
+
         if (!fld.getChildDatasetFieldTypes().isEmpty()) {
             JsonObjectBuilder subFieldsBld = jsonObjectBuilder();
             for (DatasetFieldType subFld : fld.getChildDatasetFieldTypes()) {
-                subFieldsBld.add(subFld.getName(), JsonPrinter.json(subFld));
+                subFieldsBld.add(subFld.getName(), JsonPrinter.json(subFld, ownerDataverse));
             }
             fieldsBld.add("childFields", subFieldsBld);
         }
@@ -1342,4 +1364,16 @@ private static JsonObjectBuilder jsonLicense(DatasetVersion dsv) {
         }
         return licenseJsonObjectBuilder;
     }
+
+    public static JsonArrayBuilder jsonDataverseFieldTypeInputLevels(List<DataverseFieldTypeInputLevel> inputLevels) {
+        JsonArrayBuilder jsonArrayOfInputLevels = Json.createArrayBuilder();
+        for (DataverseFieldTypeInputLevel inputLevel : inputLevels) {
+            NullSafeJsonBuilder inputLevelJsonObject = NullSafeJsonBuilder.jsonObjectBuilder();
+            inputLevelJsonObject.add("datasetFieldTypeName", inputLevel.getDatasetFieldType().getName());
+            inputLevelJsonObject.add("required", inputLevel.isRequired());
+            inputLevelJsonObject.add("include", inputLevel.isInclude());
+            jsonArrayOfInputLevels.add(inputLevelJsonObject);
+        }
+        return jsonArrayOfInputLevels;
+    }
 }
diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties
index 02d848df1e3..1e847d3eeb3 100644
--- a/src/main/java/propertyFiles/Bundle.properties
+++ b/src/main/java/propertyFiles/Bundle.properties
@@ -947,6 +947,7 @@ dataverse.default=(Default)
 dataverse.metadatalanguage.setatdatasetcreation=Chosen at Dataset Creation
 dataverse.guestbookentry.atdownload=Guestbook Entry At Download
 dataverse.guestbookentry.atrequest=Guestbook Entry At Access Request
+dataverse.updateinputlevels.error.invalidfieldtypename=Invalid dataset field type name: {0}
 
 # rolesAndPermissionsFragment.xhtml
 
 # advanced.xhtml
diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml
index 8bef75b6549..835764d9cf5 100644
--- a/src/main/webapp/file.xhtml
+++ b/src/main/webapp/file.xhtml
@@ -77,7 +77,7 @@
-
+
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
index f3472aa43a4..01f4a4646fe 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
@@ -24,17 +24,15 @@ import org.junit.jupiter.api.Test;
 
 import static jakarta.ws.rs.core.Response.Status.*;
-import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.CoreMatchers.not;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
-import
static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.hamcrest.CoreMatchers.*; +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasItemInArray; +import static org.junit.jupiter.api.Assertions.*; + import java.nio.file.Files; import io.restassured.path.json.JsonPath; -import static jakarta.ws.rs.core.Response.Status.OK; import org.hamcrest.CoreMatchers; -import static org.hamcrest.CoreMatchers.containsString; import org.hamcrest.Matchers; public class DataversesIT { @@ -704,26 +702,52 @@ public void testListMetadataBlocks() { Response setMetadataBlocksResponse = UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation").add("astrophysics"), apiToken); setMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); + String[] testInputLevelNames = {"geographicCoverage", "country"}; + Response updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, apiToken); + updateDataverseInputLevelsResponse.then().assertThat().statusCode(OK.getStatusCode()); + // Dataverse not found Response listMetadataBlocksResponse = UtilIT.listMetadataBlocks("-1", false, false, apiToken); listMetadataBlocksResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); // Existent dataverse and no optional params + String[] expectedAllMetadataBlockDisplayNames = {"Astronomy and Astrophysics Metadata", "Citation Metadata", "Geospatial Metadata"}; + listMetadataBlocksResponse = UtilIT.listMetadataBlocks(dataverseAlias, false, false, apiToken); listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); listMetadataBlocksResponse.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].fields", equalTo(null)) - .body("data.size()", equalTo(2)); + .body("data[1].fields", equalTo(null)) + .body("data[2].fields", equalTo(null)) + .body("data.size()", equalTo(3)); + + String actualMetadataBlockDisplayName1 = listMetadataBlocksResponse.then().extract().path("data[0].displayName"); + String actualMetadataBlockDisplayName2 = listMetadataBlocksResponse.then().extract().path("data[1].displayName"); + String actualMetadataBlockDisplayName3 = listMetadataBlocksResponse.then().extract().path("data[2].displayName"); + assertNotEquals(actualMetadataBlockDisplayName1, actualMetadataBlockDisplayName2); + assertNotEquals(actualMetadataBlockDisplayName1, actualMetadataBlockDisplayName3); + assertNotEquals(actualMetadataBlockDisplayName2, actualMetadataBlockDisplayName3); + assertThat(expectedAllMetadataBlockDisplayNames, hasItemInArray(actualMetadataBlockDisplayName1)); + assertThat(expectedAllMetadataBlockDisplayNames, hasItemInArray(actualMetadataBlockDisplayName2)); + assertThat(expectedAllMetadataBlockDisplayNames, hasItemInArray(actualMetadataBlockDisplayName3)); // Existent dataverse and onlyDisplayedOnCreate=true + String[] expectedOnlyDisplayedOnCreateMetadataBlockDisplayNames = {"Citation Metadata", "Geospatial Metadata"}; + listMetadataBlocksResponse = UtilIT.listMetadataBlocks(dataverseAlias, true, false, apiToken); listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); listMetadataBlocksResponse.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].fields", equalTo(null)) - .body("data[0].displayName", equalTo("Citation Metadata")) - .body("data.size()", equalTo(1)); + .body("data[1].fields", 
equalTo(null)) + .body("data.size()", equalTo(2)); + + actualMetadataBlockDisplayName1 = listMetadataBlocksResponse.then().extract().path("data[0].displayName"); + actualMetadataBlockDisplayName2 = listMetadataBlocksResponse.then().extract().path("data[1].displayName"); + assertNotEquals(actualMetadataBlockDisplayName1, actualMetadataBlockDisplayName2); + assertThat(expectedOnlyDisplayedOnCreateMetadataBlockDisplayNames, hasItemInArray(actualMetadataBlockDisplayName1)); + assertThat(expectedOnlyDisplayedOnCreateMetadataBlockDisplayNames, hasItemInArray(actualMetadataBlockDisplayName2)); // Existent dataverse and returnDatasetFieldTypes=true listMetadataBlocksResponse = UtilIT.listMetadataBlocks(dataverseAlias, false, true, apiToken); @@ -731,7 +755,19 @@ public void testListMetadataBlocks() { listMetadataBlocksResponse.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].fields", not(equalTo(null))) - .body("data.size()", equalTo(2)); + .body("data[1].fields", not(equalTo(null))) + .body("data[2].fields", not(equalTo(null))) + .body("data.size()", equalTo(3)); + + actualMetadataBlockDisplayName1 = listMetadataBlocksResponse.then().extract().path("data[0].displayName"); + actualMetadataBlockDisplayName2 = listMetadataBlocksResponse.then().extract().path("data[1].displayName"); + actualMetadataBlockDisplayName3 = listMetadataBlocksResponse.then().extract().path("data[2].displayName"); + assertNotEquals(actualMetadataBlockDisplayName1, actualMetadataBlockDisplayName2); + assertNotEquals(actualMetadataBlockDisplayName1, actualMetadataBlockDisplayName3); + assertNotEquals(actualMetadataBlockDisplayName2, actualMetadataBlockDisplayName3); + assertThat(expectedAllMetadataBlockDisplayNames, hasItemInArray(actualMetadataBlockDisplayName1)); + assertThat(expectedAllMetadataBlockDisplayNames, hasItemInArray(actualMetadataBlockDisplayName2)); + assertThat(expectedAllMetadataBlockDisplayNames, hasItemInArray(actualMetadataBlockDisplayName3)); // Existent dataverse and onlyDisplayedOnCreate=true and returnDatasetFieldTypes=true listMetadataBlocksResponse = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); @@ -739,8 +775,26 @@ public void testListMetadataBlocks() { listMetadataBlocksResponse.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].fields", not(equalTo(null))) - .body("data[0].displayName", equalTo("Citation Metadata")) - .body("data.size()", equalTo(1)); + .body("data[1].fields", not(equalTo(null))) + .body("data.size()", equalTo(2)); + + actualMetadataBlockDisplayName1 = listMetadataBlocksResponse.then().extract().path("data[0].displayName"); + actualMetadataBlockDisplayName2 = listMetadataBlocksResponse.then().extract().path("data[1].displayName"); + assertNotEquals(actualMetadataBlockDisplayName1, actualMetadataBlockDisplayName2); + assertThat(expectedOnlyDisplayedOnCreateMetadataBlockDisplayNames, hasItemInArray(actualMetadataBlockDisplayName1)); + assertThat(expectedOnlyDisplayedOnCreateMetadataBlockDisplayNames, hasItemInArray(actualMetadataBlockDisplayName2)); + + // Check dataset fields for the updated input levels are retrieved + int geospatialMetadataBlockIndex = actualMetadataBlockDisplayName2.equals("Geospatial Metadata") ? 
1 : 0; + + listMetadataBlocksResponse.then().assertThat() + .body(String.format("data[%d].fields.size()", geospatialMetadataBlockIndex), equalTo(2)); + + String actualMetadataField1 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.geographicCoverage.name", geospatialMetadataBlockIndex)); + String actualMetadataField2 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.country.name", geospatialMetadataBlockIndex)); + + assertNotNull(actualMetadataField1); + assertNotNull(actualMetadataField2); // User has no permissions on the requested dataverse Response createSecondUserResponse = UtilIT.createRandomUser(); @@ -753,7 +807,7 @@ public void testListMetadataBlocks() { listMetadataBlocksResponse = UtilIT.listMetadataBlocks(secondDataverseAlias, true, true, apiToken); listMetadataBlocksResponse.then().assertThat().statusCode(UNAUTHORIZED.getStatusCode()); } - + @Test public void testFeatureDataverse() throws Exception { @@ -762,42 +816,42 @@ public void testFeatureDataverse() throws Exception { Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - + Response publishDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); assertEquals(200, publishDataverse.getStatusCode()); - - Response createSubDVToBeFeatured = UtilIT.createSubDataverse(UtilIT.getRandomDvAlias() + "-feature", null, apiToken, dataverseAlias); + + Response createSubDVToBeFeatured = UtilIT.createSubDataverse(UtilIT.getRandomDvAlias() + "-feature", null, apiToken, dataverseAlias); String subDataverseAlias = UtilIT.getAliasFromResponse(createSubDVToBeFeatured); - + //publish a sub dataverse so that the owner will have something to feature - Response createSubDVToBePublished = UtilIT.createSubDataverse(UtilIT.getRandomDvAlias() + "-pub", null, apiToken, dataverseAlias); + Response createSubDVToBePublished = UtilIT.createSubDataverse(UtilIT.getRandomDvAlias() + "-pub", null, apiToken, dataverseAlias); assertEquals(201, createSubDVToBePublished.getStatusCode()); String subDataverseAliasPub = UtilIT.getAliasFromResponse(createSubDVToBePublished); publishDataverse = UtilIT.publishDataverseViaNativeApi(subDataverseAliasPub, apiToken); assertEquals(200, publishDataverse.getStatusCode()); - + //can't feature a dataverse that is unpublished Response featureSubDVResponseUnpublished = UtilIT.addFeaturedDataverse(dataverseAlias, subDataverseAlias, apiToken); featureSubDVResponseUnpublished.prettyPrint(); assertEquals(400, featureSubDVResponseUnpublished.getStatusCode()); featureSubDVResponseUnpublished.then().assertThat() .body(containsString("may not be featured")); - + //can't feature a dataverse you don't own Response featureSubDVResponseNotOwned = UtilIT.addFeaturedDataverse(dataverseAlias, "root", apiToken); featureSubDVResponseNotOwned.prettyPrint(); assertEquals(400, featureSubDVResponseNotOwned.getStatusCode()); featureSubDVResponseNotOwned.then().assertThat() .body(containsString("may not be featured")); - + //can't feature a dataverse that doesn't exist Response featureSubDVResponseNotExist = UtilIT.addFeaturedDataverse(dataverseAlias, "dummy-alias-sek-foobar-333", apiToken); featureSubDVResponseNotExist.prettyPrint(); assertEquals(400, featureSubDVResponseNotExist.getStatusCode()); featureSubDVResponseNotExist.then().assertThat() .body(containsString("Can't find dataverse collection")); - + publishDataverse = 
         publishDataverse = UtilIT.publishDataverseViaNativeApi(subDataverseAlias, apiToken);
         assertEquals(200, publishDataverse.getStatusCode());
@@ -805,32 +859,71 @@ public void testFeatureDataverse() throws Exception {
         Response featureSubDVResponse = UtilIT.addFeaturedDataverse(dataverseAlias, subDataverseAlias, apiToken);
         featureSubDVResponse.prettyPrint();
         assertEquals(OK.getStatusCode(), featureSubDVResponse.getStatusCode());
-        
-        
+
+
         Response getFeaturedDataverseResponse = UtilIT.getFeaturedDataverses(dataverseAlias, apiToken);
         getFeaturedDataverseResponse.prettyPrint();
         assertEquals(OK.getStatusCode(), getFeaturedDataverseResponse.getStatusCode());
         getFeaturedDataverseResponse.then().assertThat()
                 .body("data[0]", equalTo(subDataverseAlias));
-        
+
         Response deleteFeaturedDataverseResponse = UtilIT.deleteFeaturedDataverses(dataverseAlias, apiToken);
         deleteFeaturedDataverseResponse.prettyPrint();
-        
+
         assertEquals(OK.getStatusCode(), deleteFeaturedDataverseResponse.getStatusCode());
         deleteFeaturedDataverseResponse.then().assertThat()
                 .body(containsString("Featured dataverses have been removed"));
-        
+
         Response deleteSubCollectionResponse = UtilIT.deleteDataverse(subDataverseAlias, apiToken);
         deleteSubCollectionResponse.prettyPrint();
         assertEquals(OK.getStatusCode(), deleteSubCollectionResponse.getStatusCode());
-        
+
         Response deleteSubCollectionPubResponse = UtilIT.deleteDataverse(subDataverseAliasPub, apiToken);
         deleteSubCollectionPubResponse.prettyPrint();
         assertEquals(OK.getStatusCode(), deleteSubCollectionPubResponse.getStatusCode());
-        
+
         Response deleteCollectionResponse = UtilIT.deleteDataverse(dataverseAlias, apiToken);
         deleteCollectionResponse.prettyPrint();
         assertEquals(OK.getStatusCode(), deleteCollectionResponse.getStatusCode());
     }
-    
+
+    @Test
+    public void testUpdateInputLevels() {
+        Response createUserResponse = UtilIT.createRandomUser();
+        String apiToken = UtilIT.getApiTokenFromResponse(createUserResponse);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        // Update valid input levels
+        String[] testInputLevelNames = {"geographicCoverage", "country"};
+        Response updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, apiToken);
+        updateDataverseInputLevelsResponse.then().assertThat()
+                .body("data.inputLevels[0].required", equalTo(true))
+                .body("data.inputLevels[0].include", equalTo(true))
+                .body("data.inputLevels[1].required", equalTo(true))
+                .body("data.inputLevels[1].include", equalTo(true))
+                .statusCode(OK.getStatusCode());
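+        // Input level order in the response is not guaranteed, so match each returned name against the request array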
+        String actualFieldTypeName1 = updateDataverseInputLevelsResponse.then().extract().path("data.inputLevels[0].datasetFieldTypeName");
+        String actualFieldTypeName2 = updateDataverseInputLevelsResponse.then().extract().path("data.inputLevels[1].datasetFieldTypeName");
+        assertNotEquals(actualFieldTypeName1, actualFieldTypeName2);
+        assertThat(testInputLevelNames, hasItemInArray(actualFieldTypeName1));
+        assertThat(testInputLevelNames, hasItemInArray(actualFieldTypeName2));
+
+        // Update input levels with an invalid dataset field type name
+        String[] testInvalidInputLevelNames = {"geographicCoverage", "invalid1"};
+        updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInvalidInputLevelNames, apiToken);
+        updateDataverseInputLevelsResponse.then().assertThat()
+                .body("message", equalTo("Invalid dataset field type name: invalid1"))
+                .statusCode(BAD_REQUEST.getStatusCode());
+
+        // Update input levels with an empty list (invalid)
+        testInputLevelNames = new String[]{};
+        updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, apiToken);
+        updateDataverseInputLevelsResponse.prettyPrint();
+        updateDataverseInputLevelsResponse.then().assertThat()
+                .body("message", equalTo("Error while updating dataverse input levels: Input level list cannot be null or empty"))
+                .statusCode(INTERNAL_SERVER_ERROR.getStatusCode());
+    }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index 4326250a157..507c9b302b3 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -3939,4 +3939,19 @@ static Response requestGlobusUploadPaths(Integer datasetId, JsonObject body, Str
                 .post("/api/datasets/" + datasetId + "/requestGlobusUploadPaths");
     }
 
+    static Response updateDataverseInputLevels(String dataverseAlias, String[] inputLevelNames, String apiToken) {
+        JsonArrayBuilder inputLevelsArrayBuilder = Json.createArrayBuilder();
+        for (String inputLevelName : inputLevelNames) {
+            inputLevelsArrayBuilder.add(Json.createObjectBuilder()
+                    .add("datasetFieldTypeName", inputLevelName)
+                    .add("required", true)
+                    .add("include", true)
+            );
+        }
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .body(inputLevelsArrayBuilder.build().toString())
+                .contentType(ContentType.JSON)
+                .put("/api/dataverses/" + dataverseAlias + "/inputLevels");
+    }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java
index 672d7563669..3a63371d7a8 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java
@@ -1,6 +1,7 @@
 package edu.harvard.iq.dataverse.authorization.providers.oauth2;
 
 import edu.harvard.iq.dataverse.DataverseSession;
+import edu.harvard.iq.dataverse.UserServiceBean;
 import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
 import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier;
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.impl.GitHubOAuth2APTest;
@@ -48,6 +49,7 @@ class OAuth2LoginBackingBeanTest {
     @Mock AuthenticationServiceBean authenticationServiceBean;
     @Mock SystemConfig systemConfig;
+    @Mock UserServiceBean userService;
 
     Clock constantClock = Clock.fixed(Instant.now(), ZoneId.systemDefault());
 
@@ -70,6 +72,7 @@ void setUp() {
         this.loginBackingBean.clock = constantClock;
         this.loginBackingBean.authenticationSvc = this.authenticationServiceBean;
         this.loginBackingBean.systemConfig = this.systemConfig;
+        this.loginBackingBean.userService = this.userService;
 
         lenient().when(this.authenticationServiceBean.getOAuth2Provider(testIdp.getId())).thenReturn(testIdp);
     }
@@ -178,6 +181,7 @@ void existingUser() throws Exception {
         // also fake the result of the lookup in the auth service
         doReturn(userIdentifier).when(userRecord).getUserRecordIdentifier();
         doReturn(user).when(authenticationServiceBean).lookupUser(userIdentifier);
+        doReturn(user).when(userService).updateLastLogin(user);
 
         // WHEN (& then)
         // capture the redirect target from the faces context