From 830ea359b12610011b3f377329d4de4b3fadc059 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 15 Jul 2024 15:45:23 -0400 Subject: [PATCH 01/45] stub out dataset types #10517 --- doc/release-notes/10517-datasetType.md | 3 + .../_static/api/dataset-create-software.json | 82 +++++++++++++++++++ .../api/dataset-create-software.jsonld | 16 ++++ .../source/user/dataset-types.rst | 30 +++++++ doc/sphinx-guides/source/user/index.rst | 1 + docker-compose-dev.yml | 1 + .../edu/harvard/iq/dataverse/Dataset.java | 14 +++- .../harvard/iq/dataverse/api/Dataverses.java | 8 ++ .../iq/dataverse/dataverse/DataverseUtil.java | 14 ++++ .../iq/dataverse/settings/FeatureFlags.java | 5 ++ .../iq/dataverse/util/json/JsonParser.java | 7 +- .../iq/dataverse/api/DatasetTypesIT.java | 54 ++++++++++++ tests/integration-tests.txt | 2 +- 13 files changed, 234 insertions(+), 3 deletions(-) create mode 100644 doc/release-notes/10517-datasetType.md create mode 100644 doc/sphinx-guides/source/_static/api/dataset-create-software.json create mode 100644 doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld create mode 100755 doc/sphinx-guides/source/user/dataset-types.rst create mode 100644 src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java diff --git a/doc/release-notes/10517-datasetType.md b/doc/release-notes/10517-datasetType.md new file mode 100644 index 00000000000..afc96a546fa --- /dev/null +++ b/doc/release-notes/10517-datasetType.md @@ -0,0 +1,3 @@ +### Initial Support for Dataset Types (Dataset, Software, Workflow) + +Datasets now have types. By default the dataset type will be "dataset" but if you turn on support for additional types, datasets can have a type of "software" or "workflow" as well. For more details see doc/sphinx-guides/source/user/dataset-types.rst and #10517. Please note that this feature is highly experimental. 
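+For example, with the flag on (the dev containers set `DATAVERSE_FEATURE_DATASET_TYPES: "1"`), a software dataset can be created from the sample files in this patch. This is a sketch, not captured output: `SERVER_URL`, the `root` collection alias, and the API token are placeholders.
+
+```
+export API_TOKEN=xxxxxxxxxxxx
+export SERVER_URL=https://demo.dataverse.org
+
+# Native API: the type comes from "datasetType": "software" in the JSON body
+curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/root/datasets" \
+  --upload-file doc/sphinx-guides/source/_static/api/dataset-create-software.json
+
+# Semantic API: same endpoint, sent as JSON-LD
+curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-Type: application/ld+json" \
+  -X POST "$SERVER_URL/api/dataverses/root/datasets" \
+  --upload-file doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld
+```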
diff --git a/doc/sphinx-guides/source/_static/api/dataset-create-software.json b/doc/sphinx-guides/source/_static/api/dataset-create-software.json new file mode 100644 index 00000000000..2556d52f1fc --- /dev/null +++ b/doc/sphinx-guides/source/_static/api/dataset-create-software.json @@ -0,0 +1,82 @@ +{ + "datasetType": "software", + "datasetVersion": { + "license": { + "name": "CC0 1.0", + "uri": "http://creativecommons.org/publicdomain/zero/1.0" + }, + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "Darwin's Finches", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorName": { + "value": "Finch, Fiona", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + }, + "authorAffiliation": { + "value": "Birds Inc.", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + { + "value": [ + { "datasetContactEmail" : { + "typeClass": "primitive", + "multiple": false, + "typeName": "datasetContactEmail", + "value" : "finch@mailinator.com" + }, + "datasetContactName" : { + "typeClass": "primitive", + "multiple": false, + "typeName": "datasetContactName", + "value": "Finch, Fiona" + } + }], + "typeClass": "compound", + "multiple": true, + "typeName": "datasetContact" + }, + { + "value": [ { + "dsDescriptionValue":{ + "value": "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds.", + "multiple":false, + "typeClass": "primitive", + "typeName": "dsDescriptionValue" + }}], + "typeClass": "compound", + "multiple": true, + "typeName": "dsDescription" + }, + { + "value": [ + "Medicine, Health and Life Sciences" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + } + ], + "displayName": "Citation Metadata" + } + } + } +} diff --git a/doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld b/doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld new file mode 100644 index 00000000000..e5c93f58c03 --- /dev/null +++ b/doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld @@ -0,0 +1,16 @@ +{ + "http://purl.org/dc/terms/title": "Darwin's Finches", + "http://purl.org/dc/terms/subject": "Medicine, Health and Life Sciences", + "http://purl.org/dc/terms/creator": { + "https://dataverse.org/schema/citation/authorName": "Finch, Fiona", + "https://dataverse.org/schema/citation/authorAffiliation": "Birds Inc." + }, + "https://dataverse.org/schema/citation/datasetContact": { + "https://dataverse.org/schema/citation/datasetContactEmail": "finch@mailinator.com", + "https://dataverse.org/schema/citation/datasetContactName": "Finch, Fiona" + }, + "https://dataverse.org/schema/citation/dsDescription": { + "https://dataverse.org/schema/citation/dsDescriptionValue": "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds." + }, + "datasetType": "software" +} diff --git a/doc/sphinx-guides/source/user/dataset-types.rst b/doc/sphinx-guides/source/user/dataset-types.rst new file mode 100755 index 00000000000..619c4cd0c4c --- /dev/null +++ b/doc/sphinx-guides/source/user/dataset-types.rst @@ -0,0 +1,30 @@ +Dataset Types ++++++++++++++ + +NOTE: This separate page will be folded into individual pages and removed as the pull request is finalized + +.. 
contents:: |toctitle| + :local: + +Intro +===== + +Datasets can have a dataset type such as "dataset", "software", or "workflow". + +Enabling Dataset Types +====================== + +Turn on ``dataverse.feature.dataset-types``. See also :ref:`feature-flags`. + +Specifying a Dataset Type When Creating a Dataset +================================================= + +Native API +---------- + +An example JSON file is available at :download:`dataset-create-software.json <../_static/api/dataset-create-software.json>` + +Semantic API +--------------------------------- + +An example JSON-LD file is available at :download:`dataset-create-software.jsonld <../_static/api/dataset-create-software.jsonld>` diff --git a/doc/sphinx-guides/source/user/index.rst b/doc/sphinx-guides/source/user/index.rst index 857bd27ca22..60f6e473b68 100755 --- a/doc/sphinx-guides/source/user/index.rst +++ b/doc/sphinx-guides/source/user/index.rst @@ -16,3 +16,4 @@ User Guide dataset-management tabulardataingest/index appendix + dataset-types diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 402a95c0e16..64e16a0c5ae 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -17,6 +17,7 @@ services: SKIP_DEPLOY: "${SKIP_DEPLOY}" DATAVERSE_JSF_REFRESH_PERIOD: "1" DATAVERSE_FEATURE_API_BEARER_AUTH: "1" + DATAVERSE_FEATURE_DATASET_TYPES: "1" DATAVERSE_MAIL_SYSTEM_EMAIL: "dataverse@localhost" DATAVERSE_MAIL_MTA_HOST: "smtp" DATAVERSE_AUTH_OIDC_ENABLED: "1" diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index eaf406d01bf..96fa3a0bef3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -38,6 +38,7 @@ import edu.harvard.iq.dataverse.storageuse.StorageUse; import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.SystemConfig; +import jakarta.persistence.Transient; /** * @@ -128,6 +129,9 @@ public class Dataset extends DvObjectContainer { */ private boolean useGenericThumbnail; + @Transient + private String datasetType; + @OneToOne(cascade = {CascadeType.MERGE, CascadeType.PERSIST}) @JoinColumn(name = "guestbook_id", unique = false, nullable = true, insertable = true, updatable = true) private Guestbook guestbook; @@ -736,7 +740,15 @@ public boolean isUseGenericThumbnail() { public void setUseGenericThumbnail(boolean useGenericThumbnail) { this.useGenericThumbnail = useGenericThumbnail; } - + + public String getDatasetType() { + return datasetType; + } + + public void setDatasetType(String datasetType) { + this.datasetType = datasetType; + } + public List getDatasetMetrics() { return datasetMetrics; } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 02b60fdb32a..76f38008fef 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -23,6 +23,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.*; import edu.harvard.iq.dataverse.pidproviders.PidProvider; import edu.harvard.iq.dataverse.pidproviders.PidUtil; +import edu.harvard.iq.dataverse.settings.FeatureFlags; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; @@ -240,6 +241,13 @@ public Response createDataset(@Context ContainerRequestContext crc, String jsonB //Throw BadRequestException if 
metadataLanguage isn't compatible with setting DataverseUtil.checkMetadataLangauge(ds, owner, settingsService.getBaseMetadataLanguageMap(null, true)); + try { + logger.info("about to call checkDatasetType..."); + DataverseUtil.checkDatasetType(ds, FeatureFlags.DATASET_TYPES.enabled()); + } catch (BadRequestException ex) { + return badRequest(ex.getLocalizedMessage()); + } + // clean possible version metadata DatasetVersion version = ds.getVersions().get(0); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java index f45a9058e7c..0fac22ef480 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java @@ -7,6 +7,7 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.settings.FeatureFlags; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.json.JsonLDTerm; @@ -122,4 +123,17 @@ public static void checkMetadataLangauge(Dataset ds, Dataverse owner, Map Date: Mon, 15 Jul 2024 16:59:36 -0400 Subject: [PATCH 02/45] persist "software" etc in new datasetType entity #10517 --- .../edu/harvard/iq/dataverse/Dataset.java | 11 +-- .../iq/dataverse/dataset/DatasetType.java | 79 +++++++++++++++++++ .../iq/dataverse/dataverse/DataverseUtil.java | 3 +- .../iq/dataverse/util/json/JsonParser.java | 3 +- .../iq/dataverse/util/json/JsonPrinter.java | 5 ++ .../iq/dataverse/api/DatasetTypesIT.java | 13 ++- 6 files changed, 102 insertions(+), 12 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 96fa3a0bef3..ef2f39620e9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.dataset.DatasetThumbnail; +import edu.harvard.iq.dataverse.dataset.DatasetType; import edu.harvard.iq.dataverse.dataset.DatasetUtil; import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; import edu.harvard.iq.dataverse.license.License; @@ -38,7 +39,6 @@ import edu.harvard.iq.dataverse.storageuse.StorageUse; import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.SystemConfig; -import jakarta.persistence.Transient; /** * @@ -129,8 +129,9 @@ public class Dataset extends DvObjectContainer { */ private boolean useGenericThumbnail; - @Transient - private String datasetType; + @OneToOne(cascade = {CascadeType.MERGE, CascadeType.PERSIST}) + @JoinColumn(name = "datasettype_id", nullable = true) + private DatasetType datasetType; @OneToOne(cascade = {CascadeType.MERGE, CascadeType.PERSIST}) @JoinColumn(name = "guestbook_id", unique = false, nullable = true, insertable = true, updatable = true) @@ -741,11 +742,11 @@ public void setUseGenericThumbnail(boolean useGenericThumbnail) { this.useGenericThumbnail = useGenericThumbnail; } - public String getDatasetType() { + public DatasetType getDatasetType() { return datasetType; } - public void setDatasetType(String datasetType) { + public void setDatasetType(DatasetType datasetType) { this.datasetType = datasetType; } diff --git 
a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java new file mode 100644 index 00000000000..0d47d6a5885 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java @@ -0,0 +1,79 @@ +package edu.harvard.iq.dataverse.dataset; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import java.io.Serializable; +import java.util.Arrays; + +@Entity +public class DatasetType implements Serializable { + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + @Column(nullable = false) + @Enumerated(EnumType.STRING) + private Type baseType; + + /** + * This default constructor is only here to prevent this error at + * deployment: + * + * Exception Description: The instance creation method + * [...DatasetType.], with no parameters, does not + * exist, or is not accessible + * + * Don't use it. + */ + @Deprecated + public DatasetType() { + } + + public DatasetType(Type baseType) { + this.baseType = baseType; + } + + public enum Type { + + DATASET("dataset"), + SOFTWARE("software"), + WORKFLOW("workflow"); + + private final String text; + + private Type(final String text) { + this.text = text; + } + + public static Type fromString(String text) { + if (text != null) { + for (Type type : Type.values()) { + if (text.equals(type.text)) { + return type; + } + } + } + throw new IllegalArgumentException("Type must be one of these values: " + Arrays.asList(Type.values()) + "."); + } + + @Override + public String toString() { + return text; + } + } + + public Type getBaseType() { + return baseType; + } + + public void setBaseType(Type baseType) { + this.baseType = baseType; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java index 0fac22ef480..f13f73f2dc8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java @@ -125,13 +125,12 @@ public static void checkMetadataLangauge(Dataset ds, Dataverse owner, Map Date: Tue, 16 Jul 2024 16:27:57 -0400 Subject: [PATCH 03/45] set datasetType using Semantic API #10517 --- .../iq/dataverse/util/json/JSONLDUtil.java | 12 +++++++ .../iq/dataverse/api/DatasetTypesIT.java | 34 +++++++++++++++---- .../edu/harvard/iq/dataverse/api/UtilIT.java | 9 +++++ 3 files changed, 49 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java index 52491a5a7e1..512576adff7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java @@ -49,6 +49,7 @@ import com.apicatalog.jsonld.document.JsonDocument; import edu.harvard.iq.dataverse.DatasetVersion.VersionState; +import edu.harvard.iq.dataverse.dataset.DatasetType; import edu.harvard.iq.dataverse.license.License; import edu.harvard.iq.dataverse.license.LicenseServiceBean; import edu.harvard.iq.dataverse.pidproviders.PidProvider; @@ -96,6 +97,17 @@ public static Dataset updateDatasetMDFromJsonLD(Dataset ds, String jsonLDBody, //Store the metadatalanguage if sent - the caller needs to check whether it 
is allowed (as with any GlobalID) ds.setMetadataLanguage(jsonld.getString(JsonLDTerm.schemaOrg("inLanguage").getUrl(),null)); + try (StringReader rdr = new StringReader(jsonLDBody)) { + try (JsonReader jsonReader = Json.createReader(rdr)) { + JsonObject jsonObject = jsonReader.readObject(); + String datasetType = jsonObject.getString("datasetType", null); + logger.info("datasetType: " + datasetType); + if (datasetType != null) { + ds.setDatasetType(new DatasetType(DatasetType.Type.fromString(datasetType))); + } + } + } + dsv = updateDatasetVersionMDFromJsonLD(dsv, jsonld, metadataBlockSvc, datasetFieldSvc, append, migrating, licenseSvc); dsv.setDataset(ds); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java index bb3ec1e94ba..6c67e3d32f2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java @@ -5,10 +5,8 @@ import io.restassured.response.Response; import static jakarta.ws.rs.core.Response.Status.CREATED; import static jakarta.ws.rs.core.Response.Status.OK; -import org.junit.jupiter.api.Assertions; import static org.junit.jupiter.api.Assertions.assertEquals; import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; public class DatasetTypesIT { @@ -46,14 +44,38 @@ public void testCreateSoftwareDatasetNative() { getDatasetJson.then().assertThat().statusCode(OK.getStatusCode()); String dataseType = JsonPath.from(getDatasetJson.getBody().asString()).getString("data.datasetType"); System.out.println("datasetType: " + dataseType); - assertEquals("software", dataseType); + assertEquals("SOFTWARE", dataseType); } - @Disabled @Test public void testCreateSoftwareDatasetSemantic() { - String jsonIn = "doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld"; - System.out.println("semantic: " + jsonIn); + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverse = UtilIT.createRandomDataverse(apiToken); + createDataverse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); + Integer dataverseId = UtilIT.getDataverseIdFromResponse(createDataverse); + + String jsonIn = UtilIT.getDatasetJson("doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld"); + + Response createSoftware = UtilIT.createDatasetSemantic(dataverseAlias, jsonIn, apiToken); + createSoftware.prettyPrint(); + createSoftware.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + //TODO: try sending "junk" instead of "software". 
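+        // (Not asserted here: for an unrecognized value, DatasetType.Type.fromString
+        // throws IllegalArgumentException listing the valid types, so such a
+        // request should be rejected rather than created.)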
+ Integer datasetId = UtilIT.getDatasetIdFromResponse(createSoftware); + String datasetPid = JsonPath.from(createSoftware.getBody().asString()).getString("data.persistentId"); + + Response getDatasetJson = UtilIT.nativeGet(datasetId, apiToken); + getDatasetJson.prettyPrint(); + getDatasetJson.then().assertThat().statusCode(OK.getStatusCode()); + String dataseType = JsonPath.from(getDatasetJson.getBody().asString()).getString("data.datasetType"); + System.out.println("datasetType: " + dataseType); + assertEquals("SOFTWARE", dataseType); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 0216859b869..585b60834d4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -497,6 +497,15 @@ static Response createDataset(String dataverseAlias, String datasetJson, String return createDatasetResponse; } + static Response createDatasetSemantic(String dataverseAlias, String datasetJson, String apiToken) { + Response response = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(datasetJson) + .contentType("application/ld+json") + .post("/api/dataverses/" + dataverseAlias + "/datasets"); + return response; + } + static String getDatasetJson(String pathToJsonFile) { File datasetVersionJson = new File(pathToJsonFile); try { From 4f055b6d89e790ff4df6af490f7152e88e1c431d Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 17 Jul 2024 17:01:55 -0400 Subject: [PATCH 04/45] assert that importing software via JSON works #10517 Also fix case of returned dataset type. --- .../iq/dataverse/api/DatasetTypesIT.java | 37 ++++++++++++++++++- .../edu/harvard/iq/dataverse/api/UtilIT.java | 25 +++++++++++++ 2 files changed, 60 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java index 6c67e3d32f2..e5f3eb20a7f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java @@ -44,7 +44,7 @@ public void testCreateSoftwareDatasetNative() { getDatasetJson.then().assertThat().statusCode(OK.getStatusCode()); String dataseType = JsonPath.from(getDatasetJson.getBody().asString()).getString("data.datasetType"); System.out.println("datasetType: " + dataseType); - assertEquals("SOFTWARE", dataseType); + assertEquals("software", dataseType); } @Test @@ -75,7 +75,40 @@ public void testCreateSoftwareDatasetSemantic() { getDatasetJson.then().assertThat().statusCode(OK.getStatusCode()); String dataseType = JsonPath.from(getDatasetJson.getBody().asString()).getString("data.datasetType"); System.out.println("datasetType: " + dataseType); - assertEquals("SOFTWARE", dataseType); + assertEquals("software", dataseType); } + @Test + public void testImportJson() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + UtilIT.setSuperuserStatus(username, true).then().assertThat().statusCode(OK.getStatusCode()); + + Response createDataverse = UtilIT.createRandomDataverse(apiToken); + createDataverse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); + Integer dataverseId = 
UtilIT.getDataverseIdFromResponse(createDataverse); + + String jsonIn = UtilIT.getDatasetJson("doc/sphinx-guides/source/_static/api/dataset-create-software.json"); + + String randomString = UtilIT.getRandomString(6); + + Response importJson = UtilIT.importDatasetNativeJson(apiToken, dataverseAlias, jsonIn, "doi:10.5072/FK2/" + randomString, "no"); + importJson.prettyPrint(); + importJson.then().assertThat().statusCode(CREATED.getStatusCode()); + + Integer datasetId = JsonPath.from(importJson.getBody().asString()).getInt("data.id"); + String datasetPid = JsonPath.from(importJson.getBody().asString()).getString("data.persistentId"); + + Response getDatasetJson = UtilIT.nativeGet(datasetId, apiToken); + getDatasetJson.prettyPrint(); + getDatasetJson.then().assertThat().statusCode(OK.getStatusCode()); + String dataseType = JsonPath.from(getDatasetJson.getBody().asString()).getString("data.datasetType"); + System.out.println("datasetType: " + dataseType); + assertEquals("software", dataseType); + + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 585b60834d4..3d5b97d00e8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3551,6 +3551,31 @@ private static DatasetField constructPrimitive(String fieldName, String value) { return field; } + static Response importDatasetNativeJson(String apiToken, String dataverseAlias, String jsonString, String pid, String release) { + + String postString = "/api/dataverses/" + dataverseAlias + "/datasets/:import"; + if (pid != null || release != null) { + //postString = postString + "?"; + if (pid != null) { + postString = postString + "?pid=" + pid; + if (release != null && release.compareTo("yes") == 0) { + postString = postString + "&release=" + release.toString(); + } + } else { + if (release != null && release.compareTo("yes") == 0) { + postString = postString + "?release=" + release.toString(); + } + } + } + + RequestSpecification importJson = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .urlEncodingEnabled(false) + .body(jsonString) + .contentType("application/json"); + + return importJson.post(postString); + } static Response importDatasetDDIViaNativeApi(String apiToken, String dataverseAlias, String xml, String pid, String release) { From 47c5b302846f11e076193ec1feecaf7eeb5816e6 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 17 Jul 2024 17:03:47 -0400 Subject: [PATCH 05/45] fix typo #10517 --- .../iq/dataverse/api/DatasetTypesIT.java | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java index e5f3eb20a7f..c19aec33383 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java @@ -42,9 +42,9 @@ public void testCreateSoftwareDatasetNative() { Response getDatasetJson = UtilIT.nativeGet(datasetId, apiToken); getDatasetJson.prettyPrint(); getDatasetJson.then().assertThat().statusCode(OK.getStatusCode()); - String dataseType = JsonPath.from(getDatasetJson.getBody().asString()).getString("data.datasetType"); - System.out.println("datasetType: " + dataseType); - assertEquals("software", dataseType); + String datasetType = JsonPath.from(getDatasetJson.getBody().asString()).getString("data.datasetType"); + System.out.println("datasetType: " + 
datasetType); + assertEquals("software", datasetType); } @Test @@ -73,9 +73,9 @@ public void testCreateSoftwareDatasetSemantic() { Response getDatasetJson = UtilIT.nativeGet(datasetId, apiToken); getDatasetJson.prettyPrint(); getDatasetJson.then().assertThat().statusCode(OK.getStatusCode()); - String dataseType = JsonPath.from(getDatasetJson.getBody().asString()).getString("data.datasetType"); - System.out.println("datasetType: " + dataseType); - assertEquals("software", dataseType); + String datasetType = JsonPath.from(getDatasetJson.getBody().asString()).getString("data.datasetType"); + System.out.println("datasetType: " + datasetType); + assertEquals("software", datasetType); } @Test @@ -106,9 +106,9 @@ public void testImportJson() { Response getDatasetJson = UtilIT.nativeGet(datasetId, apiToken); getDatasetJson.prettyPrint(); getDatasetJson.then().assertThat().statusCode(OK.getStatusCode()); - String dataseType = JsonPath.from(getDatasetJson.getBody().asString()).getString("data.datasetType"); - System.out.println("datasetType: " + dataseType); - assertEquals("software", dataseType); + String datasetType = JsonPath.from(getDatasetJson.getBody().asString()).getString("data.datasetType"); + System.out.println("datasetType: " + datasetType); + assertEquals("software", datasetType); } } From cc68c7dae2c8d874ddef470616c608f728e3ba53 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 22 Jul 2024 16:54:52 -0400 Subject: [PATCH 06/45] allow dataset type to be specified in DDI import #10517 --- doc/release-notes/10517-datasetType.md | 6 +- .../api/dataset-create-software-ddi.xml | 196 ++++++++++++++++++ .../source/user/dataset-types.rst | 7 + .../harvard/iq/dataverse/api/Dataverses.java | 34 ++- .../iq/dataverse/api/DatasetTypesIT.java | 35 ++++ 5 files changed, 275 insertions(+), 3 deletions(-) create mode 100644 doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml diff --git a/doc/release-notes/10517-datasetType.md b/doc/release-notes/10517-datasetType.md index afc96a546fa..9e72498cf70 100644 --- a/doc/release-notes/10517-datasetType.md +++ b/doc/release-notes/10517-datasetType.md @@ -1,3 +1,7 @@ ### Initial Support for Dataset Types (Dataset, Software, Workflow) -Datasets now have types. By default the dataset type will be "dataset" but if you turn on support for additional types, datasets can have a type of "software" or "workflow" as well. For more details see doc/sphinx-guides/source/user/dataset-types.rst and #10517. Please note that this feature is highly experimental. +Datasets now have types. By default the dataset type will be "dataset" but if you turn on support for additional types, datasets can have a type of "software" or "workflow" as well. For more details see and #10517. Please note that this feature is highly experimental. 
+ +next: + +- create with DDI diff --git a/doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml b/doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml new file mode 100644 index 00000000000..c9e94b32eb2 --- /dev/null +++ b/doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml @@ -0,0 +1,196 @@ + + + + + + Replication Data for: Title + + + Root + 2020-02-19 + + + 1 + + LastAuthor1, FirstAuthor1; LastAuthor2, FirstAuthor2, 2020, "Replication Data for: Title", Root, V1 + + + + + + Replication Data for: Title + Subtitle + Alternative Title + OtherIDIdentifier1 + OtherIDIdentifier2 + + + LastAuthor1, FirstAuthor1 + LastAuthor2, FirstAuthor2 + LastContributor1, FirstContributor1 + LastContributor2, FirstContributor2 + + + LastProducer1, FirstProducer1 + LastProducer2, FirstProducer2 + 1003-01-01 + ProductionPlace One + ProductionPlace Two + SoftwareName1 + SoftwareName2 + GrantInformationGrantNumber1 + GrantInformationGrantNumber2 + + + Root + LastDistributor1, FirstDistributor1 + LastDistributor2, FirstDistributor2 + LastContact1, FirstContact1 + LastContact2, FirstContact2 + 1004-01-01 + LastDepositor, FirstDepositor + 1002-01-01 + + + SeriesName One + SeriesInformation One + + + SeriesName Two + SeriesInformation Two + + + + + Agricultural Sciences + Business and Management + Engineering + Law + KeywordTerm1 + KeywordTerm2 + + DescriptionText 1 + DescriptionText2 + + 1005-01-01 + 1005-01-02 + 1005-02-01 + 1005-02-02 + 1006-01-01 + 1006-01-01 + 1006-02-01 + 1006-02-02 + software + Afghanistan + GeographicCoverageCity1 + GeographicCoverageStateProvince1 + GeographicCoverageOther1 + Albania + GeographicCoverageCity2 + GeographicCoverageStateProvince2 + GeographicCoverageOther2 + + 10 + 20 + 40 + 30 + + + 70 + 80 + 60 + 50 + + GeographicUnit1 + GeographicUnit2 + UnitOfAnalysis1 + UnitOfAnalysis2 + Universe1 + Universe2 + + Notes1 + + + + TimeMethod + LastDataCollector1, FirstDataCollector1 + CollectorTraining + Frequency + SamplingProcedure + + TargetSampleSizeFormula + 100 + + MajorDeviationsForSampleDesign + + DataSources1 + DataSources2 + OriginOfSources + CharacteristicOfSourcesNoted + DocumentationAndAccessToSources + + CollectionMode + TypeOfResearchInstrument + CharacteristicsOfDataCollectionSituation + ActionsToMinimizeLosses + ControlOperations + Weighting + CleaningOperations + + + ResponseRate + EstimatesOfSamplingError + OtherFormsOfDataAppraisal + + NotesText + + + Terms of Access + Terms of Use + + Data Access Place + Original Archive + Availability Status + Size of Collection + Study Completion + + + Confidentiality Declaration + Special Permissions + Restrictions + Contact for Access + Citation Requirements + Depositor Requirements + Conditions + Disclaimer + + + + RelatedMaterial1 + RelatedMaterial2 + RelatedDatasets1 + RelatedDatasets2 + + + + RelatedPublicationIDNumber1 + + RelatedPublicationCitation1 + + + + + + + RelatedPublicationIDNumber2 + + RelatedPublicationCitation2 + + + + OtherReferences1 + OtherReferences2 + + StudyLevelErrorNotes + + diff --git a/doc/sphinx-guides/source/user/dataset-types.rst b/doc/sphinx-guides/source/user/dataset-types.rst index 619c4cd0c4c..ffc59e212d9 100755 --- a/doc/sphinx-guides/source/user/dataset-types.rst +++ b/doc/sphinx-guides/source/user/dataset-types.rst @@ -28,3 +28,10 @@ Semantic API --------------------------------- An example JSON-LD file is available at :download:`dataset-create-software.jsonld <../_static/api/dataset-create-software.jsonld>` + +DDI Import +---------- + +An example 
DDI file is available at :download:`dataset-create-software-ddi.xml <../_static/api/dataset-create-software-ddi.xml>` + +Note that for DDI import to work ``dataKind`` must be set to one of the valid types. The first valid type wins. diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 76f38008fef..670c221b36e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -18,6 +18,7 @@ import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.dataset.DatasetType; import edu.harvard.iq.dataverse.dataverse.DataverseUtil; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.impl.*; @@ -429,8 +430,13 @@ public Response importDatasetDdi(@Context ContainerRequestContext crc, String xm Dataverse owner = findDataverseOrDie(parentIdtf); Dataset ds = null; try { - ds = jsonParser().parseDataset(importService.ddiToJson(xml)); + JsonObject jsonObject = importService.ddiToJson(xml); + ds = jsonParser().parseDataset(jsonObject); DataverseUtil.checkMetadataLangauge(ds, owner, settingsService.getBaseMetadataLanguageMap(null, true)); + DatasetType datasetType = getDatasetTypeFromJson(jsonObject); + if (datasetType != null) { + ds.setDatasetType(datasetType); + } } catch (JsonParseException jpe) { return badRequest("Error parsing data as Json: "+jpe.getMessage()); } catch (ImportException e) { @@ -491,7 +497,31 @@ public Response importDatasetDdi(@Context ContainerRequestContext crc, String xm return ex.getResponse(); } } - + + public DatasetType getDatasetTypeFromJson(JsonObject jsonObject) { + JsonArray citationFields = jsonObject.getJsonObject("datasetVersion") + .getJsonObject("metadataBlocks") + .getJsonObject("citation") + .getJsonArray("fields"); + for (JsonValue citationField : citationFields) { + JsonObject field = (JsonObject) citationField; + String name = field.getString("typeName"); + if (name.equals(DatasetFieldConstant.kindOfData)) { + JsonArray values = field.getJsonArray("value"); + for (JsonString value : values.getValuesAs(JsonString.class)) { + try { + // return the first DatasetType you find + DatasetType.Type type = DatasetType.Type.fromString(value.getString()); + return new DatasetType(type); + } catch (IllegalArgumentException ex) { + // No worries, it's just some other kind of data. 
+ } + } + } + } + return null; + } + @POST @AuthRequired @Path("{identifier}/datasets/:startmigration") diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java index c19aec33383..666a65fbd9e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java @@ -111,4 +111,39 @@ public void testImportJson() { assertEquals("software", datasetType); } + + @Test + public void testImportDDI() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + UtilIT.setSuperuserStatus(username, true).then().assertThat().statusCode(OK.getStatusCode()); + + Response createDataverse = UtilIT.createRandomDataverse(apiToken); + createDataverse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); + Integer dataverseId = UtilIT.getDataverseIdFromResponse(createDataverse); + + String jsonIn = UtilIT.getDatasetJson("doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml"); + + String randomString = UtilIT.getRandomString(6); + + Response importJson = UtilIT.importDatasetDDIViaNativeApi(apiToken, dataverseAlias, jsonIn, "doi:10.5072/FK2/" + randomString, "no"); + importJson.prettyPrint(); + importJson.then().assertThat().statusCode(CREATED.getStatusCode()); + + Integer datasetId = JsonPath.from(importJson.getBody().asString()).getInt("data.id"); + String datasetPid = JsonPath.from(importJson.getBody().asString()).getString("data.persistentId"); + + Response getDatasetJson = UtilIT.nativeGet(datasetId, apiToken); + getDatasetJson.prettyPrint(); + getDatasetJson.then().assertThat().statusCode(OK.getStatusCode()); + String datasetType = JsonPath.from(getDatasetJson.getBody().asString()).getString("data.datasetType"); + System.out.println("datasetType: " + datasetType); + assertEquals("software", datasetType); + + } + } From 25b2ea5aa78c377c29edb5d9fc155f02859a193a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 22 Jul 2024 17:02:12 -0400 Subject: [PATCH 07/45] list import with native json #10517 --- doc/sphinx-guides/source/user/dataset-types.rst | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/user/dataset-types.rst b/doc/sphinx-guides/source/user/dataset-types.rst index ffc59e212d9..7a014774824 100755 --- a/doc/sphinx-guides/source/user/dataset-types.rst +++ b/doc/sphinx-guides/source/user/dataset-types.rst @@ -29,8 +29,13 @@ Semantic API An example JSON-LD file is available at :download:`dataset-create-software.jsonld <../_static/api/dataset-create-software.jsonld>` -DDI Import ----------- +Import with Native JSON +----------------------- + +The same native JSON file as above can be used when importing a dataset: :download:`dataset-create-software.json <../_static/api/dataset-create-software.json>` + +Import with DDI +--------------- An example DDI file is available at :download:`dataset-create-software-ddi.xml <../_static/api/dataset-create-software-ddi.xml>` From 2b83f22c2f0b992807aa950d34e2cf30462d9886 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 23 Jul 2024 14:22:50 -0400 Subject: [PATCH 08/45] make dataset type searchable and facetable #10517 --- .../api/dataset-create-software-ddi.xml | 2 +- 
.../source/user/dataset-types.rst | 2 + .../iq/dataverse/search/IndexServiceBean.java | 8 ++++ .../iq/dataverse/search/SearchFields.java | 4 ++ .../dataverse/search/SearchServiceBean.java | 7 +++ .../iq/dataverse/search/SolrSearchResult.java | 10 +++++ .../staticSearchFields.properties | 3 +- .../iq/dataverse/api/DatasetTypesIT.java | 44 +++++++++++++++++-- 8 files changed, 74 insertions(+), 6 deletions(-) diff --git a/doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml b/doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml index c9e94b32eb2..efa52fae4e2 100644 --- a/doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml +++ b/doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml @@ -80,7 +80,7 @@ 1006-01-01 1006-02-01 1006-02-02 - software + workflow Afghanistan GeographicCoverageCity1 GeographicCoverageStateProvince1 diff --git a/doc/sphinx-guides/source/user/dataset-types.rst b/doc/sphinx-guides/source/user/dataset-types.rst index 7a014774824..45bbb558508 100755 --- a/doc/sphinx-guides/source/user/dataset-types.rst +++ b/doc/sphinx-guides/source/user/dataset-types.rst @@ -11,6 +11,8 @@ Intro Datasets can have a dataset type such as "dataset", "software", or "workflow". +When browsing or searching, these types appear under a facet called "Dataset Type". + Enabling Dataset Types ====================== diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 26b42734d19..3a60d1ba681 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -8,6 +8,7 @@ import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.DataAccessRequest; import edu.harvard.iq.dataverse.dataaccess.StorageIO; +import edu.harvard.iq.dataverse.dataset.DatasetType; import edu.harvard.iq.dataverse.datavariable.DataVariable; import edu.harvard.iq.dataverse.datavariable.VariableMetadata; import edu.harvard.iq.dataverse.datavariable.VariableMetadataUtil; @@ -1000,6 +1001,13 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set Date: Tue, 23 Jul 2024 14:40:39 -0400 Subject: [PATCH 09/45] improve sample data to look more like software or a workflow #10517 --- .../_static/api/dataset-create-software-ddi.xml | 4 ++-- .../_static/api/dataset-create-software.json | 14 +++++++------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml b/doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml index efa52fae4e2..bbe14a265d8 100644 --- a/doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml +++ b/doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml @@ -3,7 +3,7 @@ - Replication Data for: Title + A Workflow for Whale Identification Root @@ -18,7 +18,7 @@ - Replication Data for: Title + A Workflow for Whale Identification Subtitle Alternative Title OtherIDIdentifier1 diff --git a/doc/sphinx-guides/source/_static/api/dataset-create-software.json b/doc/sphinx-guides/source/_static/api/dataset-create-software.json index 2556d52f1fc..4c649bff0aa 100644 --- a/doc/sphinx-guides/source/_static/api/dataset-create-software.json +++ b/doc/sphinx-guides/source/_static/api/dataset-create-software.json @@ -9,7 +9,7 @@ "citation": { "fields": [ { - "value": "Darwin's Finches", + "value": "pyDataverse", 
"typeClass": "primitive", "multiple": false, "typeName": "title" @@ -18,13 +18,13 @@ "value": [ { "authorName": { - "value": "Finch, Fiona", + "value": "Range, Jan", "typeClass": "primitive", "multiple": false, "typeName": "authorName" }, "authorAffiliation": { - "value": "Birds Inc.", + "value": "University of Stuttgart", "typeClass": "primitive", "multiple": false, "typeName": "authorAffiliation" @@ -41,13 +41,13 @@ "typeClass": "primitive", "multiple": false, "typeName": "datasetContactEmail", - "value" : "finch@mailinator.com" + "value" : "jan@mailinator.com" }, "datasetContactName" : { "typeClass": "primitive", "multiple": false, "typeName": "datasetContactName", - "value": "Finch, Fiona" + "value": "Range, Jan" } }], "typeClass": "compound", @@ -57,7 +57,7 @@ { "value": [ { "dsDescriptionValue":{ - "value": "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds.", + "value": "A Python module for Dataverse.", "multiple":false, "typeClass": "primitive", "typeName": "dsDescriptionValue" @@ -68,7 +68,7 @@ }, { "value": [ - "Medicine, Health and Life Sciences" + "Computer and Information Science" ], "typeClass": "controlledVocabulary", "multiple": true, From 3aab5c06bba2a4c65ce6c661226c74f1b4debec7 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 24 Jul 2024 10:45:41 -0400 Subject: [PATCH 10/45] stop supporting setting of dataset type via DDI #10517 Maybe someday but we're not confident about which field to use and we're not even sure if there is any interest in this because DDI usually represents data, not software or workflows. --- doc/release-notes/10517-datasetType.md | 4 - .../api/dataset-create-software-ddi.xml | 196 ------------------ .../source/user/dataset-types.rst | 7 - .../harvard/iq/dataverse/api/Dataverses.java | 31 +-- .../iq/dataverse/api/DatasetTypesIT.java | 47 ----- 5 files changed, 1 insertion(+), 284 deletions(-) delete mode 100644 doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml diff --git a/doc/release-notes/10517-datasetType.md b/doc/release-notes/10517-datasetType.md index 9e72498cf70..3f17035385a 100644 --- a/doc/release-notes/10517-datasetType.md +++ b/doc/release-notes/10517-datasetType.md @@ -1,7 +1,3 @@ ### Initial Support for Dataset Types (Dataset, Software, Workflow) Datasets now have types. By default the dataset type will be "dataset" but if you turn on support for additional types, datasets can have a type of "software" or "workflow" as well. For more details see and #10517. Please note that this feature is highly experimental. 
- -next: - -- create with DDI diff --git a/doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml b/doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml deleted file mode 100644 index bbe14a265d8..00000000000 --- a/doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml +++ /dev/null @@ -1,196 +0,0 @@ - - - - - - A Workflow for Whale Identification - - - Root - 2020-02-19 - - - 1 - - LastAuthor1, FirstAuthor1; LastAuthor2, FirstAuthor2, 2020, "Replication Data for: Title", Root, V1 - - - - - - A Workflow for Whale Identification - Subtitle - Alternative Title - OtherIDIdentifier1 - OtherIDIdentifier2 - - - LastAuthor1, FirstAuthor1 - LastAuthor2, FirstAuthor2 - LastContributor1, FirstContributor1 - LastContributor2, FirstContributor2 - - - LastProducer1, FirstProducer1 - LastProducer2, FirstProducer2 - 1003-01-01 - ProductionPlace One - ProductionPlace Two - SoftwareName1 - SoftwareName2 - GrantInformationGrantNumber1 - GrantInformationGrantNumber2 - - - Root - LastDistributor1, FirstDistributor1 - LastDistributor2, FirstDistributor2 - LastContact1, FirstContact1 - LastContact2, FirstContact2 - 1004-01-01 - LastDepositor, FirstDepositor - 1002-01-01 - - - SeriesName One - SeriesInformation One - - - SeriesName Two - SeriesInformation Two - - - - - Agricultural Sciences - Business and Management - Engineering - Law - KeywordTerm1 - KeywordTerm2 - - DescriptionText 1 - DescriptionText2 - - 1005-01-01 - 1005-01-02 - 1005-02-01 - 1005-02-02 - 1006-01-01 - 1006-01-01 - 1006-02-01 - 1006-02-02 - workflow - Afghanistan - GeographicCoverageCity1 - GeographicCoverageStateProvince1 - GeographicCoverageOther1 - Albania - GeographicCoverageCity2 - GeographicCoverageStateProvince2 - GeographicCoverageOther2 - - 10 - 20 - 40 - 30 - - - 70 - 80 - 60 - 50 - - GeographicUnit1 - GeographicUnit2 - UnitOfAnalysis1 - UnitOfAnalysis2 - Universe1 - Universe2 - - Notes1 - - - - TimeMethod - LastDataCollector1, FirstDataCollector1 - CollectorTraining - Frequency - SamplingProcedure - - TargetSampleSizeFormula - 100 - - MajorDeviationsForSampleDesign - - DataSources1 - DataSources2 - OriginOfSources - CharacteristicOfSourcesNoted - DocumentationAndAccessToSources - - CollectionMode - TypeOfResearchInstrument - CharacteristicsOfDataCollectionSituation - ActionsToMinimizeLosses - ControlOperations - Weighting - CleaningOperations - - - ResponseRate - EstimatesOfSamplingError - OtherFormsOfDataAppraisal - - NotesText - - - Terms of Access - Terms of Use - - Data Access Place - Original Archive - Availability Status - Size of Collection - Study Completion - - - Confidentiality Declaration - Special Permissions - Restrictions - Contact for Access - Citation Requirements - Depositor Requirements - Conditions - Disclaimer - - - - RelatedMaterial1 - RelatedMaterial2 - RelatedDatasets1 - RelatedDatasets2 - - - - RelatedPublicationIDNumber1 - - RelatedPublicationCitation1 - - - - - - - RelatedPublicationIDNumber2 - - RelatedPublicationCitation2 - - - - OtherReferences1 - OtherReferences2 - - StudyLevelErrorNotes - - diff --git a/doc/sphinx-guides/source/user/dataset-types.rst b/doc/sphinx-guides/source/user/dataset-types.rst index 45bbb558508..4c96745f434 100755 --- a/doc/sphinx-guides/source/user/dataset-types.rst +++ b/doc/sphinx-guides/source/user/dataset-types.rst @@ -35,10 +35,3 @@ Import with Native JSON ----------------------- The same native JSON file as above can be used when importing a dataset: :download:`dataset-create-software.json 
<../_static/api/dataset-create-software.json>` - -Import with DDI ---------------- - -An example DDI file is available at :download:`dataset-create-software-ddi.xml <../_static/api/dataset-create-software-ddi.xml>` - -Note that for DDI import to work ``dataKind`` must be set to one of the valid types. The first valid type wins. diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 670c221b36e..47f05a75e93 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -430,13 +430,8 @@ public Response importDatasetDdi(@Context ContainerRequestContext crc, String xm Dataverse owner = findDataverseOrDie(parentIdtf); Dataset ds = null; try { - JsonObject jsonObject = importService.ddiToJson(xml); - ds = jsonParser().parseDataset(jsonObject); + ds = jsonParser().parseDataset(importService.ddiToJson(xml)); DataverseUtil.checkMetadataLangauge(ds, owner, settingsService.getBaseMetadataLanguageMap(null, true)); - DatasetType datasetType = getDatasetTypeFromJson(jsonObject); - if (datasetType != null) { - ds.setDatasetType(datasetType); - } } catch (JsonParseException jpe) { return badRequest("Error parsing data as Json: "+jpe.getMessage()); } catch (ImportException e) { @@ -498,30 +493,6 @@ public Response importDatasetDdi(@Context ContainerRequestContext crc, String xm } } - public DatasetType getDatasetTypeFromJson(JsonObject jsonObject) { - JsonArray citationFields = jsonObject.getJsonObject("datasetVersion") - .getJsonObject("metadataBlocks") - .getJsonObject("citation") - .getJsonArray("fields"); - for (JsonValue citationField : citationFields) { - JsonObject field = (JsonObject) citationField; - String name = field.getString("typeName"); - if (name.equals(DatasetFieldConstant.kindOfData)) { - JsonArray values = field.getJsonArray("value"); - for (JsonString value : values.getValuesAs(JsonString.class)) { - try { - // return the first DatasetType you find - DatasetType.Type type = DatasetType.Type.fromString(value.getString()); - return new DatasetType(type); - } catch (IllegalArgumentException ex) { - // No worries, it's just some other kind of data. 
- } - } - } - } - return null; - } - @POST @AuthRequired @Path("{identifier}/datasets/:startmigration") diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java index 46eb04b8d25..22b106c2906 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java @@ -1,13 +1,11 @@ package edu.harvard.iq.dataverse.api; -import edu.harvard.iq.dataverse.search.SearchFields; import io.restassured.RestAssured; import io.restassured.path.json.JsonPath; import io.restassured.response.Response; import static jakarta.ws.rs.core.Response.Status.CREATED; import static jakarta.ws.rs.core.Response.Status.OK; import org.hamcrest.CoreMatchers; -import static org.hamcrest.CoreMatchers.equalTo; import static org.junit.jupiter.api.Assertions.assertEquals; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -137,49 +135,4 @@ public void testImportJson() { } - @Test - public void testImportDdiWorkflow() { - Response createUser = UtilIT.createRandomUser(); - createUser.then().assertThat().statusCode(OK.getStatusCode()); - String username = UtilIT.getUsernameFromResponse(createUser); - String apiToken = UtilIT.getApiTokenFromResponse(createUser); - - UtilIT.setSuperuserStatus(username, true).then().assertThat().statusCode(OK.getStatusCode()); - - Response createDataverse = UtilIT.createRandomDataverse(apiToken); - createDataverse.then().assertThat().statusCode(CREATED.getStatusCode()); - String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); - Integer dataverseId = UtilIT.getDataverseIdFromResponse(createDataverse); - - UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode()); - - String jsonIn = UtilIT.getDatasetJson("doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml"); - - String randomString = UtilIT.getRandomString(6); - - Response importJson = UtilIT.importDatasetDDIViaNativeApi(apiToken, dataverseAlias, jsonIn, "doi:10.5072/FK2/" + randomString, "yes"); - importJson.prettyPrint(); - importJson.then().assertThat().statusCode(CREATED.getStatusCode()); - - Integer datasetId = JsonPath.from(importJson.getBody().asString()).getInt("data.id"); - String datasetPid = JsonPath.from(importJson.getBody().asString()).getString("data.persistentId"); - - Response getDatasetJson = UtilIT.nativeGet(datasetId, apiToken); - getDatasetJson.prettyPrint(); - getDatasetJson.then().assertThat().statusCode(OK.getStatusCode()); - String datasetType = JsonPath.from(getDatasetJson.getBody().asString()).getString("data.datasetType"); - System.out.println("datasetType: " + datasetType); - assertEquals("workflow", datasetType); - - Response search = UtilIT.searchAndShowFacets("id:dataset_" + datasetId, apiToken); - search.prettyPrint(); - search.then().assertThat() - .body("data.total_count", CoreMatchers.is(1)) - .body("data.count_in_response", CoreMatchers.is(1)) - .body("data.facets[0].datasetType_s.friendly", CoreMatchers.is("Dataset Type")) - .body("data.facets[0].datasetType_s.labels[0].workflow", CoreMatchers.is(1)) - .statusCode(OK.getStatusCode()); - - } - } From c8adf259ec9684c7db3d6a2a324973e0407cdfd5 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 26 Jul 2024 16:22:07 -0400 Subject: [PATCH 11/45] remove enum and put dataset types in database instead #10517 Also populate a few dataset types in database, with "dataset" being the default. 
Add default type to existing datasets. Also APIs for managing dataset types. --- doc/release-notes/10517-datasetType.md | 23 ++++ .../edu/harvard/iq/dataverse/Dataset.java | 4 +- .../iq/dataverse/EjbDataverseEngine.java | 11 +- .../iq/dataverse/api/AbstractApiBean.java | 6 +- .../harvard/iq/dataverse/api/Datasets.java | 105 ++++++++++++++++++ .../harvard/iq/dataverse/api/Dataverses.java | 13 +-- .../api/imports/ImportGenericServiceBean.java | 7 +- .../api/imports/ImportServiceBean.java | 10 +- .../iq/dataverse/dataset/DatasetType.java | 71 ++++++------ .../dataset/DatasetTypeServiceBean.java | 75 +++++++++++++ .../iq/dataverse/dataverse/DataverseUtil.java | 13 --- .../engine/command/CommandContext.java | 3 + .../impl/AbstractCreateDatasetCommand.java | 16 ++- .../iq/dataverse/search/IndexServiceBean.java | 2 +- .../iq/dataverse/util/json/JSONLDUtil.java | 22 +++- .../iq/dataverse/util/json/JsonParser.java | 28 +++-- .../iq/dataverse/util/json/JsonPrinter.java | 3 +- src/main/resources/db/migration/V6.3.0.1.sql | 9 ++ .../iq/dataverse/api/DatasetTypesIT.java | 49 +++++++- .../edu/harvard/iq/dataverse/api/UtilIT.java | 27 +++++ .../dataverse/engine/TestCommandContext.java | 8 +- .../export/SchemaDotOrgExporterTest.java | 4 +- .../dataverse/feedback/FeedbackUtilTest.java | 8 +- .../dataverse/util/json/JsonParserTest.java | 9 +- 24 files changed, 436 insertions(+), 90 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/dataset/DatasetTypeServiceBean.java create mode 100644 src/main/resources/db/migration/V6.3.0.1.sql diff --git a/doc/release-notes/10517-datasetType.md b/doc/release-notes/10517-datasetType.md index 3f17035385a..a8f86432a9f 100644 --- a/doc/release-notes/10517-datasetType.md +++ b/doc/release-notes/10517-datasetType.md @@ -1,3 +1,26 @@ ### Initial Support for Dataset Types (Dataset, Software, Workflow) Datasets now have types. By default the dataset type will be "dataset" but if you turn on support for additional types, datasets can have a type of "software" or "workflow" as well. For more details see and #10517. Please note that this feature is highly experimental. + +A handy query: + +``` +% DOCKER_CLI_HINTS=false docker exec -it postgres-1 bash -c "PGPASSWORD=secret psql -h localhost -U dataverse dataverse -c 'select dst.name, count(*) from dataset ds, datasettype dst where ds.datasettype_id = dst.id group by dst.name;'" + name | count +----------+------- + dataset | 136 + software | 14 +(2 rows) +``` + +Most API tests are passing but we do see a few failures: + +``` +[ERROR] Failures: +[ERROR] HarvestingClientsIT.testHarvestingClientRun_AllowHarvestingMissingCVV_False:187->harvestingClientRun:301 expected: <7> but was: <0> +[ERROR] HarvestingClientsIT.testHarvestingClientRun_AllowHarvestingMissingCVV_True:191->harvestingClientRun:301 expected: <8> but was: <0> +[ERROR] MakeDataCountApiIT.testMakeDataCountGetMetric:68 1 expectation failed. +Expected status code <200> but was <400>. 
+``` + +select dst.name, count(*) from dataset ds, datasettype dst where ds.datasettype_id = dst.id group by dst.name; diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index ef2f39620e9..e56ad723c46 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -129,8 +129,8 @@ public class Dataset extends DvObjectContainer { */ private boolean useGenericThumbnail; - @OneToOne(cascade = {CascadeType.MERGE, CascadeType.PERSIST}) - @JoinColumn(name = "datasettype_id", nullable = true) + @ManyToOne + @JoinColumn(name="datasettype_id", nullable = false) private DatasetType datasetType; @OneToOne(cascade = {CascadeType.MERGE, CascadeType.PERSIST}) diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java index c8537f2a424..0561fed8a97 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java +++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java @@ -12,6 +12,7 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailServiceBean; import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleServiceBean; +import edu.harvard.iq.dataverse.dataset.DatasetTypeServiceBean; import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -127,7 +128,10 @@ public class EjbDataverseEngine { @EJB MetadataBlockServiceBean metadataBlockService; - + + @EJB + DatasetTypeServiceBean datasetTypeService; + @EJB DataverseLinkingServiceBean dvLinking; @@ -603,6 +607,11 @@ public MetadataBlockServiceBean metadataBlocks() { return metadataBlockService; } + @Override + public DatasetTypeServiceBean datasetTypes() { + return datasetTypeService; + } + @Override public void beginCommandSequence() { this.commandsCalled = new Stack(); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 19df6d8c1c7..3257a3cc7ac 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -11,6 +11,7 @@ import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailServiceBean; import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleServiceBean; +import edu.harvard.iq.dataverse.dataset.DatasetTypeServiceBean; import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; @@ -164,6 +165,9 @@ String getWrappedMessageWhenJson() { @EJB protected LicenseServiceBean licenseSvc; + @EJB + protected DatasetTypeServiceBean datasetTypeSvc; + @EJB protected UserServiceBean userSvc; @@ -247,7 +251,7 @@ public enum Format { private final LazyRef jsonParserRef = new LazyRef<>(new Callable() { @Override public JsonParser call() throws Exception { - return new JsonParser(datasetFieldSvc, metadataBlockSvc,settingsSvc, licenseSvc); + return new JsonParser(datasetFieldSvc, metadataBlockSvc,settingsSvc, licenseSvc, datasetTypeSvc); } }); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java 
b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 4b919c5ed82..4326cd17737 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -99,9 +99,12 @@ import java.util.stream.Collectors; import static edu.harvard.iq.dataverse.api.ApiConstants.*; +import edu.harvard.iq.dataverse.dataset.DatasetType; +import edu.harvard.iq.dataverse.dataset.DatasetTypeServiceBean; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; +import static jakarta.ws.rs.core.Response.Status.NOT_FOUND; @Path("datasets") public class Datasets extends AbstractApiBean { @@ -187,6 +190,9 @@ public class Datasets extends AbstractApiBean { @Inject DatasetVersionFilesServiceBean datasetVersionFilesServiceBean; + @Inject + DatasetTypeServiceBean datasetTypeSvc; + /** * Used to consolidate the way we parse and handle dataset versions. * @param @@ -5071,4 +5077,103 @@ public Response resetPidGenerator(@Context ContainerRequestContext crc, @PathPar return ok("Pid Generator reset to default: " + dataset.getEffectivePidGenerator().getId()); } + @GET + @Path("datasetTypes") + public Response getDatasetTypes() { + JsonArrayBuilder jab = Json.createArrayBuilder(); + List datasetTypes = datasetTypeSvc.listAll(); + for (DatasetType datasetType : datasetTypes) { + JsonObjectBuilder job = Json.createObjectBuilder(); + job.add("id", datasetType.getId()); + job.add("name", datasetType.getName()); + jab.add(job); + } + return ok(jab.build()); + } + + @GET + @Path("datasetTypes/byName/{name}") + public Response getDatasetTypes(@PathParam("name") String name) { + DatasetType datasetType = datasetTypeSvc.getByName(name); + if (datasetType != null) { + return ok(datasetType.toJson()); + } else { + return error(NOT_FOUND, "Could not find a dataset type with name " + name); + } + } + + @POST + @AuthRequired + @Path("datasetTypes") + public Response addDatasetType(@Context ContainerRequestContext crc, String jsonIn) { + System.out.println("json in: " + jsonIn); + AuthenticatedUser user; + try { + user = getRequestAuthenticatedUserOrDie(crc); + } catch (WrappedResponse ex) { + return error(Response.Status.BAD_REQUEST, "Authentication is required."); + } + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + + if (jsonIn == null || jsonIn.isEmpty()) { + throw new IllegalArgumentException("JSON input was null or empty!"); + } + JsonObject jsonObject = JsonUtil.getJsonObject(jsonIn); + String nameIn = jsonObject.getString("name", null); + if (nameIn == null) { + throw new IllegalArgumentException("A name for the dataset type is required"); + } + + try { + DatasetType datasetType = new DatasetType(); + datasetType.setName(nameIn); + DatasetType saved = datasetTypeSvc.save(datasetType); + Long typeId = saved.getId(); + String name = saved.getName(); + actionLogSvc.log(new ActionLogRecord(ActionLogRecord.ActionType.Admin, "addDatasetType").setInfo("Dataset type added with id " + typeId + " and name " + name + ".")); + return ok(saved.toJson()); + } catch (WrappedResponse ex) { + return error(BAD_REQUEST, ex.getMessage()); + } + } + + @DELETE + @AuthRequired + @Path("datasetTypes/{id}") + public Response deleteDatasetType(@Context ContainerRequestContext crc, @PathParam("id") String doomed) { + AuthenticatedUser user; + try { + user = 
getRequestAuthenticatedUserOrDie(crc); + } catch (WrappedResponse ex) { + return error(Response.Status.BAD_REQUEST, "Authentication is required."); + } + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + + if (doomed == null || doomed.isEmpty()) { + throw new IllegalArgumentException("ID is required!"); + } + + long idToDelete; + try { + idToDelete = Long.parseLong(doomed); + } catch (NumberFormatException e) { + throw new IllegalArgumentException("ID must be a number"); + } + + try { + int numDeleted = datasetTypeSvc.deleteById(idToDelete); + if (numDeleted == 1) { + return ok("deleted"); + } else { + return error(BAD_REQUEST, "Something went wrong. Number of dataset types deleted: " + numDeleted); + } + } catch (WrappedResponse ex) { + return error(BAD_REQUEST, ex.getMessage()); + } + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 47f05a75e93..93c4ac059b4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -242,13 +242,6 @@ public Response createDataset(@Context ContainerRequestContext crc, String jsonB //Throw BadRequestException if metadataLanguage isn't compatible with setting DataverseUtil.checkMetadataLangauge(ds, owner, settingsService.getBaseMetadataLanguageMap(null, true)); - try { - logger.info("about to call checkDatasetType..."); - DataverseUtil.checkDatasetType(ds, FeatureFlags.DATASET_TYPES.enabled()); - } catch (BadRequestException ex) { - return badRequest(ex.getLocalizedMessage()); - } - // clean possible version metadata DatasetVersion version = ds.getVersions().get(0); @@ -311,7 +304,7 @@ public Response createDatasetFromJsonLd(@Context ContainerRequestContext crc, St Dataset ds = new Dataset(); ds.setOwner(owner); - ds = JSONLDUtil.updateDatasetMDFromJsonLD(ds, jsonLDBody, metadataBlockSvc, datasetFieldSvc, false, false, licenseSvc); + ds = JSONLDUtil.updateDatasetMDFromJsonLD(ds, jsonLDBody, metadataBlockSvc, datasetFieldSvc, false, false, licenseSvc, datasetTypeSvc); ds.setOwner(owner); @@ -508,7 +501,7 @@ public Response recreateDataset(@Context ContainerRequestContext crc, String jso Dataset ds = new Dataset(); ds.setOwner(owner); - ds = JSONLDUtil.updateDatasetMDFromJsonLD(ds, jsonLDBody, metadataBlockSvc, datasetFieldSvc, false, true, licenseSvc); + ds = JSONLDUtil.updateDatasetMDFromJsonLD(ds, jsonLDBody, metadataBlockSvc, datasetFieldSvc, false, true, licenseSvc, datasetTypeSvc); //ToDo - verify PID is one Dataverse can manage (protocol/authority/shoulder match) if (!PidUtil.getPidProvider(ds.getGlobalId().getProviderId()).canManagePID()) { throw new BadRequestException( @@ -551,6 +544,8 @@ private Dataset parseDataset(String datasetJson) throws WrappedResponse { try { return jsonParser().parseDataset(JsonUtil.getJsonObject(datasetJson)); } catch (JsonParsingException | JsonParseException jpe) { + String message = jpe.getLocalizedMessage(); + logger.log(Level.SEVERE, "Error parsing dataset JSON. message: {0}", message); logger.log(Level.SEVERE, "Error parsing dataset json. 
Json: {0}", datasetJson); throw new WrappedResponse(error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage())); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java index 6068ec45e4f..d32a548c8bf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java @@ -14,6 +14,7 @@ import edu.harvard.iq.dataverse.api.dto.*; import edu.harvard.iq.dataverse.api.dto.FieldDTO; import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO; +import edu.harvard.iq.dataverse.dataset.DatasetTypeServiceBean; import edu.harvard.iq.dataverse.license.LicenseServiceBean; import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider; import edu.harvard.iq.dataverse.pidproviders.handle.HandlePidProvider; @@ -71,9 +72,13 @@ public class ImportGenericServiceBean { @EJB SettingsServiceBean settingsService; + @EJB LicenseServiceBean licenseService; + @EJB + DatasetTypeServiceBean datasetTypeService; + @PersistenceContext(unitName = "VDCNet-ejbPU") private EntityManager em; @@ -110,7 +115,7 @@ public void importXML(String xmlToParse, String foreignFormat, DatasetVersion da logger.fine(json); JsonReader jsonReader = Json.createReader(new StringReader(json)); JsonObject obj = jsonReader.readObject(); - DatasetVersion dv = new JsonParser(datasetFieldSvc, blockService, settingsService, licenseService).parseDatasetVersion(obj, datasetVersion); + DatasetVersion dv = new JsonParser(datasetFieldSvc, blockService, settingsService, licenseService, datasetTypeService).parseDatasetVersion(obj, datasetVersion); } catch (XMLStreamException ex) { //Logger.getLogger("global").log(Level.SEVERE, null, ex); throw new EJBException("ERROR occurred while parsing XML fragment ("+xmlToParse.substring(0, 64)+"...); ", ex); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index 39977190691..d2bba56f884 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -23,6 +23,7 @@ import edu.harvard.iq.dataverse.MetadataBlockServiceBean; import edu.harvard.iq.dataverse.api.dto.DatasetDTO; import edu.harvard.iq.dataverse.api.imports.ImportUtil.ImportType; +import edu.harvard.iq.dataverse.dataset.DatasetTypeServiceBean; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetVersionCommand; @@ -104,8 +105,13 @@ public class ImportServiceBean { @EJB IndexServiceBean indexService; + @EJB LicenseServiceBean licenseService; + + @EJB + DatasetTypeServiceBean datasetTypeService; + /** * This is just a convenience method, for testing migration. It creates * a dummy dataverse with the directory name as dataverse name & alias. 
@@ -264,7 +270,7 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve JsonObject obj = JsonUtil.getJsonObject(json); //and call parse Json to read it into a dataset try { - JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService, licenseService, harvestingClient); + JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService, licenseService, datasetTypeService, harvestingClient); parser.setLenient(true); Dataset ds = parser.parseDataset(obj); @@ -417,7 +423,7 @@ public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse o JsonObject obj = JsonUtil.getJsonObject(json); //and call parse Json to read it into a dataset try { - JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService, licenseService); + JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService, licenseService, datasetTypeService); parser.setLenient(!importType.equals(ImportType.NEW)); Dataset ds = parser.parseDataset(obj); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java index 0d47d6a5885..3333819372d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java @@ -1,25 +1,41 @@ package edu.harvard.iq.dataverse.dataset; +import jakarta.json.Json; +import jakarta.json.JsonObjectBuilder; import jakarta.persistence.Column; import jakarta.persistence.Entity; -import jakarta.persistence.EnumType; -import jakarta.persistence.Enumerated; import jakarta.persistence.GeneratedValue; import jakarta.persistence.GenerationType; import jakarta.persistence.Id; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.Table; +import jakarta.persistence.UniqueConstraint; import java.io.Serializable; -import java.util.Arrays; +@NamedQueries({ + @NamedQuery(name = "DatasetType.findAll", + query = "SELECT d FROM DatasetType d"), + @NamedQuery(name = "DatasetType.findByName", + query = "SELECT d FROM DatasetType d WHERE d.name=:name"), + @NamedQuery(name = "DatasetType.deleteById", + query = "DELETE FROM DatasetType d WHERE d.id=:id"),}) @Entity +@Table(uniqueConstraints = { + @UniqueConstraint(columnNames = "name"),} +) + public class DatasetType implements Serializable { + public static final String DEFAULT_DATASET_TYPE = "dataset"; + @Id @GeneratedValue(strategy = GenerationType.IDENTITY) private Long id; + // Any constraints? @Pattern regexp? 
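+ // Free-form type name (e.g. "dataset", "software", "workflow"), replacing the enum-based baseType removed below so new types can be defined at runtime; the unique constraint on the table guards against duplicates.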
@Column(nullable = false) - @Enumerated(EnumType.STRING) - private Type baseType; + private String name; /** * This default constructor is only here to prevent this error at @@ -35,45 +51,26 @@ public class DatasetType implements Serializable { public DatasetType() { } - public DatasetType(Type baseType) { - this.baseType = baseType; + public Long getId() { + return id; } - public enum Type { - - DATASET("dataset"), - SOFTWARE("software"), - WORKFLOW("workflow"); - - private final String text; - - private Type(final String text) { - this.text = text; - } - - public static Type fromString(String text) { - if (text != null) { - for (Type type : Type.values()) { - if (text.equals(type.text)) { - return type; - } - } - } - throw new IllegalArgumentException("Type must be one of these values: " + Arrays.asList(Type.values()) + "."); - } + public void setId(Long id) { + this.id = id; + } - @Override - public String toString() { - return text; - } + public String getName() { + return name; } - public Type getBaseType() { - return baseType; + public void setName(String name) { + this.name = name; } - public void setBaseType(Type baseType) { - this.baseType = baseType; + public JsonObjectBuilder toJson() { + return Json.createObjectBuilder() + .add("id", getId()) + .add("name", getName()); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetTypeServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetTypeServiceBean.java new file mode 100644 index 00000000000..beaaa3e2578 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetTypeServiceBean.java @@ -0,0 +1,75 @@ +package edu.harvard.iq.dataverse.dataset; + +import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; +import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; +import edu.harvard.iq.dataverse.api.AbstractApiBean; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.PersistenceException; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +@Stateless +@Named +public class DatasetTypeServiceBean { + + private static final Logger logger = Logger.getLogger(DatasetTypeServiceBean.class.getName()); + + @PersistenceContext + EntityManager em; + + @EJB + ActionLogServiceBean actionLogSvc; + + public List listAll() { + return em.createNamedQuery("DatasetType.findAll", DatasetType.class).getResultList(); + } + + public DatasetType getByName(String name) { + try { + return em.createNamedQuery("DatasetType.findByName", DatasetType.class) + .setParameter("name", name) + .getSingleResult(); + } catch (NoResultException noResultException) { + logger.log(Level.WARNING, "Couldn't find a dataset type named " + name); + return null; + } + } + + public DatasetType save(DatasetType datasetType) throws AbstractApiBean.WrappedResponse { + if (datasetType.getId() != null) { + throw new AbstractApiBean.WrappedResponse(new IllegalArgumentException("There shouldn't be an ID in the request body"), null); + } + try { + em.persist(datasetType); + em.flush(); + } catch (PersistenceException p) { + if (p.getMessage().contains("duplicate key")) { + throw new AbstractApiBean.WrappedResponse(new IllegalStateException("A dataset type with the same name is already present.", p), null); + } else { + throw p; + } + } + return datasetType; + } + + public int 
deleteById(long id) throws AbstractApiBean.WrappedResponse { + actionLogSvc.log(new ActionLogRecord(ActionLogRecord.ActionType.Admin, "delete dataset type") + .setInfo(Long.toString(id))); + try { + return em.createNamedQuery("DatasetType.deleteById").setParameter("id", id).executeUpdate(); + } catch (PersistenceException p) { + if (p.getMessage().contains("violates foreign key constraint")) { + throw new AbstractApiBean.WrappedResponse(new IllegalStateException("Dataset type with id " + id + " is referenced and cannot be deleted.", p), null); + } else { + throw p; + } + } + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java index f13f73f2dc8..f45a9058e7c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java @@ -7,7 +7,6 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; -import edu.harvard.iq.dataverse.settings.FeatureFlags; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.json.JsonLDTerm; @@ -123,16 +122,4 @@ public static void checkMetadataLangauge(Dataset ds, Dataverse owner, Map contextMap) { public static Dataset updateDatasetMDFromJsonLD(Dataset ds, String jsonLDBody, MetadataBlockServiceBean metadataBlockSvc, DatasetFieldServiceBean datasetFieldSvc, boolean append, - boolean migrating, LicenseServiceBean licenseSvc) { + boolean migrating, LicenseServiceBean licenseSvc, DatasetTypeServiceBean datasetTypeSvc) { DatasetVersion dsv = new DatasetVersion(); @@ -100,10 +101,21 @@ public static Dataset updateDatasetMDFromJsonLD(Dataset ds, String jsonLDBody, try (StringReader rdr = new StringReader(jsonLDBody)) { try (JsonReader jsonReader = Json.createReader(rdr)) { JsonObject jsonObject = jsonReader.readObject(); - String datasetType = jsonObject.getString("datasetType", null); - logger.info("datasetType: " + datasetType); - if (datasetType != null) { - ds.setDatasetType(new DatasetType(DatasetType.Type.fromString(datasetType))); + String datasetTypeIn = jsonObject.getString("datasetType", null); + logger.fine("datasetTypeIn: " + datasetTypeIn); + DatasetType defaultDatasetType = datasetTypeSvc.getByName(DatasetType.DEFAULT_DATASET_TYPE); + if (defaultDatasetType == null) { + throw new RuntimeException("Couldn't find default dataset type: " + DatasetType.DEFAULT_DATASET_TYPE); + } + if (datasetTypeIn == null) { + ds.setDatasetType(defaultDatasetType); + } else { + DatasetType datasetType = datasetTypeSvc.getByName(datasetTypeIn); + if (datasetType != null) { + ds.setDatasetType(datasetType); + } else { + throw new RuntimeException("Invalid dataset type: " + datasetTypeIn); + } } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index f534317ae61..139065faef1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -25,6 +25,7 @@ import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddressRange; import edu.harvard.iq.dataverse.authorization.groups.impl.maildomain.MailDomainGroup; import edu.harvard.iq.dataverse.dataset.DatasetType; +import edu.harvard.iq.dataverse.dataset.DatasetTypeServiceBean; 
import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams; import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; import edu.harvard.iq.dataverse.license.License; @@ -69,6 +70,7 @@ public class JsonParser { MetadataBlockServiceBean blockService; SettingsServiceBean settingsService; LicenseServiceBean licenseService; + DatasetTypeServiceBean datasetTypeService; HarvestingClient harvestingClient = null; boolean allowHarvestingMissingCVV = false; @@ -84,15 +86,16 @@ public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceB this.settingsService = settingsService; } - public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceBean blockService, SettingsServiceBean settingsService, LicenseServiceBean licenseService) { - this(datasetFieldSvc, blockService, settingsService, licenseService, null); + public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceBean blockService, SettingsServiceBean settingsService, LicenseServiceBean licenseService, DatasetTypeServiceBean datasetTypeService) { + this(datasetFieldSvc, blockService, settingsService, licenseService, datasetTypeService, null); } - public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceBean blockService, SettingsServiceBean settingsService, LicenseServiceBean licenseService, HarvestingClient harvestingClient) { + public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceBean blockService, SettingsServiceBean settingsService, LicenseServiceBean licenseService, DatasetTypeServiceBean datasetTypeService, HarvestingClient harvestingClient) { this.datasetFieldSvc = datasetFieldSvc; this.blockService = blockService; this.settingsService = settingsService; this.licenseService = licenseService; + this.datasetTypeService = datasetTypeService; this.harvestingClient = harvestingClient; this.allowHarvestingMissingCVV = harvestingClient != null && harvestingClient.getAllowHarvestingMissingCVV(); } @@ -329,10 +332,21 @@ public Dataset parseDataset(JsonObject obj) throws JsonParseException { }else { throw new JsonParseException("Specified metadatalanguage not allowed."); } - String datasetType = obj.getString("datasetType",null); - logger.info("datasetType: " + datasetType); - if (datasetType != null) { - dataset.setDatasetType(new DatasetType(DatasetType.Type.fromString(datasetType))); + DatasetType defaultDatasetType = datasetTypeService.getByName(DatasetType.DEFAULT_DATASET_TYPE); + if (defaultDatasetType == null) { + throw new JsonParseException("Couldn't find default dataset type: " + DatasetType.DEFAULT_DATASET_TYPE); + } + String datasetTypeIn = obj.getString("datasetType", null); + logger.fine("datasetTypeIn: " + datasetTypeIn); + if (datasetTypeIn == null) { + dataset.setDatasetType(defaultDatasetType); + } else { + DatasetType datasetType = datasetTypeService.getByName(datasetTypeIn); + if (datasetType != null) { + dataset.setDatasetType(datasetType); + } else { + throw new JsonParseException("Invalid dataset type: " + datasetTypeIn); + } } DatasetVersion dsv = new DatasetVersion(); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 3d27224692d..dac7a9089ea 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -56,7 +56,6 @@ import jakarta.ejb.Singleton; import jakarta.json.JsonArray; import jakarta.json.JsonObject; 
-import java.math.BigDecimal; /** * Convert objects to Json. @@ -409,7 +408,7 @@ public static JsonObjectBuilder json(Dataset ds, Boolean returnOwners) { } DatasetType datasetType = ds.getDatasetType(); if (datasetType != null) { - bld.add("datasetType", datasetType.getBaseType().toString()); + bld.add("datasetType", datasetType.getName()); } return bld; } diff --git a/src/main/resources/db/migration/V6.3.0.1.sql b/src/main/resources/db/migration/V6.3.0.1.sql new file mode 100644 index 00000000000..d4a2731bc70 --- /dev/null +++ b/src/main/resources/db/migration/V6.3.0.1.sql @@ -0,0 +1,9 @@ +-- Dataset types have been added. See #10517 and #10694 +-- +-- First, insert some types (dataset is the default). +INSERT INTO datasettype (name) VALUES ('dataset'); +INSERT INTO datasettype (name) VALUES ('software'); +INSERT INTO datasettype (name) VALUES ('workflow'); +-- +-- Then, give existing datasets a type of "dataset". +UPDATE dataset SET datasettype_id = (SELECT id FROM datasettype WHERE name = 'dataset'); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java index 22b106c2906..2ae76731aaf 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java @@ -1,11 +1,15 @@ package edu.harvard.iq.dataverse.api; +import edu.harvard.iq.dataverse.dataset.DatasetType; import io.restassured.RestAssured; import io.restassured.path.json.JsonPath; import io.restassured.response.Response; +import jakarta.json.Json; import static jakarta.ws.rs.core.Response.Status.CREATED; import static jakarta.ws.rs.core.Response.Status.OK; +import java.util.UUID; import org.hamcrest.CoreMatchers; +import static org.hamcrest.CoreMatchers.equalTo; import static org.junit.jupiter.api.Assertions.assertEquals; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -36,7 +40,6 @@ public void testCreateSoftwareDatasetNative() { createSoftware.then().assertThat() .statusCode(CREATED.getStatusCode()); - //TODO: try sending "junk" instead of "software". Integer datasetId = UtilIT.getDatasetIdFromResponse(createSoftware); String datasetPid = JsonPath.from(createSoftware.getBody().asString()).getString("data.persistentId"); @@ -88,7 +91,6 @@ public void testCreateWorkflowDatasetSemantic() { createSoftware.then().assertThat() .statusCode(CREATED.getStatusCode()); - //TODO: try sending "junk" instead of "software". 
Integer datasetId = UtilIT.getDatasetIdFromResponse(createSoftware); String datasetPid = JsonPath.from(createSoftware.getBody().asString()).getString("data.persistentId"); @@ -135,4 +137,47 @@ public void testImportJson() { } + @Test + public void testGetDatasetTypes() { + Response getTypes = UtilIT.getDatasetTypes(); + getTypes.prettyPrint(); + getTypes.then().assertThat() + .statusCode(OK.getStatusCode()) + // non-null because types were added by a Flyway script + .body("data", CoreMatchers.not(equalTo(null))); + } + + @Test + public void testGetDefaultDatasetType() { + Response getType = UtilIT.getDatasetTypeByName(DatasetType.DEFAULT_DATASET_TYPE); + getType.prettyPrint(); + getType.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.name", equalTo(DatasetType.DEFAULT_DATASET_TYPE)); + } + + @Test + public void testAddAndDeleteDatasetType() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + UtilIT.setSuperuserStatus(username, true).then().assertThat().statusCode(OK.getStatusCode()); + + String randomName = UUID.randomUUID().toString().substring(0, 8); + String jsonIn = Json.createObjectBuilder().add("name", randomName).build().toString(); + + System.out.println("adding type with name " + randomName); + Response typeAdded = UtilIT.addDatasetType(jsonIn, apiToken); + typeAdded.prettyPrint(); + typeAdded.then().assertThat().statusCode(OK.getStatusCode()); + + long doomed = JsonPath.from(typeAdded.getBody().asString()).getLong("data.id"); + System.out.println("deleting type with id " + doomed); + Response typeDeleted = UtilIT.deleteDatasetTypes(doomed, apiToken); + typeDeleted.prettyPrint(); + typeDeleted.then().assertThat().statusCode(OK.getStatusCode()); + + } + } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 3d5b97d00e8..0bb863494bc 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -4020,4 +4020,31 @@ public static Response getOpenAPI(String accept, String format) { .get("/openapi"); return response; } + + public static Response getDatasetTypes() { + Response response = given() + .get("/api/datasets/datasetTypes"); + return response; + } + + static Response getDatasetTypeByName(String name) { + return given() + .get("/api/datasets/datasetTypes/byName/" + name); + } + + static Response addDatasetType(String jsonIn, String apiToken) { + System.out.println("called addDatasetType..."); + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(jsonIn) + .contentType(ContentType.JSON) + .post("/api/datasets/datasetTypes"); + } + + static Response deleteDatasetTypes(long doomed, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .delete("/api/datasets/datasetTypes/" + doomed); + } + } diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java b/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java index f2c03adea20..b4b9c0d33f2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java @@ -8,6 +8,7 @@ import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean; import 
edu.harvard.iq.dataverse.confirmemail.ConfirmEmailServiceBean; import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleServiceBean; +import edu.harvard.iq.dataverse.dataset.DatasetTypeServiceBean; import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; @@ -234,7 +235,12 @@ public ActionLogServiceBean actionLog() { public MetadataBlockServiceBean metadataBlocks() { return null; } - + + @Override + public DatasetTypeServiceBean datasetTypes() { + return null; + } + @Override public StorageUseServiceBean storageUse() { return null; diff --git a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java index 2139589b4c3..9850e9d80e9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java @@ -2,6 +2,7 @@ import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.branding.BrandingUtilTest; +import edu.harvard.iq.dataverse.dataset.DatasetTypeServiceBean; import io.gdcc.spi.export.ExportDataProvider; import io.gdcc.spi.export.XMLExporter; import edu.harvard.iq.dataverse.license.License; @@ -53,6 +54,7 @@ public class SchemaDotOrgExporterTest { private static final MockDatasetFieldSvc datasetFieldTypeSvc = new MockDatasetFieldSvc(); private static final SettingsServiceBean settingsService = Mockito.mock(SettingsServiceBean.class); private static final LicenseServiceBean licenseService = Mockito.mock(LicenseServiceBean.class); + private static final DatasetTypeServiceBean datasetTypeService = Mockito.mock(DatasetTypeServiceBean.class); private static final SchemaDotOrgExporter schemaDotOrgExporter = new SchemaDotOrgExporter(); @BeforeAll @@ -173,7 +175,7 @@ public void testExportDescriptionTruncation() throws JsonParseException, ParseEx private JsonObject createExportFromJson(ExportDataProvider provider) throws JsonParseException, ParseException { License license = new License("CC0 1.0", "You can copy, modify, distribute and perform the work, even for commercial purposes, all without asking permission.", URI.create("http://creativecommons.org/publicdomain/zero/1.0/"), URI.create("/resources/images/cc0.png"), true, 1l); license.setDefault(true); - JsonParser jsonParser = new JsonParser(datasetFieldTypeSvc, null, settingsService, licenseService); + JsonParser jsonParser = new JsonParser(datasetFieldTypeSvc, null, settingsService, licenseService, datasetTypeService); DatasetVersion version = jsonParser.parseDatasetVersion(provider.getDatasetJson().getJsonObject("datasetVersion")); version.setVersionState(DatasetVersion.VersionState.RELEASED); SimpleDateFormat dateFmt = new SimpleDateFormat("yyyyMMdd"); diff --git a/src/test/java/edu/harvard/iq/dataverse/feedback/FeedbackUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/feedback/FeedbackUtilTest.java index 7c31db5bee2..072be13dcec 100644 --- a/src/test/java/edu/harvard/iq/dataverse/feedback/FeedbackUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/feedback/FeedbackUtilTest.java @@ -13,6 +13,7 @@ import edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.dataset.DatasetTypeServiceBean; import edu.harvard.iq.dataverse.license.LicenseServiceBean; import 
edu.harvard.iq.dataverse.mocks.MockDatasetFieldSvc; import edu.harvard.iq.dataverse.mocks.MocksFactory; @@ -55,6 +56,7 @@ public class FeedbackUtilTest { private static InternetAddress systemAddress; private static final SettingsServiceBean settingsService = Mockito.mock(SettingsServiceBean.class); private static final LicenseServiceBean licenseService = Mockito.mock(LicenseServiceBean.class); + private static final DatasetTypeServiceBean datasetTypeService = Mockito.mock(DatasetTypeServiceBean.class); private static final String systemEmail = "support@librascholar.edu"; private static final boolean weKnowHowToCreateMockAuthenticatedUsers = false; @@ -144,7 +146,7 @@ public static void setUpClass() throws IOException, JsonParseException, AddressE JsonReader jsonReader1 = Json.createReader(new StringReader(datasetVersionAsJson)); JsonObject json1 = jsonReader1.readObject(); - JsonParser jsonParser = new JsonParser(datasetFieldTypeSvc, null, settingsService, licenseService); + JsonParser jsonParser = new JsonParser(datasetFieldTypeSvc, null, settingsService, licenseService, datasetTypeService); dsVersion = jsonParser.parseDatasetVersion(json1.getJsonObject("datasetVersion")); File datasetVersionJson2 = new File("tests/data/datasetContacts1.json"); @@ -153,14 +155,14 @@ public static void setUpClass() throws IOException, JsonParseException, AddressE JsonReader jsonReader12 = Json.createReader(new StringReader(datasetVersionAsJson2)); JsonObject json12 = jsonReader12.readObject(); - JsonParser jsonParser2 = new JsonParser(datasetFieldTypeSvc, null, settingsService, licenseService); + JsonParser jsonParser2 = new JsonParser(datasetFieldTypeSvc, null, settingsService, licenseService, datasetTypeService); dsVersion2 = jsonParser2.parseDatasetVersion(json12.getJsonObject("datasetVersion")); File datasetVersionJsonNoContacts = new File("tests/data/datasetNoContacts.json"); String datasetVersionAsJsonNoContacts = new String(Files.readAllBytes(Paths.get(datasetVersionJsonNoContacts.getAbsolutePath()))); JsonReader jsonReaderNoContacts = Json.createReader(new StringReader(datasetVersionAsJsonNoContacts)); JsonObject jsonNoContacts = jsonReaderNoContacts.readObject(); - JsonParser jsonParserNoContacts = new JsonParser(datasetFieldTypeSvc, null, settingsService, licenseService); + JsonParser jsonParserNoContacts = new JsonParser(datasetFieldTypeSvc, null, settingsService, licenseService, datasetTypeService); dsVersionNoContacts = jsonParserNoContacts.parseDatasetVersion(jsonNoContacts.getJsonObject("datasetVersion")); FeedbackUtil justForCodeCoverage = new FeedbackUtil(); diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java index 972fc9c41cd..59e175f30c1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java @@ -23,6 +23,8 @@ import edu.harvard.iq.dataverse.authorization.groups.impl.maildomain.MailDomainGroup; import edu.harvard.iq.dataverse.authorization.groups.impl.maildomain.MailDomainGroupTest; import edu.harvard.iq.dataverse.authorization.users.GuestUser; +import edu.harvard.iq.dataverse.dataset.DatasetType; +import edu.harvard.iq.dataverse.dataset.DatasetTypeServiceBean; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.license.LicenseServiceBean; import edu.harvard.iq.dataverse.mocks.MockDatasetFieldSvc; @@ -72,6 +74,7 @@ public class JsonParserTest { 
MockDatasetFieldSvc datasetFieldTypeSvc = null; MockSettingsSvc settingsSvc = null; LicenseServiceBean licenseService = Mockito.mock(LicenseServiceBean.class); + DatasetTypeServiceBean datasetTypeService = Mockito.mock(DatasetTypeServiceBean.class); DatasetFieldType keywordType; DatasetFieldType descriptionType; DatasetFieldType subjectType; @@ -124,7 +127,11 @@ public void setUp() { } compoundSingleType.setChildDatasetFieldTypes(childTypes); settingsSvc = new MockSettingsSvc(); - sut = new JsonParser(datasetFieldTypeSvc, null, settingsSvc, licenseService); + DatasetType datasetType = new DatasetType(); + datasetType.setName(DatasetType.DEFAULT_DATASET_TYPE); + datasetType.setId(1l); + Mockito.when(datasetTypeService.getByName(DatasetType.DEFAULT_DATASET_TYPE)).thenReturn(datasetType); + sut = new JsonParser(datasetFieldTypeSvc, null, settingsSvc, licenseService, datasetTypeService); } @Test From bf668a94bc7461c95097dd0ffc29297cba4108cb Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 29 Jul 2024 11:33:12 -0400 Subject: [PATCH 12/45] fix sql script, add column etc #10517 --- src/main/resources/db/migration/V6.3.0.2.sql | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/src/main/resources/db/migration/V6.3.0.2.sql b/src/main/resources/db/migration/V6.3.0.2.sql index d4a2731bc70..2edfee64ca4 100644 --- a/src/main/resources/db/migration/V6.3.0.2.sql +++ b/src/main/resources/db/migration/V6.3.0.2.sql @@ -1,9 +1,16 @@ -- Dataset types have been added. See #10517 and #10694 -- --- First, insert some types (dataset is the default). +-- Insert some types (dataset is the default). INSERT INTO datasettype (name) VALUES ('dataset'); INSERT INTO datasettype (name) VALUES ('software'); INSERT INTO datasettype (name) VALUES ('workflow'); -- --- Then, give existing datasets a type of "dataset". +-- Add the new column (if it doesn't exist) and foreign key. +ALTER TABLE dataset ADD COLUMN IF NOT EXISTS datasettype_id bigint; +ALTER TABLE dataset ADD CONSTRAINT fk_dataset_datasettype_id FOREIGN KEY (datasettype_id) REFERENCES datasettype(id); +-- +-- Give existing datasets a type of "dataset". UPDATE dataset SET datasettype_id = (SELECT id FROM datasettype WHERE name = 'dataset'); +-- +-- Make the column non-null +ALTER TABLE dataset ALTER COLUMN datasettype_id SET NOT NULL; From 067d41654b52c486d303c590cfcd483224847feb Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 29 Jul 2024 14:54:24 -0400 Subject: [PATCH 13/45] make dataset types translatable and capitalized consistently #10517 Before this commit, the facet looked like this... Dataset Type (3) Dataset (2) software (1) workflow ... that is, "Dataset" was capitalized but "software" and "workflow" were not. This commit fixes this, making all types capitalized, and it makes the values translatable in other languages. However, it does nothing to address some confusion that Search API users will feel. They'll get back the capitalized values but will need to pass in the lower case version (in English) to narrow their search results.
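To sketch the problem for API users (counts and values here are illustrative), a Search API response will now contain a facet block like

    {"datasetType_s":{"friendly":"Dataset Type","labels":[{"Dataset":2},{"Software":1}]}}

but narrowing results still requires the lower case, untranslated name, e.g.

    curl "$SERVER_URL/api/search?q=*&fq=datasetType_s:software"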
--- .../harvard/iq/dataverse/search/SearchServiceBean.java | 4 ++++ src/main/java/propertyFiles/Bundle.properties | 10 ++++++++++ 2 files changed, 14 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index cda7c53a9e5..bc24eb949b4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -775,6 +775,10 @@ public SolrQueryResponse search( } } else { try { + // This is where facets are capitalized. + // This will be a problem for the API clients because they get back a string like this from the Search API... + // {"datasetType_s":{"friendly":"Dataset Type","labels":[{"Dataset":1},{"Software":1}]} + // ... but they will need to use the lower case version (e.g. "software") to narrow results. localefriendlyName = BundleUtil.getStringFromPropertyFile(facetFieldCount.getName(), "Bundle"); } catch (Exception e) { localefriendlyName = facetFieldCount.getName(); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 4b366522966..76374edb07e 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -3,7 +3,17 @@ newDataverse=New Dataverse hostDataverse=Host Dataverse dataverses=Dataverses passwd=Password +# BEGIN dataset types +# `dataset=Dataset` has been here since 4.0 but now that we have dataset types, +# we need to add the rest of the types here for two reasons. First, we want +# translators to be able to translate these types. Second, in English it looks +# weird to have only "Dataset" capitalized in the facet but not "software" and +# "workflow". This capitalization (looking up here in the bundle) is done by +# SearchServiceBean near the comment "This is where facets are capitalized". dataset=Dataset +software=Software +workflow=Workflow +# END dataset types datasets=Datasets newDataset=New Dataset files=Files From d74795916359733588b02c3336e1c6951427084b Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 29 Jul 2024 15:22:51 -0400 Subject: [PATCH 14/45] rename datasetType_s to datasetType #10517 Also add upgrade instructions for Solr. Note that the change from "software" to "Software" should have been included in the last commit about capitalization. --- conf/solr/schema.xml | 1 + doc/release-notes/10517-datasetType.md | 12 ++++++++++++ .../harvard/iq/dataverse/search/SearchFields.java | 2 +- .../iq/dataverse/search/SearchServiceBean.java | 2 +- .../java/propertyFiles/staticSearchFields.properties | 2 +- .../edu/harvard/iq/dataverse/api/DatasetTypesIT.java | 8 ++++---- 6 files changed, 20 insertions(+), 7 deletions(-) diff --git a/conf/solr/schema.xml b/conf/solr/schema.xml index 5dde750573d..274511e9c93 100644 --- a/conf/solr/schema.xml +++ b/conf/solr/schema.xml @@ -205,6 +205,7 @@ + <field name="datasetType" type="string" stored="true" indexed="true" multiValued="false"/> diff --git a/doc/release-notes/10517-datasetType.md b/doc/release-notes/10517-datasetType.md index a8f86432a9f..35a1e21b8c3 100644 --- a/doc/release-notes/10517-datasetType.md +++ b/doc/release-notes/10517-datasetType.md @@ -2,6 +2,18 @@ Datasets now have types. By default the dataset type will be "dataset" but if you turn on support for additional types, datasets can have a type of "software" or "workflow" as well. For more details see and #10517. Please note that this feature is highly experimental.
+Upgrade instructions +-------------------- + +Add the following line to your Solr schema.xml file and do a full reindex: + +``` + <field name="datasetType" type="string" stored="true" indexed="true" multiValued="false"/> +``` + +Developer notes +--------------- + A handy query: ``` diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java index 7327150bdca..bf9a1c47541 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java @@ -267,7 +267,7 @@ more targeted results for just datasets. The format is YYYY (i.e. /** * Datasets can be software, workflow, etc. See the DatasetType object. */ - public static final String DATASET_TYPE = "datasetType_s"; + public static final String DATASET_TYPE = "datasetType"; public static final String VARIABLE_NAME = "variableName"; public static final String VARIABLE_LABEL = "variableLabel"; diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index bc24eb949b4..f361cffe24c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -777,7 +777,7 @@ public SolrQueryResponse search( try { // This is where facets are capitalized. // This will be a problem for the API clients because they get back a string like this from the Search API... - // {"datasetType_s":{"friendly":"Dataset Type","labels":[{"Dataset":1},{"Software":1}]} + // {"datasetType":{"friendly":"Dataset Type","labels":[{"Dataset":1},{"Software":1}]} // ... but they will need to use the lower case version (e.g. "software") to narrow results. localefriendlyName = BundleUtil.getStringFromPropertyFile(facetFieldCount.getName(), "Bundle"); } catch (Exception e) { localefriendlyName = facetFieldCount.getName(); diff --git a/src/main/java/propertyFiles/staticSearchFields.properties b/src/main/java/propertyFiles/staticSearchFields.properties index 4a454500ee4..9a208e841d6 100644 --- a/src/main/java/propertyFiles/staticSearchFields.properties +++ b/src/main/java/propertyFiles/staticSearchFields.properties @@ -10,4 +10,4 @@ staticSearchFields.fileTag=File Tag staticSearchFields.fileAccess=Access staticSearchFields.publicationStatus=Publication Status staticSearchFields.subject_ss=Subject -staticSearchFields.datasetType_s=Dataset Type +staticSearchFields.datasetType=Dataset Type diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java index 2ae76731aaf..8bf2ffb6bd3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java @@ -55,8 +55,8 @@ public void testCreateSoftwareDatasetNative() { searchDraft.then().assertThat() .body("data.total_count", CoreMatchers.is(1)) .body("data.count_in_response", CoreMatchers.is(1)) - .body("data.facets[0].datasetType_s.friendly", CoreMatchers.is("Dataset Type")) - .body("data.facets[0].datasetType_s.labels[0].software", CoreMatchers.is(1)) + .body("data.facets[0].datasetType.friendly", CoreMatchers.is("Dataset Type")) + .body("data.facets[0].datasetType.labels[0].Software", CoreMatchers.is(1)) .statusCode(OK.getStatusCode()); UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode()); @@ -67,8 +67,8 @@ public void testCreateSoftwareDatasetNative() { // searchAsGuest.then().assertThat() // .body("data.total_count",
CoreMatchers.is(1)) // .body("data.count_in_response", CoreMatchers.is(1)) -// .body("data.facets[0].datasetType_s.friendly", CoreMatchers.is("Dataset Type")) -// .body("data.facets[0].datasetType_s.labels[0].software", CoreMatchers.is(1)) +// .body("data.facets[0].datasetType.friendly", CoreMatchers.is("Dataset Type")) +// .body("data.facets[0].datasetType.labels[0].software", CoreMatchers.is(1)) // .statusCode(OK.getStatusCode()); } From 78a3c1a27199abb1b99b61b508c3439c1be4f342 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 29 Jul 2024 15:46:16 -0400 Subject: [PATCH 15/45] fix sql script when adding foreign key constraint #10517 --- src/main/resources/db/migration/V6.3.0.2.sql | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/src/main/resources/db/migration/V6.3.0.2.sql b/src/main/resources/db/migration/V6.3.0.2.sql index 2edfee64ca4..610d6899d68 100644 --- a/src/main/resources/db/migration/V6.3.0.2.sql +++ b/src/main/resources/db/migration/V6.3.0.2.sql @@ -5,12 +5,21 @@ INSERT INTO datasettype (name) VALUES ('dataset'); INSERT INTO datasettype (name) VALUES ('software'); INSERT INTO datasettype (name) VALUES ('workflow'); -- --- Add the new column (if it doesn't exist) and foreign key. +-- Add the new column (if it doesn't exist). ALTER TABLE dataset ADD COLUMN IF NOT EXISTS datasettype_id bigint; -ALTER TABLE dataset ADD CONSTRAINT fk_dataset_datasettype_id FOREIGN KEY (datasettype_id) REFERENCES datasettype(id); +-- +-- Add the foreign key. +DO $$ +BEGIN + BEGIN + ALTER TABLE dataset ADD CONSTRAINT fk_dataset_datasettype_id FOREIGN KEY (datasettype_id) REFERENCES datasettype(id); + EXCEPTION + WHEN duplicate_object THEN RAISE NOTICE 'Table constraint fk_dataset_datasettype_id already exists'; + END; +END $$; -- -- Give existing datasets a type of "dataset". UPDATE dataset SET datasettype_id = (SELECT id FROM datasettype WHERE name = 'dataset'); -- --- Make the column non-null +-- Make the column non-null. ALTER TABLE dataset ALTER COLUMN datasettype_id SET NOT NULL; From 8593d328ea5fbb94c1435b0ff4651199dcf7abef Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 29 Jul 2024 17:13:59 -0400 Subject: [PATCH 16/45] send to DataCite either Dataset, Software, or Workflow #10517 For resourceTypeGeneral. 
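As a sketch of the effect (exact serialization may differ), a dataset with type "software" should now be registered with

    <resourceType resourceTypeGeneral="Software"/>

rather than the resourceTypeGeneral="Dataset" that was previously hardcoded in the template for all datasets.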
--- .../pidproviders/doi/XmlMetadataTemplate.java | 26 +++++++++++++++++++ .../doi/datacite_metadata_template.xml | 2 +- 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java index 30e4dfd79cc..694c09b8122 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java @@ -16,6 +16,7 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetAuthor; import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.dataset.DatasetType; import edu.harvard.iq.dataverse.pidproviders.AbstractPidProvider; public class XmlMetadataTemplate { @@ -43,6 +44,7 @@ public class XmlMetadataTemplate { private String publisher; private String publisherYear; private List authors; + private String resourceTypeGeneral; private String description; private List contacts; private List producers; @@ -197,6 +199,22 @@ public String generateXML(DvObject dvObject) { } } + if (dvObject.isInstanceofDataset()) { + Dataset dataset = (Dataset) dvObject; + String datasetTypeName = dataset.getDatasetType().getName(); + resourceTypeGeneral = switch (datasetTypeName) { + case "dataset" -> + "Dataset"; + case "software" -> + "Software"; + case "workflow" -> + "Workflow"; + default -> + "Dataset"; + }; + xmlMetadata = xmlMetadata.replace("${resourceTypeGeneral}", resourceTypeGeneral); + } + String relIdentifiers = generateRelatedIdentifiers(dvObject); xmlMetadata = xmlMetadata.replace("${relatedIdentifiers}", relIdentifiers); @@ -311,4 +329,12 @@ public void setPublisherYear(String publisherYear) { this.publisherYear = publisherYear; } + public String getResourceTypeGeneral() { + return resourceTypeGeneral; + } + + public void setResourceTypeGeneral(String resourceTypeGeneral) { + this.resourceTypeGeneral = resourceTypeGeneral; + } + } \ No newline at end of file diff --git a/src/main/resources/edu/harvard/iq/dataverse/pidproviders/doi/datacite_metadata_template.xml b/src/main/resources/edu/harvard/iq/dataverse/pidproviders/doi/datacite_metadata_template.xml index abe7ce79972..150a098834e 100644 --- a/src/main/resources/edu/harvard/iq/dataverse/pidproviders/doi/datacite_metadata_template.xml +++ b/src/main/resources/edu/harvard/iq/dataverse/pidproviders/doi/datacite_metadata_template.xml @@ -9,7 +9,7 @@ ${publisher} ${publisherYear} - <resourceType resourceTypeGeneral="Dataset"/> + <resourceType resourceTypeGeneral="${resourceTypeGeneral}"/> ${relatedIdentifiers} ${description} From 771d85a79a8c102d6e6b4e324c95d35d833ef40a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 30 Jul 2024 15:41:15 -0400 Subject: [PATCH 17/45] expose if feature flags are enabled or disabled via API #10732 --- doc/sphinx-guides/source/api/native-api.rst | 45 +++++++++++++++++++ .../source/installation/config.rst | 2 + .../edu/harvard/iq/dataverse/api/Admin.java | 25 +++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 11 +++++ 4 files changed, 83 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index e7ed71f06ef..8773afb1b7a 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -5219,6 +5219,51 @@ Delete Database Setting ~~~~~~~~~~~~~~~~~~~~~~~ Delete the setting under ``name``:: DELETE http://$SERVER/api/admin/settings/$name + +..
_list-all-feature-flags: + +List All Feature Flags +~~~~~~~~~~~~~~~~~~~~~~ + +Experimental and preview features are sometimes hidden behind feature flags. See :ref:`feature-flags` in the Installation Guide for a list of flags and how to configure them. + +This API endpoint provides a list of feature flags and "enabled" or "disabled" for each one. + +.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of export below. + +.. code-block:: bash + + export SERVER_URL=http://localhost:8080 + + curl "$SERVER_URL/api/admin/featureFlags" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "http://localhost:8080/api/admin/featureFlags" + +.. _show-feature-flag-status: + +Show Feature Flag Status +~~~~~~~~~~~~~~~~~~~~~~~~ + +This endpoint reports "enabled" as true or false for a single feature flag. (For all flags, see :ref:`list-all-feature-flags`.) + +.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of export below. + +.. code-block:: bash + + export SERVER_URL=http://localhost:8080 + export FLAG=DATASET_TYPES + + curl "$SERVER_URL/api/admin/featureFlags/$FLAG" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "http://localhost:8080/api/admin/featureFlags/DATASET_TYPES" Manage Banner Messages ~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 0038c188ea5..d68eaaa3876 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -3299,6 +3299,8 @@ please find all known feature flags below. Any of these flags can be activated u **Note:** Feature flags can be set via any `supported MicroProfile Config API source`_, e.g. the environment variable ``DATAVERSE_FEATURE_XXX`` (e.g. ``DATAVERSE_FEATURE_API_SESSION_AUTH=1``). These environment variables can be set in your shell before starting Payara. If you are using :doc:`Docker for development `, you can set them in the `docker compose `_ file. +To check the status of feature flags via API, see :ref:`list-all-feature-flags` in the API Guide. + .. _:ApplicationServerSettings: Application Server Settings diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index d60884bad2f..7593f65e027 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -99,6 +99,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.RegisterDvObjectCommand; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.pidproviders.handle.HandlePidProvider; +import edu.harvard.iq.dataverse.settings.FeatureFlags; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.userdata.UserListMaker; import edu.harvard.iq.dataverse.userdata.UserListResult; @@ -126,6 +127,7 @@ import jakarta.ws.rs.WebApplicationException; import jakarta.ws.rs.core.StreamingOutput; import java.nio.file.Paths; +import java.util.TreeMap; /** * Where the secure, setup API calls live.
@@ -2515,4 +2517,27 @@ public Response downloadTmpFile(@Context ContainerRequestContext crc, @QueryPara } } + @GET + @Path("/featureFlags") + public Response getFeatureFlags() { + Map map = new TreeMap<>(); + for (FeatureFlags flag : FeatureFlags.values()) { + map.put(flag.name(), flag.enabled() ? "enabled" : "disabled"); + } + return ok(Json.createObjectBuilder(map)); + } + + @GET + @Path("/featureFlags/{flag}") + public Response getFeatureFlag(@PathParam("flag") String flagIn) { + try { + FeatureFlags flag = FeatureFlags.valueOf(flagIn); + JsonObjectBuilder job = Json.createObjectBuilder(); + job.add("enabled", flag.enabled()); + return ok(job); + } catch (IllegalArgumentException ex) { + return error(Status.NOT_FOUND, "Feature flag not found. Try listing all feature flags."); + } + } + } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 91469f7a893..e2aaefbcd1a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -46,6 +46,7 @@ import edu.harvard.iq.dataverse.DatasetField; import edu.harvard.iq.dataverse.DatasetFieldType; import edu.harvard.iq.dataverse.DatasetFieldValue; +import edu.harvard.iq.dataverse.settings.FeatureFlags; import edu.harvard.iq.dataverse.util.StringUtil; import java.util.Collections; @@ -2219,6 +2220,16 @@ public static Response setSetting(String settingKey, String value) { return response; } + static Response getFeatureFlags() { + Response response = given().when().get("/api/admin/featureFlags"); + return response; + } + + static Response getFeatureFlag(FeatureFlags featureFlag) { + Response response = given().when().get("/api/admin/featureFlags/" + featureFlag); + return response; + } + static Response getRoleAssignmentsOnDataverse(String dataverseAliasOrId, String apiToken) { String url = "/api/dataverses/" + dataverseAliasOrId + "/assignments"; return given() From c69c3ae9ea09f3a2fe61e33ef94142f168bfcea7 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 30 Jul 2024 17:15:01 -0400 Subject: [PATCH 18/45] improve error handling for dataset types #10517 Especially, pay attention to if the feature is enabled or not. --- .../harvard/iq/dataverse/api/Datasets.java | 13 ++- .../harvard/iq/dataverse/api/Dataverses.java | 16 ++++ .../iq/dataverse/dataverse/DataverseUtil.java | 12 +++ .../iq/dataverse/util/json/JSONLDUtil.java | 7 +- .../iq/dataverse/api/DatasetTypesIT.java | 91 ++++++++++++++++--- 5 files changed, 122 insertions(+), 17 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 4326cd17737..3b1f23b6b5c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -5080,6 +5080,9 @@ public Response resetPidGenerator(@Context ContainerRequestContext crc, @PathPar @GET @Path("datasetTypes") public Response getDatasetTypes() { + if (!FeatureFlags.DATASET_TYPES.enabled()) { + return error(Status.FORBIDDEN, "Dataset type feature not enabled. 
Listing types not allowed."); + } JsonArrayBuilder jab = Json.createArrayBuilder(); List datasetTypes = datasetTypeSvc.listAll(); for (DatasetType datasetType : datasetTypes) { @@ -5094,6 +5097,9 @@ public Response getDatasetTypes() { @GET @Path("datasetTypes/byName/{name}") public Response getDatasetTypes(@PathParam("name") String name) { + if (!FeatureFlags.DATASET_TYPES.enabled()) { + return error(Status.FORBIDDEN, "Dataset type feature not enabled. Showing a type not allowed."); + } DatasetType datasetType = datasetTypeSvc.getByName(name); if (datasetType != null) { return ok(datasetType.toJson()); @@ -5106,7 +5112,9 @@ public Response getDatasetTypes(@PathParam("name") String name) { @AuthRequired @Path("datasetTypes") public Response addDatasetType(@Context ContainerRequestContext crc, String jsonIn) { - System.out.println("json in: " + jsonIn); + if (!FeatureFlags.DATASET_TYPES.enabled()) { + return error(Status.FORBIDDEN, "Dataset type feature not enabled. Creating types not allowed."); + } AuthenticatedUser user; try { user = getRequestAuthenticatedUserOrDie(crc); @@ -5143,6 +5151,9 @@ public Response addDatasetType(@Context ContainerRequestContext crc, String json @AuthRequired @Path("datasetTypes/{id}") public Response deleteDatasetType(@Context ContainerRequestContext crc, @PathParam("id") String doomed) { + if (!FeatureFlags.DATASET_TYPES.enabled()) { + return error(Status.FORBIDDEN, "Dataset type feature not enabled. Deleting types not allowed."); + } AuthenticatedUser user; try { user = getRequestAuthenticatedUserOrDie(crc); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 055238cb070..c8ddd29ff1f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -268,6 +268,13 @@ public Response createDataset(@Context ContainerRequestContext crc, String jsonB //Throw BadRequestException if metadataLanguage isn't compatible with setting DataverseUtil.checkMetadataLangauge(ds, owner, settingsService.getBaseMetadataLanguageMap(null, true)); + try { + logger.fine("in createDataset, about to call checkDatasetType..."); + DataverseUtil.checkDatasetType(ds, FeatureFlags.DATASET_TYPES.enabled()); + } catch (BadRequestException ex) { + return badRequest(ex.getLocalizedMessage()); + } + // clean possible version metadata DatasetVersion version = ds.getVersions().get(0); @@ -359,6 +366,8 @@ public Response createDatasetFromJsonLd(@Context ContainerRequestContext crc, St } catch (WrappedResponse ex) { return ex.getResponse(); + } catch (Exception ex) { + return error(Status.BAD_REQUEST, ex.getLocalizedMessage()); } } @@ -382,6 +391,13 @@ public Response importDataset(@Context ContainerRequestContext crc, String jsonB //Throw BadRequestException if metadataLanguage isn't compatible with setting DataverseUtil.checkMetadataLangauge(ds, owner, settingsService.getBaseMetadataLanguageMap(null, true)); + try { + logger.fine("in importDataset, about to call checkDatasetType..."); + DataverseUtil.checkDatasetType(ds, FeatureFlags.DATASET_TYPES.enabled()); + } catch (BadRequestException ex) { + return badRequest(ex.getLocalizedMessage()); + } + DatasetVersion version = ds.getVersions().get(0); if (version.getVersionState() == null) { version.setVersionState(DatasetVersion.VersionState.DRAFT); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java 
b/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java index f45a9058e7c..bf111c1c135 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java @@ -6,6 +6,7 @@ import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.dataset.DatasetType; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.json.JsonLDTerm; @@ -122,4 +123,15 @@ public static void checkMetadataLangauge(Dataset ds, Dataverse owner, Map Date: Wed, 31 Jul 2024 14:35:10 -0400 Subject: [PATCH 19/45] add docs for dataset types #10517 --- doc/release-notes/10517-datasetType.md | 2 +- doc/sphinx-guides/source/api/native-api.rst | 115 ++++++++++++++++++ .../dataset-semantic-metadata-api.rst | 13 +- .../source/installation/config.rst | 3 + .../source/user/dataset-management.rst | 32 +++++ .../source/user/dataset-types.rst | 37 ------ doc/sphinx-guides/source/user/index.rst | 1 - 7 files changed, 163 insertions(+), 40 deletions(-) delete mode 100755 doc/sphinx-guides/source/user/dataset-types.rst diff --git a/doc/release-notes/10517-datasetType.md b/doc/release-notes/10517-datasetType.md index 35a1e21b8c3..c75c4db63e9 100644 --- a/doc/release-notes/10517-datasetType.md +++ b/doc/release-notes/10517-datasetType.md @@ -1,6 +1,6 @@ ### Initial Support for Dataset Types (Dataset, Software, Workflow) -Datasets now have types. By default the dataset type will be "dataset" but if you turn on support for additional types, datasets can have a type of "software" or "workflow" as well. For more details see doc/sphinx-guides/source/user/dataset-types.rst and #10517. Please note that this feature is highly experimental. +Datasets now have types. By default the dataset type will be "dataset" but if you turn on support for additional types, datasets can have a type of "software" or "workflow" as well. For more details see #10517. Please note that this feature is highly experimental. Upgrade instructions -------------------- diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 8773afb1b7a..ea556990bcd 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -693,6 +693,8 @@ To create a dataset, you must supply a JSON file that contains at least the foll - Description Text - Subject +.. _api-create-dataset-incomplete: + Submit Incomplete Dataset ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -750,6 +752,8 @@ The following is an example HTTP call with deactivated validation: **Note:** You may learn about an instance's support for deposition of incomplete datasets via :ref:`info-incomplete-metadata`. +.. _api-create-dataset: + Submit Dataset ^^^^^^^^^^^^^^ @@ -779,6 +783,17 @@ You should expect an HTTP 200 ("OK") response and JSON indicating the database I .. note:: Only a Dataverse installation account with superuser permissions is allowed to include files when creating a dataset via this API. Adding files this way only adds their file metadata to the database, you will need to manually add the physical files to the file system. +.. _api-create-dataset-with-type: + +Create a Dataset with a Dataset Type (Software, etc.)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Note: this feature is only available if your installation has the dataset types feature enabled. See :ref:`dataset-types`. + +Follow :ref:`api-create-dataset` as normal but include a line like `"datasetType": "software"` in your JSON. You can check which types are supported by your installation using the :ref:`api-list-dataset-types` API endpoint. + +Here is an example JSON file for reference: :download:`dataset-create-software.json <../_static/api/dataset-create-software.json>`. + .. _api-import-dataset: Import a Dataset into a Dataverse Collection @@ -821,6 +836,16 @@ Before calling the API, make sure the data files referenced by the ``POST``\ ed * This API endpoint does not support importing *files'* persistent identifiers. * A Dataverse installation can import datasets with a valid PID that uses a different protocol or authority than said server is configured for. However, the server will not update the PID metadata on subsequent update and publish actions. +.. _import-dataset-with-type: + +Import a Dataset with a Dataset Type (Software, etc.) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Note: this feature is only available if your installation has the dataset types feature enabled. See :ref:`dataset-types`. + +The same native JSON file as above under :ref:`api-create-dataset-with-type` can be used when importing a dataset. + +A file like this is the only difference. Otherwise, follow :ref:`api-import-dataset` as normal. Import a Dataset into a Dataverse Installation with a DDI file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -2985,6 +3010,96 @@ The API can also be used to reset the dataset to use the default/inherited value The default will always be the same provider as for the dataset PID if that provider can generate new PIDs, and will be the PID Provider set for the collection or the global default otherwise. +.. _api-dataset-types: + +Dataset Types +~~~~~~~~~~~~~ + +See :ref:`dataset-types` in the User Guide for an overview of the feature. + +.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of ``export`` below. + +.. _api-list-dataset-types: + +List Dataset Types +^^^^^^^^^^^^^^^^^^ + +Show which dataset types are available. + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + + curl "$SERVER_URL/api/datasets/datasetTypes" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/datasetTypes" + +.. _api-list-dataset-type: + +Get Dataset Type by Name +^^^^^^^^^^^^^^^^^^^^^^^^ + +Show a single dataset type based on its name. + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export TYPE=software + + curl "$SERVER_URL/api/datasets/datasetTypes/byName/$TYPE" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/datasetTypes/byName/software" + +.. _api-add-dataset-type: + +Add Dataset Type +^^^^^^^^^^^^^^^^ + +Superuser only. + +..
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export JSON='{"name": "newType"}' + + curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-Type: application/json" "$SERVER_URL/api/datasets/datasetTypes" -X POST -d "$JSON" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Content-Type: application/json" "https://demo.dataverse.org/api/datasets/datasetTypes" -X POST -d '{"name": "newType"}' + +.. _api-delete-dataset-type: + +Delete Dataset Type +^^^^^^^^^^^^^^^^^^^ + +Superuser only. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export TYPE_ID=3 + + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/datasetTypes/$TYPE_ID" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/datasetTypes/3" + Files ----- diff --git a/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst b/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst index ded62288eb2..9791a1e05b7 100644 --- a/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst +++ b/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst @@ -83,6 +83,7 @@ Note, this example uses the term URI directly rather than adding an ``@context`` You should expect a 200 ("OK") response indicating whether a draft Dataset version was created or an existing draft was updated. +.. _api-semantic-create-dataset: Create a Dataset ---------------- @@ -105,4 +106,14 @@ With curl, this is done by adding the following header: curl -H X-Dataverse-key:$API_TOKEN -H 'Content-Type: application/ld+json' -X POST $SERVER_URL/api/dataverses/$DATAVERSE_ID/datasets --upload-file dataset-create.jsonld An example jsonld file is available at :download:`dataset-create.jsonld <../_static/api/dataset-create.jsonld>` (:download:`dataset-create_en.jsonld <../_static/api/dataset-create_en.jsonld>` is a version that sets the metadata language (see :ref:`:MetadataLanguages`) to English (en).) - + +.. _api-semantic-create-dataset-with-type: + +Create a Dataset with a Dataset Type +------------------------------------ + +Note: this feature is only available if your installation has the dataset types feature enabled. See :ref:`dataset-types`. + +An example JSON-LD file is available at :download:`dataset-create-software.jsonld <../_static/api/dataset-create-software.jsonld>`. + +You can use this file with the normal :ref:`api-semantic-create-dataset` endpoint above. diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index d68eaaa3876..9d1171f2be4 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -3294,6 +3294,9 @@ please find all known feature flags below. Any of these flags can be activated u * - disable-return-to-author-reason - Removes the reason field in the `Publish/Return To Author` dialog that was added as a required field in v6.2 and makes the reason an optional parameter in the :ref:`return-a-dataset` API call.
- ``Off`` + * - dataverse.feature.dataset-types + - At dataset creation time when using the API, a dataset can be specified as "dataset", "software", or "workflow". See :ref:`dataset-types` in the User Guide for more. + - ``Off`` **Note:** Feature flags can be set via any `supported MicroProfile Config API source`_, e.g. the environment variable diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index a1e214589e3..7f277cba9ae 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -780,6 +780,38 @@ If you deaccession the most recently published version of the dataset but not al **Important Note**: A tombstone landing page with the basic citation metadata will always be accessible to the public if they use the persistent URL (Handle or DOI) provided in the citation for that dataset. Users will not be able to see any of the files or additional metadata that were previously available prior to deaccession. +.. _dataset-types: + +Dataset Types +============= + +If your installation has an experimental feature called "dataset types" enabled (see :ref:`feature-flags` in the Installation Guide), datasets can have a dataset type such as "dataset", "software", or "workflow". + +When browsing or searching, these types appear under a facet called "Dataset Type". + +If your installation is configured to use DataCite as a persistent ID (PID) provider, the appropriate type ("Dataset", "Software", "Workflow") will be sent to DataCite when the dataset is published for those three types. + +Currently, the dataset type can only be specified via API and only when the dataset is created. For details, see the following sections of the API guide: + +- :ref:`api-create-dataset-with-type` (Native API) +- :ref:`api-semantic-create-dataset-with-type` (Semantic API) +- :ref:`import-dataset-with-type` + +Dataset types can be listed, added, or deleted via API. See :ref:`api-dataset-types` in the API Guide for more. + +Development of the dataset types feature is ongoing. Please see https://github.com/IQSS/dataverse/issues/10489 for details. + +.. _supported-dataset-types: + +Supported Dataset Types +----------------------- + +Out of the box, Dataverse is configured with three dataset types: + +- dataset (default) +- software +- workflow + .. |image1| image:: ./img/DatasetDiagram.png :class: img-responsive .. |image3| image:: ./img/data_publishing_version_workflow.png diff --git a/doc/sphinx-guides/source/user/dataset-types.rst b/doc/sphinx-guides/source/user/dataset-types.rst deleted file mode 100755 index 4c96745f434..00000000000 --- a/doc/sphinx-guides/source/user/dataset-types.rst +++ /dev/null @@ -1,37 +0,0 @@ -Dataset Types -+++++++++++++ - -NOTE: This separate page will be folded into individual pages and removed as the pull request is finalized - -.. contents:: |toctitle| - :local: - -Intro -===== - -Datasets can have a dataset type such as "dataset", "software", or "workflow". - -When browsing or searching, these types appear under a facet called "Dataset Type". - -Enabling Dataset Types -====================== - -Turn on ``dataverse.feature.dataset-types``. See also :ref:`feature-flags`. 
- -Specifying a Dataset Type When Creating a Dataset -================================================= - -Native API ----------- - -An example JSON file is available at :download:`dataset-create-software.json <../_static/api/dataset-create-software.json>` - -Semantic API ---------------------------------- - -An example JSON-LD file is available at :download:`dataset-create-software.jsonld <../_static/api/dataset-create-software.jsonld>` - -Import with Native JSON ------------------------ - -The same native JSON file as above can be used when importing a dataset: :download:`dataset-create-software.json <../_static/api/dataset-create-software.json>` diff --git a/doc/sphinx-guides/source/user/index.rst b/doc/sphinx-guides/source/user/index.rst index 60f6e473b68..857bd27ca22 100755 --- a/doc/sphinx-guides/source/user/index.rst +++ b/doc/sphinx-guides/source/user/index.rst @@ -16,4 +16,3 @@ User Guide dataset-management tabulardataingest/index appendix - dataset-types From 680fe0514485b98a0e383a99d0db88e30507941f Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 31 Jul 2024 14:35:49 -0400 Subject: [PATCH 20/45] remove developer notes #10517 --- doc/release-notes/10517-datasetType.md | 26 -------------------------- 1 file changed, 26 deletions(-) diff --git a/doc/release-notes/10517-datasetType.md b/doc/release-notes/10517-datasetType.md index c75c4db63e9..13cc9d2b443 100644 --- a/doc/release-notes/10517-datasetType.md +++ b/doc/release-notes/10517-datasetType.md @@ -10,29 +10,3 @@ Add the following line to your Solr schema.xml file and do a full reindex: ``` ``` - -Developer notes ---------------- - -A handy query: - -``` -% DOCKER_CLI_HINTS=false docker exec -it postgres-1 bash -c "PGPASSWORD=secret psql -h localhost -U dataverse dataverse -c 'select dst.name, count(*) from dataset ds, datasettype dst where ds.datasettype_id = dst.id group by dst.name;'" - name | count -----------+------- - dataset | 136 - software | 14 -(2 rows) -``` - -Most API tests are passing but we do see a few failures: - -``` -[ERROR] Failures: -[ERROR] HarvestingClientsIT.testHarvestingClientRun_AllowHarvestingMissingCVV_False:187->harvestingClientRun:301 expected: <7> but was: <0> -[ERROR] HarvestingClientsIT.testHarvestingClientRun_AllowHarvestingMissingCVV_True:191->harvestingClientRun:301 expected: <8> but was: <0> -[ERROR] MakeDataCountApiIT.testMakeDataCountGetMetric:68 1 expectation failed. -Expected status code <200> but was <400>. 
-``` - -select dst.name, count(*) from dataset ds, datasettype dst where ds.datasettype_id = dst.id group by dst.name; From 2e811877d7e2bcbcb3416b8ef68f185cb35f7977 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 31 Jul 2024 15:33:51 -0400 Subject: [PATCH 21/45] improve error handling when adding a dataset type #10517 --- .../edu/harvard/iq/dataverse/api/Datasets.java | 14 ++++++++++---- .../harvard/iq/dataverse/api/DatasetTypesIT.java | 4 ++++ 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 3b1f23b6b5c..e8b5d69ad05 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -5126,12 +5126,18 @@ public Response addDatasetType(@Context ContainerRequestContext crc, String json } if (jsonIn == null || jsonIn.isEmpty()) { - throw new IllegalArgumentException("JSON input was null or empty!"); + return error(BAD_REQUEST, "JSON input was null or empty!"); + } + + String nameIn = null; + try { + JsonObject jsonObject = JsonUtil.getJsonObject(jsonIn); + nameIn = jsonObject.getString("name", null); + } catch (JsonParsingException ex) { + return error(BAD_REQUEST, "Problem parsing supplied JSON: " + ex.getLocalizedMessage()); } - JsonObject jsonObject = JsonUtil.getJsonObject(jsonIn); - String nameIn = jsonObject.getString("name", null); if (nameIn == null) { - throw new IllegalArgumentException("A name for the dataset type is required"); + return error(BAD_REQUEST, "A name for the dataset type is required"); } try { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java index 48f4dbdc984..94a50c8409e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java @@ -218,6 +218,10 @@ public void testAddAndDeleteDatasetType() { String apiToken = UtilIT.getApiTokenFromResponse(createUser); UtilIT.setSuperuserStatus(username, true).then().assertThat().statusCode(OK.getStatusCode()); + Response badJson = UtilIT.addDatasetType("this isn't even JSON", apiToken); + badJson.prettyPrint(); + badJson.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + String randomName = UUID.randomUUID().toString().substring(0, 8); String jsonIn = Json.createObjectBuilder().add("name", randomName).build().toString(); From dd7541fb3954b81c660cfb167e22bcb66e055278 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 5 Aug 2024 15:26:48 -0400 Subject: [PATCH 22/45] move check to JsonParser #10517 Normal "create dataset" operations were failing with: "The dataset type feature is not enabled but a type was sent: dataset" --- .../harvard/iq/dataverse/api/Dataverses.java | 14 ---------- .../iq/dataverse/dataverse/DataverseUtil.java | 11 -------- .../iq/dataverse/util/json/JsonParser.java | 4 +++ .../iq/dataverse/api/DatasetTypesIT.java | 28 +++++++++++-------- 4 files changed, 21 insertions(+), 36 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index c8ddd29ff1f..e6375b2f594 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -268,13 +268,6 @@ public Response createDataset(@Context ContainerRequestContext crc, String jsonB //Throw BadRequestException if 
metadataLanguage isn't compatible with setting DataverseUtil.checkMetadataLangauge(ds, owner, settingsService.getBaseMetadataLanguageMap(null, true)); - try { - logger.fine("in createDataset, about to call checkDatasetType..."); - DataverseUtil.checkDatasetType(ds, FeatureFlags.DATASET_TYPES.enabled()); - } catch (BadRequestException ex) { - return badRequest(ex.getLocalizedMessage()); - } - // clean possible version metadata DatasetVersion version = ds.getVersions().get(0); @@ -391,13 +384,6 @@ public Response importDataset(@Context ContainerRequestContext crc, String jsonB //Throw BadRequestException if metadataLanguage isn't compatible with setting DataverseUtil.checkMetadataLangauge(ds, owner, settingsService.getBaseMetadataLanguageMap(null, true)); - try { - logger.fine("in importDataset, about to call checkDatasetType..."); - DataverseUtil.checkDatasetType(ds, FeatureFlags.DATASET_TYPES.enabled()); - } catch (BadRequestException ex) { - return badRequest(ex.getLocalizedMessage()); - } - DatasetVersion version = ds.getVersions().get(0); if (version.getVersionState() == null) { version.setVersionState(DatasetVersion.VersionState.DRAFT); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java index bf111c1c135..04a6ebdb6c4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java @@ -123,15 +123,4 @@ public static void checkMetadataLangauge(Dataset ds, Dataverse owner, Map Date: Thu, 8 Aug 2024 16:21:37 -0400 Subject: [PATCH 23/45] remove "dataset-types" feature flag #10517 --- doc/release-notes/10517-datasetType.md | 2 +- doc/sphinx-guides/source/api/native-api.rst | 8 +- .../dataset-semantic-metadata-api.rst | 4 +- .../source/installation/config.rst | 4 - .../source/user/dataset-management.rst | 2 +- docker-compose-dev.yml | 1 - .../harvard/iq/dataverse/api/Datasets.java | 12 -- .../iq/dataverse/search/IndexServiceBean.java | 8 +- .../dataverse/search/SearchServiceBean.java | 4 +- .../iq/dataverse/settings/FeatureFlags.java | 5 - .../iq/dataverse/util/json/JSONLDUtil.java | 4 - .../iq/dataverse/util/json/JsonParser.java | 4 - .../iq/dataverse/api/DatasetTypesIT.java | 105 ++++-------------- 13 files changed, 33 insertions(+), 130 deletions(-) diff --git a/doc/release-notes/10517-datasetType.md b/doc/release-notes/10517-datasetType.md index 13cc9d2b443..12c2eaa46f2 100644 --- a/doc/release-notes/10517-datasetType.md +++ b/doc/release-notes/10517-datasetType.md @@ -1,6 +1,6 @@ ### Initial Support for Dataset Types (Dataset, Software, Workflow) -Datasets now have types. By default the dataset type will be "dataset" but if you turn on support for additional types, datasets can have a type of "software" or "workflow" as well. For more details see #10517. Please note that this feature is highly experimental. +Datasets now have types. By default the dataset type will be "dataset", but out of the box datasets can have a type of "software" or "workflow" as well. For more details see #10517. Please note that this feature is highly experimental and is expected to evolve.
Upgrade instructions -------------------- diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 8d7f732e8c1..6be7706a2a4 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -788,12 +788,12 @@ You should expect an HTTP 200 ("OK") response and JSON indicating the database I Create a Dataset with a Dataset Type (Software, etc.) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Note: this feature is only available if your installation has the dataset types feature enabled. See :ref:`dataset-types`. - Follow :ref:`api-create-dataset` as normal but include a line like `"datasetType": "software"` in your JSON. You can check which types are supported by your installation using the :ref:`api-list-dataset-types` API endpoint. Here is an example JSON file for reference: :download:`dataset-create-software.json <../_static/api/dataset-create-software.json>`. +See also :ref:`dataset-types`. + .. _api-import-dataset: Import a Dataset into a Dataverse Collection @@ -841,12 +841,12 @@ Before calling the API, make sure the data files referenced by the ``POST``\ ed Import a Dataset with a Dataset Type (Software, etc.) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Note: this feature is only available if your installation has the dataset types feature enabled. See :ref:`dataset-types`. - The same native JSON file as above under :ref:`api-create-dataset-with-type` can be used when importing a dataset. A file like this is the only difference. Otherwise, follow :ref:`api-import-dataset` as normal. +See also :ref:`dataset-types`. + Import a Dataset into a Dataverse Installation with a DDI file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst b/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst index 9791a1e05b7..44188891786 100644 --- a/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst +++ b/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst @@ -112,8 +112,8 @@ An example jsonld file is available at :download:`dataset-create.jsonld <../_sta Create a Dataset with a Dataset Type ------------------------------------ -Note: this feature is only available if your installation has the dataset types feature enabled. See :ref:`dataset-types`. - An example JSON-LD file is available at :download:`dataset-create-software.jsonld <../_static/api/dataset-create-software.jsonld>`. You can use this file with the normal :ref:`api-semantic-create-dataset` endpoint above. + +See also :ref:`dataset-types`. diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 4b49f6117f1..b2d9bd3d342 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -3301,10 +3301,6 @@ please find all known feature flags below. Any of these flags can be activated u * - disable-return-to-author-reason - Removes the reason field in the `Publish/Return To Author` dialog that was added as a required field in v6.2 and makes the reason an optional parameter in the :ref:`return-a-dataset` API call. - ``Off`` - * - dataverse.feature.dataset-types - - At dataset creation time when using the API, a dataset can be specified as "dataset", "software", or "workflow". See :ref:`dataset-types` in the User Guide for more. 
- - ``Off`` - **Note:** Feature flags can be set via any `supported MicroProfile Config API source`_, e.g. the environment variable ``DATAVERSE_FEATURE_XXX`` (e.g. ``DATAVERSE_FEATURE_API_SESSION_AUTH=1``). These environment variables can be set in your shell before starting Payara. If you are using :doc:`Docker for development `, you can set them in the `docker compose `_ file. diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index c376300d53a..9bf166fae0a 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -787,7 +787,7 @@ If you deaccession the most recently published version of the dataset but not al Dataset Types ============= -If your installation has an experimental feature called "dataset types" enabled (see :ref:`feature-flags` in the Installation Guide), datasets can have a dataset type such as "dataset", "software", or "workflow". +Datasets can have a dataset type such as "dataset", "software", or "workflow". When browsing or searching, these types appear under a facet called "Dataset Type". diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 64e16a0c5ae..402a95c0e16 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -17,7 +17,6 @@ services: SKIP_DEPLOY: "${SKIP_DEPLOY}" DATAVERSE_JSF_REFRESH_PERIOD: "1" DATAVERSE_FEATURE_API_BEARER_AUTH: "1" - DATAVERSE_FEATURE_DATASET_TYPES: "1" DATAVERSE_MAIL_SYSTEM_EMAIL: "dataverse@localhost" DATAVERSE_MAIL_MTA_HOST: "smtp" DATAVERSE_AUTH_OIDC_ENABLED: "1" diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index e8b5d69ad05..adefd7bc8b7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -5080,9 +5080,6 @@ public Response resetPidGenerator(@Context ContainerRequestContext crc, @PathPar @GET @Path("datasetTypes") public Response getDatasetTypes() { - if (!FeatureFlags.DATASET_TYPES.enabled()) { - return error(Status.FORBIDDEN, "Dataset type feature not enabled. Listing types not allowed."); - } JsonArrayBuilder jab = Json.createArrayBuilder(); List datasetTypes = datasetTypeSvc.listAll(); for (DatasetType datasetType : datasetTypes) { @@ -5097,9 +5094,6 @@ public Response getDatasetTypes() { @GET @Path("datasetTypes/byName/{name}") public Response getDatasetTypes(@PathParam("name") String name) { - if (!FeatureFlags.DATASET_TYPES.enabled()) { - return error(Status.FORBIDDEN, "Dataset type feature not enabled. Showing a type not allowed."); - } DatasetType datasetType = datasetTypeSvc.getByName(name); if (datasetType != null) { return ok(datasetType.toJson()); @@ -5112,9 +5106,6 @@ public Response getDatasetTypes(@PathParam("name") String name) { @AuthRequired @Path("datasetTypes") public Response addDatasetType(@Context ContainerRequestContext crc, String jsonIn) { - if (!FeatureFlags.DATASET_TYPES.enabled()) { - return error(Status.FORBIDDEN, "Dataset type feature not enabled. 
Creating types not allowed."); - } AuthenticatedUser user; try { user = getRequestAuthenticatedUserOrDie(crc); @@ -5157,9 +5148,6 @@ public Response addDatasetType(@Context ContainerRequestContext crc, String json @AuthRequired @Path("datasetTypes/{id}") public Response deleteDatasetType(@Context ContainerRequestContext crc, @PathParam("id") String doomed) { - if (!FeatureFlags.DATASET_TYPES.enabled()) { - return error(Status.FORBIDDEN, "Dataset type feature not enabled. Deleting types not allowed."); - } AuthenticatedUser user; try { user = getRequestAuthenticatedUserOrDie(crc); diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 74b8d3b2b56..a54aa6e9504 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -1003,11 +1003,9 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set Date: Mon, 12 Aug 2024 10:32:06 -0400 Subject: [PATCH 24/45] hide "Dataset Type" facet if all one type #10517 --- .../iq/dataverse/search/SearchServiceBean.java | 12 ++++++++++++ .../edu/harvard/iq/dataverse/api/DatasetTypesIT.java | 5 +++-- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index 20def60b410..8187bf752a7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -725,11 +725,14 @@ public SolrQueryResponse search( boolean deaccessionedAvailable = false; boolean hideMetadataSourceFacet = true; boolean hideLicenseFacet = true; + boolean hideDatasetTypeFacet = true; + System.out.println("getting facet fields.."); for (FacetField facetField : queryResponse.getFacetFields()) { FacetCategory facetCategory = new FacetCategory(); List facetLabelList = new ArrayList<>(); int numMetadataSources = 0; int numLicenses = 0; + int numDatasetTypes = 0; String metadataBlockName = ""; String datasetFieldName = ""; /** @@ -798,6 +801,8 @@ public SolrQueryResponse search( numMetadataSources++; } else if (facetField.getName().equals(SearchFields.DATASET_LICENSE)) { numLicenses++; + } else if (facetField.getName().equals(SearchFields.DATASET_TYPE)) { + numDatasetTypes++; } } } @@ -807,6 +812,9 @@ public SolrQueryResponse search( if (numLicenses > 1) { hideLicenseFacet = false; } + if (numDatasetTypes > 1 ) { + hideDatasetTypeFacet = false; + } facetCategory.setName(facetField.getName()); // hopefully people will never see the raw facetField.getName() because it may well have an _s at the end facetCategory.setFriendlyName(facetField.getName()); @@ -887,6 +895,10 @@ public SolrQueryResponse search( if (!hideLicenseFacet) { facetCategoryList.add(facetCategory); } + } else if (facetCategory.getName().equals(SearchFields.DATASET_TYPE)) { + if (!hideDatasetTypeFacet) { + facetCategoryList.add(facetCategory); + } } else { facetCategoryList.add(facetCategory); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java index 74a834d693f..2ff98ecd557 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java @@ -56,8 +56,9 @@ public void testCreateSoftwareDatasetNative() { 
searchDraft.then().assertThat() .body("data.total_count", CoreMatchers.is(1)) .body("data.count_in_response", CoreMatchers.is(1)) - .body("data.facets[0].datasetType.friendly", CoreMatchers.is("Dataset Type")) - .body("data.facets[0].datasetType.labels[0].Software", CoreMatchers.is(1)) + // No "Dataset Type" or count for "Software" because we hide the facet if there is only one type. + .body("data.facets[0].datasetType.friendly", CoreMatchers.nullValue()) + .body("data.facets[0].datasetType.labels[0].Software", CoreMatchers.nullValue()) .statusCode(OK.getStatusCode()); UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode()); From 1beed5d4493521c0de404180232fdb20dc9f28e9 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 12 Aug 2024 10:35:05 -0400 Subject: [PATCH 25/45] remove debug line #10517 --- .../java/edu/harvard/iq/dataverse/search/SearchServiceBean.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index 8187bf752a7..f517ee0fa51 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -726,7 +726,6 @@ public SolrQueryResponse search( boolean deaccessionedAvailable = false; boolean hideMetadataSourceFacet = true; boolean hideLicenseFacet = true; boolean hideDatasetTypeFacet = true; - System.out.println("getting facet fields.."); for (FacetField facetField : queryResponse.getFacetFields()) { FacetCategory facetCategory = new FacetCategory(); List facetLabelList = new ArrayList<>(); From 200a45a261e20fa81ba51a01d3bde86b748279a7 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 12 Aug 2024 12:19:30 -0400 Subject: [PATCH 26/45] don't add "software" or "workflow" with Flyway #10517 --- doc/release-notes/10517-datasetType.md | 6 +++-- doc/sphinx-guides/source/api/native-api.rst | 14 ++++++++---- .../dataset-semantic-metadata-api.rst | 2 ++ .../source/user/dataset-management.rst | 4 ++-- .../iq/dataverse/dataset/DatasetType.java | 5 ++++- .../pidproviders/doi/XmlMetadataTemplate.java | 6 ++--- src/main/resources/db/migration/V6.3.0.2.sql | 4 +--- .../iq/dataverse/api/DatasetTypesIT.java | 22 +++++++++++++++ 8 files changed, 48 insertions(+), 15 deletions(-) diff --git a/doc/release-notes/10517-datasetType.md b/doc/release-notes/10517-datasetType.md index 12c2eaa46f2..722b2f96321 100644 --- a/doc/release-notes/10517-datasetType.md +++ b/doc/release-notes/10517-datasetType.md @@ -1,6 +1,8 @@ -### Initial Support for Dataset Types (Dataset, Software, Workflow) +### Initial Support for Dataset Types -Datasets now have types. By default the dataset type will be "dataset", but out of the box datasets can have a type of "software" or "workflow" as well. For more details see #10517. Please note that this feature is highly experimental and is expected to evolve. +Out of the box, all datasets have the type "dataset" but superusers can add additional types. At this time the type can only be set at creation time via API. The types "dataset", "software", and "workflow" will be sent to DataCite when the dataset is published. + +For details see #10517. Please note that this feature is highly experimental and is expected to evolve.
Upgrade instructions -------------------- diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 6be7706a2a4..b38bc2d1b5f 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -788,7 +788,9 @@ You should expect an HTTP 200 ("OK") response and JSON indicating the database I Create a Dataset with a Dataset Type (Software, etc.) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +By default, datasets are given the type "dataset" but if your installation has added additional types (see :ref:`api-add-dataset-type`), you can specify the type. + Follow :ref:`api-create-dataset` as normal but include a line like ``"datasetType": "software"`` in your JSON. You can check which types are supported by your installation using the :ref:`api-list-dataset-types` API endpoint. Here is an example JSON file for reference: :download:`dataset-create-software.json <../_static/api/dataset-create-software.json>`. @@ -841,6 +843,8 @@ Before calling the API, make sure the data files referenced by the ``POST``\ ed Import a Dataset with a Dataset Type (Software, etc.) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +By default, datasets are given the type "dataset" but if your installation has added additional types (see :ref:`api-add-dataset-type`), you can specify the type. + The same native JSON file as above under :ref:`api-create-dataset-with-type` can be used when importing a dataset. A file like this is the only difference. Otherwise, follow :ref:`api-import-dataset` as normal. @@ -3066,13 +3070,15 @@ The fully expanded example above (without environment variables) looks like this Add Dataset Type ^^^^^^^^^^^^^^^^ -Superuser only. +Note: Before you add any types of your own, there should be a single type called "dataset". If you add "software" or "workflow", these types will be sent to DataCite (if you use DataCite). Otherwise, the only functionality you gain currently from adding types is an entry in the "Dataset Type" facet, but be advised that if you add a type other than "software" or "workflow", you will need to add your new type to your Bundle.properties file for it to appear in Title Case rather than lower case in the "Dataset Type" facet. + +With all that said, we'll add a "software" type in the example below. This API endpoint is superuser only. .. code-block:: bash export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org - export JSON='{"name": "newType"}' + export JSON='{"name": "software"}' curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-Type: application/json" "$SERVER_URL/api/datasets/datasetTypes" -X POST -d "$JSON" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Content-Type: application/json" "https://demo.dataverse.org/api/datasets/datasetTypes" -X POST -d '{"name": "newType"}' + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Content-Type: application/json" "https://demo.dataverse.org/api/datasets/datasetTypes" -X POST -d '{"name": "software"}' ..
_api-delete-dataset-type: diff --git a/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst b/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst index 44188891786..4f374bdc039 100644 --- a/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst +++ b/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst @@ -112,6 +112,8 @@ An example jsonld file is available at :download:`dataset-create.jsonld <../_sta Create a Dataset with a Dataset Type ------------------------------------ +By default, datasets are given the type "dataset" but if your installation has added additional types (see :ref:`api-add-dataset-type`), you can specify the type. + An example JSON-LD file is available at :download:`dataset-create-software.jsonld <../_static/api/dataset-create-software.jsonld>`. You can use this file with the normal :ref:`api-semantic-create-dataset` endpoint above. diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index 9bf166fae0a..02381000519 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -787,9 +787,9 @@ If you deaccession the most recently published version of the dataset but not al Dataset Types ============= -Datasets can have a dataset type such as "dataset", "software", or "workflow". +Out of the box, all datasets have a dataset type of "dataset". Superusers can add additional types such as "software" or "workflow" using the :ref:`api-add-dataset-type` API endpoint. -When browsing or searching, these types appear under a facet called "Dataset Type". +Once more than one type appears in search results, a facet called "Dataset Type" will appear, allowing you to filter down to a certain type. If your installation is configured to use DataCite as a persistent ID (PID) provider, the appropriate type ("Dataset", "Software", "Workflow") will be sent to DataCite when the dataset is published for those three types.
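For context, the type name chosen at dataset creation ultimately reaches DataCite as the ``resourceTypeGeneral`` attribute on the ``resourceType`` element of the DataCite XML. A minimal sketch of that element for a published dataset of type "software" (illustrative only; the full DataCite record contains many more elements):

```xml
<!-- Sketch: the resourceType element as sent to DataCite for a "software" dataset. -->
<!-- resourceTypeGeneral takes a value from the DataCite controlled vocabulary; the -->
<!-- name-to-value mapping lives in the XmlMetadataTemplate switch statement below. -->
<resourceType resourceTypeGeneral="Software"/>
```

Unrecognized type names fall back to "Dataset", as the default branch of the switch below shows.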
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java index 3333819372d..962799257dc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java @@ -27,7 +27,10 @@ public class DatasetType implements Serializable { - public static final String DEFAULT_DATASET_TYPE = "dataset"; + public static final String DATASET_TYPE_DATASET = "dataset"; + public static final String DATASET_TYPE_SOFTWARE = "software"; + public static final String DATASET_TYPE_WORKFLOW = "workflow"; + public static final String DEFAULT_DATASET_TYPE = DATASET_TYPE_DATASET; @Id @GeneratedValue(strategy = GenerationType.IDENTITY) diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java index 694c09b8122..fb4e294d246 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java @@ -203,11 +203,11 @@ public String generateXML(DvObject dvObject) { Dataset dataset = (Dataset) dvObject; String datasetTypeName = dataset.getDatasetType().getName(); resourceTypeGeneral = switch (datasetTypeName) { - case "dataset" -> + case DatasetType.DATASET_TYPE_DATASET -> "Dataset"; - case "software" -> + case DatasetType.DATASET_TYPE_SOFTWARE -> "Software"; - case "workflow" -> + case DatasetType.DATASET_TYPE_WORKFLOW -> "Workflow"; default -> "Dataset"; diff --git a/src/main/resources/db/migration/V6.3.0.2.sql b/src/main/resources/db/migration/V6.3.0.2.sql index 610d6899d68..437572f3f0c 100644 --- a/src/main/resources/db/migration/V6.3.0.2.sql +++ b/src/main/resources/db/migration/V6.3.0.2.sql @@ -1,9 +1,7 @@ -- Dataset types have been added. See #10517 and #10694 -- --- Insert some types (dataset is the default). +-- Insert the default dataset type: dataset. INSERT INTO datasettype (name) VALUES ('dataset'); -INSERT INTO datasettype (name) VALUES ('software'); -INSERT INTO datasettype (name) VALUES ('workflow'); -- -- Add the new column (if it doesn't exist). ALTER TABLE dataset ADD COLUMN IF NOT EXISTS datasettype_id bigint; diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java index 2ff98ecd557..ef24f0102b7 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java @@ -20,6 +20,28 @@ public class DatasetTypesIT { @BeforeAll public static void setUpClass() { RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); + + Response getSoftwareType = UtilIT.getDatasetTypeByName(DatasetType.DATASET_TYPE_SOFTWARE); + getSoftwareType.prettyPrint(); + + String typeFound = JsonPath.from(getSoftwareType.getBody().asString()).getString("data.name"); + System.out.println("type found: " + typeFound); + if (DatasetType.DATASET_TYPE_SOFTWARE.equals(typeFound)) { + return; + } + + System.out.println("The \"software\" type wasn't found. 
Create it."); + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + UtilIT.setSuperuserStatus(username, true).then().assertThat().statusCode(OK.getStatusCode()); + + String jsonIn = Json.createObjectBuilder().add("name", DatasetType.DATASET_TYPE_SOFTWARE).build().toString(); + + Response typeAdded = UtilIT.addDatasetType(jsonIn, apiToken); + typeAdded.prettyPrint(); + typeAdded.then().assertThat().statusCode(OK.getStatusCode()); } @Test From f8e8c4f7ee8ca57906f1051bd2afcddf4dc55bb2 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 13 Aug 2024 13:49:10 -0400 Subject: [PATCH 27/45] remove deprecation notice from default constructor #10517 --- .../edu/harvard/iq/dataverse/dataset/DatasetType.java | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java index 962799257dc..f0977b0fa22 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java @@ -40,17 +40,6 @@ public class DatasetType implements Serializable { @Column(nullable = false) private String name; - /** - * This default constructor is only here to prevent this error at - * deployment: - * - * Exception Description: The instance creation method - * [...DatasetType.], with no parameters, does not - * exist, or is not accessible - * - * Don't use it. - */ - @Deprecated public DatasetType() { } From 867f548cea81fb765855a6d52f734fd510ffbcaa Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 13 Aug 2024 14:37:53 -0400 Subject: [PATCH 28/45] support id or name for GET of dataset type #10517 --- doc/sphinx-guides/source/api/native-api.rst | 10 +++++----- .../edu/harvard/iq/dataverse/api/Datasets.java | 18 ++++++++++++++---- .../iq/dataverse/dataset/DatasetType.java | 2 ++ .../dataset/DatasetTypeServiceBean.java | 11 +++++++++++ .../iq/dataverse/api/DatasetTypesIT.java | 12 +++++++++--- .../edu/harvard/iq/dataverse/api/UtilIT.java | 4 ++-- 6 files changed, 43 insertions(+), 14 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index b38bc2d1b5f..429bcaa0022 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3047,23 +3047,23 @@ The fully expanded example above (without environment variables) looks like this .. _api-list-dataset-type: -Get Dataset Type by Name -^^^^^^^^^^^^^^^^^^^^^^^^ +Get Dataset Type +^^^^^^^^^^^^^^^^ -Show a single dataset type based on its name. +Show a dataset type by passing either its database id (e.g. "2") or its name (e.g. "software"). .. code-block:: bash export SERVER_URL=https://demo.dataverse.org export TYPE=software - curl $SERVER_URL/api/datasets/datasetTypes/byName/$TYPE" + curl $SERVER_URL/api/datasets/datasetTypes/$TYPE" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl "https://demo.dataverse.org/api/datasets/datasetTypes/byName/software" + curl "https://demo.dataverse.org/api/datasets/datasetTypes/software" .. 
_api-add-dataset-type: diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index adefd7bc8b7..89ef51eb2e5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -5092,13 +5092,23 @@ public Response getDatasetTypes() { } @GET - @Path("datasetTypes/byName/{name}") - public Response getDatasetTypes(@PathParam("name") String name) { - DatasetType datasetType = datasetTypeSvc.getByName(name); + @Path("datasetTypes/{idOrName}") + public Response getDatasetTypes(@PathParam("idOrName") String idOrName) { + DatasetType datasetType = null; + if (StringUtils.isNumeric(idOrName)) { + try { + long id = Long.parseLong(idOrName); + datasetType = datasetTypeSvc.getById(id); + } catch (NumberFormatException ex) { + return error(NOT_FOUND, "Could not find a dataset type with id " + idOrName); + } + } else { + datasetType = datasetTypeSvc.getByName(idOrName); + } if (datasetType != null) { return ok(datasetType.toJson()); } else { - return error(NOT_FOUND, "Could not find a dataset type with name " + name); + return error(NOT_FOUND, "Could not find a dataset type with name " + idOrName); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java index f0977b0fa22..78bf232e1a6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java @@ -16,6 +16,8 @@ @NamedQueries({ @NamedQuery(name = "DatasetType.findAll", query = "SELECT d FROM DatasetType d"), + @NamedQuery(name = "DatasetType.findById", + query = "SELECT d FROM DatasetType d WHERE d.id=:id"), @NamedQuery(name = "DatasetType.findByName", query = "SELECT d FROM DatasetType d WHERE d.name=:name"), @NamedQuery(name = "DatasetType.deleteById", diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetTypeServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetTypeServiceBean.java index beaaa3e2578..dcb6822d524 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetTypeServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetTypeServiceBean.java @@ -30,6 +30,17 @@ public List listAll() { return em.createNamedQuery("DatasetType.findAll", DatasetType.class).getResultList(); } + public DatasetType getById(long id) { + try { + return em.createNamedQuery("DatasetType.findById", DatasetType.class) + .setParameter("id", id) + .getSingleResult(); + } catch (NoResultException noResultException) { + logger.log(Level.WARNING, "Couldn't find a dataset type with id " + id); + return null; + } + } + public DatasetType getByName(String name) { try { return em.createNamedQuery("DatasetType.findByName", DatasetType.class) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java index ef24f0102b7..360cddf7ee2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java @@ -21,7 +21,7 @@ public class DatasetTypesIT { public static void setUpClass() { RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); - Response getSoftwareType = UtilIT.getDatasetTypeByName(DatasetType.DATASET_TYPE_SOFTWARE); + Response getSoftwareType = UtilIT.getDatasetType(DatasetType.DATASET_TYPE_SOFTWARE); getSoftwareType.prettyPrint(); String typeFound = 
JsonPath.from(getSoftwareType.getBody().asString()).getString("data.name"); @@ -175,7 +175,7 @@ public void testGetDatasetTypes() { @Test public void testGetDefaultDatasetType() { - Response getType = UtilIT.getDatasetTypeByName(DatasetType.DEFAULT_DATASET_TYPE); + Response getType = UtilIT.getDatasetType(DatasetType.DEFAULT_DATASET_TYPE); getType.prettyPrint(); getType.then().assertThat() .statusCode(OK.getStatusCode()) @@ -203,7 +203,13 @@ public void testAddAndDeleteDatasetType() { typeAdded.then().assertThat().statusCode(OK.getStatusCode()); - long doomed = JsonPath.from(typeAdded.getBody().asString()).getLong("data.id"); + Long doomed = JsonPath.from(typeAdded.getBody().asString()).getLong("data.id"); + + System.out.println("doomed: " + doomed); + Response getTypeById = UtilIT.getDatasetType(doomed.toString()); + getTypeById.prettyPrint(); + getTypeById.then().assertThat().statusCode(OK.getStatusCode()); + System.out.println("deleting type with id " + doomed); Response typeDeleted = UtilIT.deleteDatasetTypes(doomed, apiToken); typeDeleted.prettyPrint(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index e2aaefbcd1a..c24ff599d9c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -4089,9 +4089,9 @@ public static Response getDatasetTypes() { return response; } - static Response getDatasetTypeByName(String name) { + static Response getDatasetType(String idOrName) { return given() - .get("/api/datasets/datasetTypes/byName/" + name); + .get("/api/datasets/datasetTypes/" + idOrName); } static Response addDatasetType(String jsonIn, String apiToken) { From 2fb4fa6dca1dda00d5250d218ac6f99a071dc5d2 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 13 Aug 2024 14:44:24 -0400 Subject: [PATCH 29/45] stop logging to actionlogrecord for dataset types #10517 --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 1 - .../iq/dataverse/dataset/DatasetTypeServiceBean.java | 7 ------- 2 files changed, 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 89ef51eb2e5..5bf75402a15 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -5147,7 +5147,6 @@ public Response addDatasetType(@Context ContainerRequestContext crc, String json DatasetType saved = datasetTypeSvc.save(datasetType); Long typeId = saved.getId(); String name = saved.getName(); - actionLogSvc.log(new ActionLogRecord(ActionLogRecord.ActionType.Admin, "addDatasetType").setInfo("Dataset type added with id " + typeId + " and name " + name + ".")); return ok(saved.toJson()); } catch (WrappedResponse ex) { return error(BAD_REQUEST, ex.getMessage()); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetTypeServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetTypeServiceBean.java index dcb6822d524..832182f2a4a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetTypeServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetTypeServiceBean.java @@ -1,9 +1,7 @@ package edu.harvard.iq.dataverse.dataset; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; -import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; import edu.harvard.iq.dataverse.api.AbstractApiBean; -import jakarta.ejb.EJB; import jakarta.ejb.Stateless; 
import jakarta.inject.Named; import jakarta.persistence.EntityManager; @@ -23,9 +21,6 @@ public class DatasetTypeServiceBean { @PersistenceContext EntityManager em; - @EJB - ActionLogServiceBean actionLogSvc; - public List listAll() { return em.createNamedQuery("DatasetType.findAll", DatasetType.class).getResultList(); } @@ -70,8 +65,6 @@ public DatasetType save(DatasetType datasetType) throws AbstractApiBean.WrappedR } public int deleteById(long id) throws AbstractApiBean.WrappedResponse { - actionLogSvc.log(new ActionLogRecord(ActionLogRecord.ActionType.Admin, "delete dataset type") - .setInfo(Long.toString(id))); try { return em.createNamedQuery("DatasetType.deleteById").setParameter("id", id).executeUpdate(); } catch (PersistenceException p) { From eb20155779656e8109c2bc5ac9e3662bb0a9e8ea Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 13 Aug 2024 14:57:31 -0400 Subject: [PATCH 30/45] prevent default dataset type from being deleted #10517 --- .../harvard/iq/dataverse/api/Datasets.java | 9 ++++++++ .../iq/dataverse/api/DatasetTypesIT.java | 23 +++++++++++++++++++ 2 files changed, 32 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 5bf75402a15..9799cb8eaf1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -5178,6 +5178,15 @@ public Response deleteDatasetType(@Context ContainerRequestContext crc, @PathPar throw new IllegalArgumentException("ID must be a number"); } + DatasetType datasetTypeToDelete = datasetTypeSvc.getById(idToDelete); + if (datasetTypeToDelete == null) { + return error(BAD_REQUEST, "Could not find dataset type with id " + idToDelete); + } + + if (DatasetType.DEFAULT_DATASET_TYPE.equals(datasetTypeToDelete.getName())) { + return error(Status.FORBIDDEN, "You cannot delete the default dataset type: " + DatasetType.DEFAULT_DATASET_TYPE); + } + try { int numDeleted = datasetTypeSvc.deleteById(idToDelete); if (numDeleted == 1) { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java index 360cddf7ee2..072e4878663 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java @@ -7,6 +7,7 @@ import jakarta.json.Json; import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; import static jakarta.ws.rs.core.Response.Status.CREATED; +import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; import static jakarta.ws.rs.core.Response.Status.OK; import java.util.UUID; import org.hamcrest.CoreMatchers; @@ -182,6 +183,28 @@ public void testGetDefaultDatasetType() { .body("data.name", equalTo(DatasetType.DEFAULT_DATASET_TYPE)); } + @Test + public void testDeleteDefaultDatasetType() { + Response getType = UtilIT.getDatasetType(DatasetType.DEFAULT_DATASET_TYPE); + getType.prettyPrint(); + getType.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.name", equalTo(DatasetType.DEFAULT_DATASET_TYPE)); + + Long doomed = JsonPath.from(getType.getBody().asString()).getLong("data.id"); + + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + UtilIT.setSuperuserStatus(username, 
true).then().assertThat().statusCode(OK.getStatusCode()); + + Response deleteType = UtilIT.deleteDatasetTypes(doomed, apiToken); + deleteType.prettyPrint(); + deleteType.then().assertThat() + .statusCode(FORBIDDEN.getStatusCode()); + } + @Test public void testAddAndDeleteDatasetType() { Response createUser = UtilIT.createRandomUser(); From 1a834de0e7d715111fae310cb727712c7911a310 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 13 Aug 2024 16:11:57 -0400 Subject: [PATCH 31/45] get rid of unneeded null checks #10517 These days the field is non-nullable and we prevent deletion of the default dataset type. --- .../engine/command/impl/AbstractCreateDatasetCommand.java | 3 --- .../edu/harvard/iq/dataverse/search/IndexServiceBean.java | 4 +--- .../edu/harvard/iq/dataverse/search/SearchServiceBean.java | 4 +--- .../java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java | 3 --- .../java/edu/harvard/iq/dataverse/util/json/JsonParser.java | 3 --- .../java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 5 +---- .../harvard/iq/dataverse/search/IndexServiceBeanTest.java | 4 ++++ 7 files changed, 7 insertions(+), 19 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java index 0b39e31b890..7b7c5fd0e93 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java @@ -122,9 +122,6 @@ public Dataset execute(CommandContext ctxt) throws CommandException { handlePid(theDataset, ctxt); DatasetType defaultDatasetType = ctxt.datasetTypes().getByName(DatasetType.DEFAULT_DATASET_TYPE); - if (defaultDatasetType == null) { - throw new CommandException("Couldn't find default dataset type: " + DatasetType.DEFAULT_DATASET_TYPE, this); - } DatasetType existingDatasetType = theDataset.getDatasetType(); logger.fine("existing dataset type: " + existingDatasetType); if (existingDatasetType != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index a54aa6e9504..fd769846490 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -1004,9 +1004,7 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set Date: Tue, 13 Aug 2024 16:38:20 -0400 Subject: [PATCH 32/45] use proper JSON-LD for dataset type #10517 --- .../api/dataset-create-software.jsonld | 2 +- .../iq/dataverse/util/json/JSONLDUtil.java | 24 +++++-------------- .../iq/dataverse/util/json/JsonLDTerm.java | 2 ++ 3 files changed, 9 insertions(+), 19 deletions(-) diff --git a/doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld b/doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld index e5c93f58c03..6f072967dc8 100644 --- a/doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld +++ b/doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld @@ -12,5 +12,5 @@ "https://dataverse.org/schema/citation/dsDescription": { "https://dataverse.org/schema/citation/dsDescriptionValue": "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds."
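
Patch 32 changes this example file from a bare `datasetType` key to the full JSON-LD term `https://dataverse.org/schema/core#datasetType`. From client code the expanded term is just another string key, so building a request body stays trivial. A minimal sketch with `jakarta.json` (only two fields shown; the title is reused from the example above):

```java
import jakarta.json.Json;
import jakarta.json.JsonObject;

public class DatasetTypeJsonLdSketch {
    public static void main(String[] args) {
        // JSON-LD terms here are full IRIs, so no @context entry
        // is needed for the dataset type key.
        JsonObject body = Json.createObjectBuilder()
                .add("http://purl.org/dc/terms/title", "Darwin's Finches")
                .add("https://dataverse.org/schema/core#datasetType", "software")
                .build();
        System.out.println(body);
    }
}
```
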
}, - "datasetType": "software" + "https://dataverse.org/schema/core#datasetType": "software" } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java index c1ec6f2f0ca..380cef6aa9d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java @@ -98,26 +98,14 @@ public static Dataset updateDatasetMDFromJsonLD(Dataset ds, String jsonLDBody, //Store the metadatalanguage if sent - the caller needs to check whether it is allowed (as with any GlobalID) ds.setMetadataLanguage(jsonld.getString(JsonLDTerm.schemaOrg("inLanguage").getUrl(),null)); - try (StringReader rdr = new StringReader(jsonLDBody)) { - try (JsonReader jsonReader = Json.createReader(rdr)) { - JsonObject jsonObject = jsonReader.readObject(); - String datasetTypeIn = jsonObject.getString("datasetType", null); - logger.fine("datasetTypeIn: " + datasetTypeIn); - DatasetType defaultDatasetType = datasetTypeSvc.getByName(DatasetType.DEFAULT_DATASET_TYPE); - if (datasetTypeIn == null) { - ds.setDatasetType(defaultDatasetType); - } else { - DatasetType datasetType = datasetTypeSvc.getByName(datasetTypeIn); - if (datasetType != null) { - ds.setDatasetType(datasetType); - } else { - throw new BadRequestException("Invalid dataset type: " + datasetTypeIn); - } - } - } + String datasetTypeIn = jsonld.getString(JsonLDTerm.datasetType.getUrl(), DatasetType.DEFAULT_DATASET_TYPE); + DatasetType datasetType = datasetTypeSvc.getByName(datasetTypeIn); + if (datasetType != null) { + ds.setDatasetType(datasetType); + } else { + throw new BadRequestException("Invalid dataset type: " + datasetTypeIn); } - dsv = updateDatasetVersionMDFromJsonLD(dsv, jsonld, metadataBlockSvc, datasetFieldSvc, append, migrating, licenseSvc); dsv.setDataset(ds); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonLDTerm.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonLDTerm.java index 3193f762538..3166fa9dbfa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonLDTerm.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonLDTerm.java @@ -52,6 +52,8 @@ public class JsonLDTerm { public static JsonLDTerm fileCount = JsonLDTerm.DVCore("fileCount"); public static JsonLDTerm maxFileSize = JsonLDTerm.DVCore("maxFileSize"); + public static JsonLDTerm datasetType = JsonLDTerm.DVCore("datasetType"); + public JsonLDTerm(JsonLDNamespace namespace, String term) { this.namespace = namespace; this.term = term; From faace91b2042f22fd20b4554cede36c149040f6c Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 22 Aug 2024 15:44:31 -0400 Subject: [PATCH 33/45] add copyField for datasetType #10517 --- conf/solr/schema.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/conf/solr/schema.xml b/conf/solr/schema.xml index 274511e9c93..b323d0c74af 100644 --- a/conf/solr/schema.xml +++ b/conf/solr/schema.xml @@ -427,6 +427,7 @@ + <copyField source="datasetType" dest="_text_" maxChars="3000"/> From 260ac3acd8ce551d23fc16d679db08e1133c0a56 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 22 Aug 2024 15:48:33 -0400 Subject: [PATCH 34/45] make Solr schema.xml instructions more generic #10517 --- doc/release-notes/10517-datasetType.md | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/doc/release-notes/10517-datasetType.md b/doc/release-notes/10517-datasetType.md index 722b2f96321..2e3aff940c7 100644 --- a/doc/release-notes/10517-datasetType.md +++ b/doc/release-notes/10517-datasetType.md @@ -7,8 +7,4 @@ For
details see -``` +Update your Solr schema.xml file to pick up the "datasetType" additions and do a full reindex. From 647121acbd950ed06b56ea24aeaee8673c9b489b Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 22 Aug 2024 15:53:41 -0400 Subject: [PATCH 35/45] whoops, should have been removed as part of 200a45a #10517 --- doc/sphinx-guides/source/user/dataset-management.rst | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index 02381000519..5f8934ea30e 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -803,17 +803,6 @@ Dataset types can be listed, added, or deleted via API. See :ref:`api-dataset-ty Development of the dataset types feature is ongoing. Please see https://github.com/IQSS/dataverse/issues/10489 for details. -.. _supported-dataset-types: - -Supported Dataset Types ------------------------ - -Out of the box, Dataverse is configured with three dataset types: - -- dataset (default) -- software -- workflow - .. |image1| image:: ./img/DatasetDiagram.png :class: img-responsive .. |image3| image:: ./img/data_publishing_version_workflow.png From d865521af66b11a06bc91f89f195bb0864891f48 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 22 Aug 2024 15:56:42 -0400 Subject: [PATCH 36/45] reformat to replace tabs with spaces, etc #10517 Netbeans was used to reformat the code. "IQSS has standardized on Netbeans" is less true than it once was but we still say that here: https://guides.dataverse.org/en/6.3/developers/coding-style.html#format-code-you-changed-with-netbeans --- .../iq/dataverse/search/SolrSearchResult.java | 2421 +++++++++-------- 1 file changed, 1226 insertions(+), 1195 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java index 07b44cef3bf..01cbf7c1055 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java @@ -26,137 +26,143 @@ import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; public class SolrSearchResult { - // TODO: remove all tabs from this file - private static final Logger logger = Logger.getLogger(SolrSearchResult.class.getCanonicalName()); - - private String id; - private Long entityId; - private DvObject entity; - private String identifier; - private String type; - private String htmlUrl; - private String persistentUrl; - private String downloadUrl; - private String apiUrl; - /** - * This is called "imageUrl" because it used to really be a URL. While performance improvements were being made in the 4.2 timeframe, we started - * putting base64 representations of images in this String instead, which broke the Search API and probably things built on top of it such as MyData. - * See "`image_url` from Search API results no longer yields a downloadable image" at https://github.com/IQSS/dataverse/issues/3616 - */ - private String imageUrl; - private DatasetThumbnail datasetThumbnail; - private String query; - private String name; - private String nameSort; - private String status; - private Date releaseOrCreateDate; - private String dateToDisplayOnCard; - private List publicationStatuses = new ArrayList<>(); - - /** - * @todo: how important is it to differentiate between name and title? 
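
Stepping back to the release note updated in patch 34 above: once the "datasetType" copyField is in the Solr schema, a full reindex is needed before the new field becomes searchable. A sketch of triggering one through the admin API, assuming the stock `/api/admin/index` endpoint is reachable (it is typically restricted to localhost):

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ReindexSketch {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        // Kicks off an asynchronous reindex of all dvObjects.
        HttpRequest req = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/api/admin/index"))
                .GET()
                .build();
        HttpResponse<String> resp = client.send(req, HttpResponse.BodyHandlers.ofString());
        System.out.println(resp.statusCode() + " " + resp.body());
    }
}
```
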
- */ - private String title; - private String descriptionNoSnippet; - private List datasetAuthors = new ArrayList<>(); - private String deaccessionReason; - private List highlightsAsList = new ArrayList<>(); - private Map highlightsMap; - private Map highlightsAsMap; - - // parent can be dataverse or dataset, store the name and id - /** - * The "identifier" of a file's parent (a dataset) is a globalId (often a doi). - */ - public static String PARENT_IDENTIFIER = "identifier"; - private Map parent; - private String dataverseAffiliation; - private String citation; - private String citationHtml; - private String datasetType; - /** - * Files and datasets might have a UNF. Dataverses don't. - */ - private String unf; - private String filetype; - private String fileContentType; - private Long fileSizeInBytes; - /** - * fileMD5 is here for legacy and backward-compatibility reasons. It might be deprecated some day in favor of "fileChecksumType" and - * "fileChecksumValue" - */ - private String fileMd5; - private DataFile.ChecksumType fileChecksumType; - private String fileChecksumValue; - private String dataverseAlias; - private String dataverseParentAlias; + + private static final Logger logger = Logger.getLogger(SolrSearchResult.class.getCanonicalName()); + + private String id; + private Long entityId; + private DvObject entity; + private String identifier; + private String type; + private String htmlUrl; + private String persistentUrl; + private String downloadUrl; + private String apiUrl; + /** + * This is called "imageUrl" because it used to really be a URL. While + * performance improvements were being made in the 4.2 timeframe, we started + * putting base64 representations of images in this String instead, which + * broke the Search API and probably things built on top of it such as + * MyData. See "`image_url` from Search API results no longer yields a + * downloadable image" at https://github.com/IQSS/dataverse/issues/3616 + */ + private String imageUrl; + private DatasetThumbnail datasetThumbnail; + private String query; + private String name; + private String nameSort; + private String status; + private Date releaseOrCreateDate; + private String dateToDisplayOnCard; + private List publicationStatuses = new ArrayList<>(); + + /** + * @todo: how important is it to differentiate between name and title? + */ + private String title; + private String descriptionNoSnippet; + private List datasetAuthors = new ArrayList<>(); + private String deaccessionReason; + private List highlightsAsList = new ArrayList<>(); + private Map highlightsMap; + private Map highlightsAsMap; + + // parent can be dataverse or dataset, store the name and id + /** + * The "identifier" of a file's parent (a dataset) is a globalId (often a + * doi). + */ + public static String PARENT_IDENTIFIER = "identifier"; + private Map parent; + private String dataverseAffiliation; + private String citation; + private String citationHtml; + private String datasetType; + /** + * Files and datasets might have a UNF. Dataverses don't. + */ + private String unf; + private String filetype; + private String fileContentType; + private Long fileSizeInBytes; + /** + * fileMD5 is here for legacy and backward-compatibility reasons. 
It might + * be deprecated some day in favor of "fileChecksumType" and + * "fileChecksumValue" + */ + private String fileMd5; + private DataFile.ChecksumType fileChecksumType; + private String fileChecksumValue; + private String dataverseAlias; + private String dataverseParentAlias; // private boolean statePublished; - /** - * @todo Investigate/remove this "unpublishedState" variable. For files that have been published along with a dataset it says "true", which makes no - * sense. - */ - private boolean publishedState = false; - private boolean unpublishedState = false; - private boolean draftState = false; - private boolean inReviewState = false; - private boolean deaccessionedState = false; - private long datasetVersionId; - private String versionNumberFriendly; - // Determine if the search result is owned by any of the dvs in the tree of the DV displayed - private boolean isInTree; - private float score; - private List userRole; - private boolean harvested = false; - private String dvTree; - private String harvestingDescription = null; - private List fileCategories = null; - private List tabularDataTags = null; - - private String identifierOfDataverse = null; - private String nameOfDataverse = null; - - private String filePersistentId = null; - - private Long embargoEndDate; - - private Long retentionEndDate; - - private boolean datasetValid; - - public String getDvTree() { - return dvTree; - } - - public void setDvTree(String dvTree) { - this.dvTree = dvTree; - } - - public boolean isIsInTree() { - return isInTree; - } - - public void setIsInTree(boolean isInTree) { - this.isInTree = isInTree; - } - - public boolean isHarvested() { - return harvested; - } - - public void setHarvested(boolean harvested) { - this.harvested = harvested; - } - - public String getHarvestingDescription() { - // if (this.isHarvested()) { - return harvestingDescription; - // } - // return null; - } - - public void setHarvestingDescription(String harvestingDescription) { - this.harvestingDescription = harvestingDescription; - } + /** + * @todo Investigate/remove this "unpublishedState" variable. For files that + * have been published along with a dataset it says "true", which makes no + * sense. 
+ */ + private boolean publishedState = false; + private boolean unpublishedState = false; + private boolean draftState = false; + private boolean inReviewState = false; + private boolean deaccessionedState = false; + private long datasetVersionId; + private String versionNumberFriendly; + // Determine if the search result is owned by any of the dvs in the tree of the DV displayed + private boolean isInTree; + private float score; + private List userRole; + private boolean harvested = false; + private String dvTree; + private String harvestingDescription = null; + private List fileCategories = null; + private List tabularDataTags = null; + + private String identifierOfDataverse = null; + private String nameOfDataverse = null; + + private String filePersistentId = null; + + private Long embargoEndDate; + + private Long retentionEndDate; + + private boolean datasetValid; + + public String getDvTree() { + return dvTree; + } + + public void setDvTree(String dvTree) { + this.dvTree = dvTree; + } + + public boolean isIsInTree() { + return isInTree; + } + + public void setIsInTree(boolean isInTree) { + this.isInTree = isInTree; + } + + public boolean isHarvested() { + return harvested; + } + + public void setHarvested(boolean harvested) { + this.harvested = harvested; + } + + public String getHarvestingDescription() { + // if (this.isHarvested()) { + return harvestingDescription; + // } + // return null; + } + + public void setHarvestingDescription(String harvestingDescription) { + this.harvestingDescription = harvestingDescription; + } // public boolean isStatePublished() { // return statePublished; // } @@ -164,794 +170,817 @@ public void setHarvestingDescription(String harvestingDescription) { // this.statePublished = statePublished; // } - public boolean isPublishedState() { - return publishedState; - } - - public void setPublishedState(boolean publishedState) { - this.publishedState = publishedState; - } - - public boolean isUnpublishedState() { - return unpublishedState; - } - - public void setUnpublishedState(boolean unpublishedState) { - this.unpublishedState = unpublishedState; - } - - public void setPublicationStatuses(List statuses) { - - if (statuses == null) { - this.publicationStatuses = new ArrayList<>(); - return; - } - this.publicationStatuses = statuses; - - // set booleans for individual statuses - // - for (String status : this.publicationStatuses) { - - if (status.equals(IndexServiceBean.getUNPUBLISHED_STRING())) { - this.setUnpublishedState(true); - - } else if (status.equals(IndexServiceBean.getPUBLISHED_STRING())) { - this.setPublishedState(true); - - } else if (status.equals(IndexServiceBean.getDRAFT_STRING())) { - this.setDraftState(true); - - } else if (status.equals(IndexServiceBean.getIN_REVIEW_STRING())) { - this.setInReviewState(true); - - } else if (status.equals(IndexServiceBean.getDEACCESSIONED_STRING())) { - this.setDeaccessionedState(true); - } - } - } // setPublicationStatuses - - /** - * Never return null, return an empty list instead - * - * @return - */ - public List getPublicationStatuses() { - - if (this.publicationStatuses == null) { - this.publicationStatuses = new ArrayList<>(); - } - return this.publicationStatuses; - } - - public JsonArrayBuilder getPublicationStatusesAsJSON() { - - JsonArrayBuilder statuses = Json.createArrayBuilder(); - for (String status : this.getPublicationStatuses()) { - statuses.add(status); - } - return statuses; - } - - public boolean isDraftState() { - return draftState; - } - - public void setDraftState(boolean draftState) 
{ - this.draftState = draftState; - } - - public boolean isInReviewState() { - return inReviewState; - } - - public void setInReviewState(boolean inReviewState) { - this.inReviewState = inReviewState; - } - - public boolean isDeaccessionedState() { - return deaccessionedState; - } - - public void setDeaccessionedState(boolean deaccessionedState) { - this.deaccessionedState = deaccessionedState; - } - - /** - * @todo: used? remove - */ - private List matchedFields; - - // External Status Label (enabled via AllowedCurationLabels setting) - private String externalStatus; - - /** - * @todo: remove name? - */ - SolrSearchResult(String queryFromUser, String name) { - this.query = queryFromUser; + public boolean isPublishedState() { + return publishedState; + } + + public void setPublishedState(boolean publishedState) { + this.publishedState = publishedState; + } + + public boolean isUnpublishedState() { + return unpublishedState; + } + + public void setUnpublishedState(boolean unpublishedState) { + this.unpublishedState = unpublishedState; + } + + public void setPublicationStatuses(List statuses) { + + if (statuses == null) { + this.publicationStatuses = new ArrayList<>(); + return; + } + this.publicationStatuses = statuses; + + // set booleans for individual statuses + // + for (String status : this.publicationStatuses) { + + if (status.equals(IndexServiceBean.getUNPUBLISHED_STRING())) { + this.setUnpublishedState(true); + + } else if (status.equals(IndexServiceBean.getPUBLISHED_STRING())) { + this.setPublishedState(true); + + } else if (status.equals(IndexServiceBean.getDRAFT_STRING())) { + this.setDraftState(true); + + } else if (status.equals(IndexServiceBean.getIN_REVIEW_STRING())) { + this.setInReviewState(true); + + } else if (status.equals(IndexServiceBean.getDEACCESSIONED_STRING())) { + this.setDeaccessionedState(true); + } + } + } // setPublicationStatuses + + /** + * Never return null, return an empty list instead + * + * @return + */ + public List getPublicationStatuses() { + + if (this.publicationStatuses == null) { + this.publicationStatuses = new ArrayList<>(); + } + return this.publicationStatuses; + } + + public JsonArrayBuilder getPublicationStatusesAsJSON() { + + JsonArrayBuilder statuses = Json.createArrayBuilder(); + for (String status : this.getPublicationStatuses()) { + statuses.add(status); + } + return statuses; + } + + public boolean isDraftState() { + return draftState; + } + + public void setDraftState(boolean draftState) { + this.draftState = draftState; + } + + public boolean isInReviewState() { + return inReviewState; + } + + public void setInReviewState(boolean inReviewState) { + this.inReviewState = inReviewState; + } + + public boolean isDeaccessionedState() { + return deaccessionedState; + } + + public void setDeaccessionedState(boolean deaccessionedState) { + this.deaccessionedState = deaccessionedState; + } + + /** + * @todo: used? remove + */ + private List matchedFields; + + // External Status Label (enabled via AllowedCurationLabels setting) + private String externalStatus; + + /** + * @todo: remove name? 
+ */ + SolrSearchResult(String queryFromUser, String name) { + this.query = queryFromUser; // this.name = name; - } - - public Map getHighlightsAsMap() { - return highlightsAsMap; - } - - public void setHighlightsAsMap(Map highlightsAsMap) { - this.highlightsAsMap = highlightsAsMap; - } - - public String getNameHighlightSnippet() { - Highlight highlight = highlightsAsMap.get(SearchFields.NAME); - if (highlight != null) { - String firstSnippet = highlight.getSnippets().get(0); - if (firstSnippet != null) { - return firstSnippet; - } - } - return null; - } - - public String getDataverseAffiliationHighlightSnippet() { - Highlight highlight = highlightsAsMap.get(SearchFields.AFFILIATION); - if (highlight != null) { - String firstSnippet = highlight.getSnippets().get(0); - if (firstSnippet != null) { - return firstSnippet; - } - } - return null; - } - - public String getFileTypeHighlightSnippet() { - Highlight highlight = highlightsAsMap.get(SearchFields.FILE_TYPE_FRIENDLY); - if (highlight != null) { - String firstSnippet = highlight.getSnippets().get(0); - if (firstSnippet != null) { - return firstSnippet; - } - } - return null; - } - - public String getTitleHighlightSnippet() { - /** - * @todo: don't hard-code title, look it up properly... or start indexing titles as names: https://redmine.hmdc.harvard.edu/issues/3798#note-2 - */ - Highlight highlight = highlightsAsMap.get("title"); - if (highlight != null) { - String firstSnippet = highlight.getSnippets().get(0); - if (firstSnippet != null) { - return firstSnippet; - } - } - return null; - } - - public List getDescriptionSnippets() { - for (Map.Entry entry : highlightsMap.entrySet()) { - SolrField solrField = entry.getKey(); - Highlight highlight = entry.getValue(); - logger.fine("SolrSearchResult class: " + solrField.getNameSearchable() + ":" + highlight.getSnippets()); - } - - Highlight highlight = highlightsAsMap.get(SearchFields.DESCRIPTION); - if (type.equals("datasets")) { - highlight = highlightsAsMap.get(SearchFields.DATASET_DESCRIPTION); - } - if (highlight != null) { - return highlight.getSnippets(); - } else { - return new ArrayList<>(); - } - } - - public Map getHighlightsMap() { - return highlightsMap; - } - - public void setHighlightsMap(Map highlightsMap) { - this.highlightsMap = highlightsMap; - } - - public List getMatchedFields() { - return matchedFields; - } - - public void setMatchedFields(List matchedFields) { - this.matchedFields = matchedFields; - } - - @Override - public String toString() { - if (this.name != null) { - return this.id + ":" + this.name + ":" + this.entityId; - } else { - return this.id + ":" + this.title + ":" + this.entityId; - } - } - - public JsonArrayBuilder getRelevance() { - JsonArrayBuilder matchedFieldsArray = Json.createArrayBuilder(); - JsonObjectBuilder matchedFieldObject = Json.createObjectBuilder(); - for (Map.Entry entry : highlightsMap.entrySet()) { - SolrField solrField = entry.getKey(); - Highlight snippets = entry.getValue(); - JsonArrayBuilder snippetArrayBuilder = Json.createArrayBuilder(); - JsonObjectBuilder matchedFieldDetails = Json.createObjectBuilder(); - for (String highlight : snippets.getSnippets()) { - snippetArrayBuilder.add(highlight); - } - /** - * @todo for the Search API, it might be nice to return offset numbers rather than html snippets surrounded by span tags or whatever. 
- * - * That's what the GitHub Search API does: "Requests can opt to receive those text fragments in the response, and every fragment is accompanied - * by numeric offsets identifying the exact location of each matching search term." https://developer.github.com/v3/search/#text-match-metadata - * - * It's not clear if getting the offset values is possible with Solr, however: - * stackoverflow.com/questions/13863118/can-solr-highlighting-also-indicate-the-position-or-offset-of-the-returned-fragments-within-the-original-field - */ - matchedFieldDetails.add("snippets", snippetArrayBuilder); - /** - * @todo In addition to the name of the field used by Solr , it would be nice to show the "friendly" name of the field we show in the GUI. - */ + } + + public Map getHighlightsAsMap() { + return highlightsAsMap; + } + + public void setHighlightsAsMap(Map highlightsAsMap) { + this.highlightsAsMap = highlightsAsMap; + } + + public String getNameHighlightSnippet() { + Highlight highlight = highlightsAsMap.get(SearchFields.NAME); + if (highlight != null) { + String firstSnippet = highlight.getSnippets().get(0); + if (firstSnippet != null) { + return firstSnippet; + } + } + return null; + } + + public String getDataverseAffiliationHighlightSnippet() { + Highlight highlight = highlightsAsMap.get(SearchFields.AFFILIATION); + if (highlight != null) { + String firstSnippet = highlight.getSnippets().get(0); + if (firstSnippet != null) { + return firstSnippet; + } + } + return null; + } + + public String getFileTypeHighlightSnippet() { + Highlight highlight = highlightsAsMap.get(SearchFields.FILE_TYPE_FRIENDLY); + if (highlight != null) { + String firstSnippet = highlight.getSnippets().get(0); + if (firstSnippet != null) { + return firstSnippet; + } + } + return null; + } + + public String getTitleHighlightSnippet() { + /** + * @todo: don't hard-code title, look it up properly... 
or start + * indexing titles as names: + * https://redmine.hmdc.harvard.edu/issues/3798#note-2 + */ + Highlight highlight = highlightsAsMap.get("title"); + if (highlight != null) { + String firstSnippet = highlight.getSnippets().get(0); + if (firstSnippet != null) { + return firstSnippet; + } + } + return null; + } + + public List getDescriptionSnippets() { + for (Map.Entry entry : highlightsMap.entrySet()) { + SolrField solrField = entry.getKey(); + Highlight highlight = entry.getValue(); + logger.fine("SolrSearchResult class: " + solrField.getNameSearchable() + ":" + highlight.getSnippets()); + } + + Highlight highlight = highlightsAsMap.get(SearchFields.DESCRIPTION); + if (type.equals("datasets")) { + highlight = highlightsAsMap.get(SearchFields.DATASET_DESCRIPTION); + } + if (highlight != null) { + return highlight.getSnippets(); + } else { + return new ArrayList<>(); + } + } + + public Map getHighlightsMap() { + return highlightsMap; + } + + public void setHighlightsMap(Map highlightsMap) { + this.highlightsMap = highlightsMap; + } + + public List getMatchedFields() { + return matchedFields; + } + + public void setMatchedFields(List matchedFields) { + this.matchedFields = matchedFields; + } + + @Override + public String toString() { + if (this.name != null) { + return this.id + ":" + this.name + ":" + this.entityId; + } else { + return this.id + ":" + this.title + ":" + this.entityId; + } + } + + public JsonArrayBuilder getRelevance() { + JsonArrayBuilder matchedFieldsArray = Json.createArrayBuilder(); + JsonObjectBuilder matchedFieldObject = Json.createObjectBuilder(); + for (Map.Entry entry : highlightsMap.entrySet()) { + SolrField solrField = entry.getKey(); + Highlight snippets = entry.getValue(); + JsonArrayBuilder snippetArrayBuilder = Json.createArrayBuilder(); + JsonObjectBuilder matchedFieldDetails = Json.createObjectBuilder(); + for (String highlight : snippets.getSnippets()) { + snippetArrayBuilder.add(highlight); + } + /** + * @todo for the Search API, it might be nice to return offset + * numbers rather than html snippets surrounded by span tags or + * whatever. + * + * That's what the GitHub Search API does: "Requests can opt to + * receive those text fragments in the response, and every fragment + * is accompanied by numeric offsets identifying the exact location + * of each matching search term." + * https://developer.github.com/v3/search/#text-match-metadata + * + * It's not clear if getting the offset values is possible with + * Solr, however: + * stackoverflow.com/questions/13863118/can-solr-highlighting-also-indicate-the-position-or-offset-of-the-returned-fragments-within-the-original-field + */ + matchedFieldDetails.add("snippets", snippetArrayBuilder); + /** + * @todo In addition to the name of the field used by Solr , it + * would be nice to show the "friendly" name of the field we show in + * the GUI. 
+ */ // matchedFieldDetails.add("friendly", "FIXME"); - matchedFieldObject.add(solrField.getNameSearchable(), matchedFieldDetails); - matchedFieldsArray.add(matchedFieldObject); - } - return matchedFieldsArray; - } - - /** - * Add additional fields for the MyData page - * - * @return - */ - public JsonObjectBuilder getJsonForMyData(boolean isValid) { - - JsonObjectBuilder myDataJson = json(true, true, true);// boolean showRelevance, boolean showEntityIds, boolean showApiUrls) - - myDataJson.add("publication_statuses", this.getPublicationStatusesAsJSON()) - .add("is_draft_state", this.isDraftState()).add("is_in_review_state", this.isInReviewState()) - .add("is_unpublished_state", this.isUnpublishedState()).add("is_published", this.isPublishedState()) - .add("is_deaccesioned", this.isDeaccessionedState()) - .add("is_valid", isValid) - .add("date_to_display_on_card", getDateToDisplayOnCard()); - - // Add is_deaccessioned attribute, even though MyData currently screens any deaccessioned info out - // - if ((this.isDeaccessionedState()) && (this.getPublicationStatuses().size() == 1)) { - myDataJson.add("deaccesioned_is_only_pubstatus", true); - } - - if ((this.getParent() != null) && (!this.getParent().isEmpty())) { - // System.out.println("keys:" + parent.keySet().toString()); - if (this.entity != null && this.entity.isInstanceofDataFile()) { - myDataJson.add("parentIdentifier", this.getParent().get(SolrSearchResult.PARENT_IDENTIFIER)) - .add("parentName", this.getParent().get("name")); - - } else { - // for Dataverse and Dataset, get parent which is a Dataverse - myDataJson.add("parentId", this.getParent().get("id")).add("parentName", this.getParent().get("name")); - } - } - - return myDataJson; - } // getJsonForMydata - - public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, boolean showApiUrls) { - return json(showRelevance, showEntityIds, showApiUrls, null, null); - } - - public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, boolean showApiUrls, List metadataFields, Long datasetFileCount) { - if (this.type == null) { - return jsonObjectBuilder(); - } - - String displayName = null; - - String identifierLabel = null; - String datasetCitation = null; - String datasetName = null; - String datasetId = null; - String datasetPersistentId = null; - String filePersistentId = null; - String preferredUrl = null; - String apiUrl = null; - String publisherName = null; - - if (this.type.equals(SearchConstants.DATAVERSES)) { - displayName = this.name; - identifierLabel = "identifier"; - preferredUrl = getHtmlUrl(); - } else if (this.type.equals(SearchConstants.DATASETS)) { - displayName = this.title; - identifierLabel = "global_id"; - preferredUrl = getPersistentUrl(); - publisherName = this.parent.get("name"); - // if - /** - * @todo Should we show the name of the parent dataverse? - */ - } else if (this.type.equals(SearchConstants.FILES)) { - displayName = this.name; - identifierLabel = "file_id"; - preferredUrl = getDownloadUrl(); - /** - * @todo show more information for a file's parent, such as the title of the dataset it belongs to. 
- */ - datasetCitation = parent.get("citation"); - datasetName = parent.get("name"); - datasetId = parent.get("id"); - datasetPersistentId = parent.get(SolrSearchResult.PARENT_IDENTIFIER); - } - - // displayName = null; // testing NullSafeJsonBuilder - // because we are using NullSafeJsonBuilder key/value pairs will be dropped if the value is null - NullSafeJsonBuilder nullSafeJsonBuilder = jsonObjectBuilder().add("name", displayName) - .add("type", getDisplayType(getType())).add("url", preferredUrl).add("image_url", getImageUrl()) - // .add("persistent_url", this.persistentUrl) - // .add("download_url", this.downloadUrl) - /** - * @todo How much value is there in exposing the identifier for dataverses? For - */ - .add(identifierLabel, this.identifier) - /** - * @todo Get dataset description from dsDescriptionValue. Also, is descriptionNoSnippet the right field to use generally? - * - * @todo What about the fact that datasets can now have multiple descriptions? Should we create an array called "additional_descriptions" that gets - * populated if there is more than one dataset description? - * - * @todo Why aren't file descriptions ever null? They always have an empty string at least. - */ - .add("description", this.descriptionNoSnippet) - /** - * @todo In the future we'd like to support non-public datasets per https://github.com/IQSS/dataverse/issues/1299 but for now we are only supporting - * non-public searches. - */ - .add("published_at", getDateTimePublished()) - /** - * @todo Expose MIME Type: https://github.com/IQSS/dataverse/issues/1595 - */ - .add("file_type", this.filetype).add("file_content_type", this.fileContentType) - .add("size_in_bytes", getFileSizeInBytes()) - /** - * "md5" was the only possible value so it's hard-coded here but we might want to deprecate it someday since we now put the MD5 or SHA-1 in - * "checksum". - */ - .add("md5", getFileMd5()) - .add("checksum", JsonPrinter.getChecksumTypeAndValue(getFileChecksumType(), getFileChecksumValue())) - .add("unf", getUnf()).add("file_persistent_id", this.filePersistentId).add("dataset_name", datasetName) - .add("dataset_id", datasetId).add("publisher", publisherName) - .add("dataset_persistent_id", datasetPersistentId).add("dataset_citation", datasetCitation) - .add("deaccession_reason", this.deaccessionReason).add("citationHtml", this.citationHtml) - .add("identifier_of_dataverse", this.identifierOfDataverse) - .add("name_of_dataverse", this.nameOfDataverse).add("citation", this.citation); - // Now that nullSafeJsonBuilder has been instatiated, check for null before adding to it! 
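
The comments above capture the contract this long builder chain leans on: `NullSafeJsonBuilder` drops a key/value pair entirely when the value is null, instead of serializing a JSON null. A toy sketch of that behavior, assuming the same `jsonObjectBuilder()` static factory this class imports:

```java
import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;

import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;

public class NullSafeSketch {
    public static void main(String[] args) {
        String unf = null; // e.g. dataverses never have a UNF
        NullSafeJsonBuilder builder = jsonObjectBuilder()
                .add("name", "Darwin's Finches")
                .add("unf", unf); // silently skipped, not emitted as null
        // Prints {"name":"Darwin's Finches"} with no "unf" key at all.
        System.out.println(builder.build());
    }
}
```
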
- if (showRelevance) { - nullSafeJsonBuilder.add("matches", getRelevance()); - nullSafeJsonBuilder.add("score", getScore()); - } - if (showEntityIds) { - if (this.entityId != null) { - nullSafeJsonBuilder.add("entity_id", this.entityId); - } - } - if (!getPublicationStatuses().isEmpty()) { - nullSafeJsonBuilder.add("publicationStatuses", getPublicationStatusesAsJSON()); - } - - if (this.entity == null) { - - } else { - if (this.entity.isInstanceofDataset()) { - nullSafeJsonBuilder.add("storageIdentifier", this.entity.getStorageIdentifier()); - Dataset ds = (Dataset) this.entity; - DatasetVersion dv = ds.getVersionFromId(this.datasetVersionId); - - if (!dv.getKeywords().isEmpty()) { - JsonArrayBuilder keyWords = Json.createArrayBuilder(); - for (String keyword : dv.getKeywords()) { - keyWords.add(keyword); - } - nullSafeJsonBuilder.add("keywords", keyWords); - } - - JsonArrayBuilder subjects = Json.createArrayBuilder(); - for (String subject : dv.getDatasetSubjects()) { - subjects.add(subject); - } - nullSafeJsonBuilder.add("subjects", subjects); - nullSafeJsonBuilder.add("fileCount", datasetFileCount); - nullSafeJsonBuilder.add("versionId", dv.getId()); - nullSafeJsonBuilder.add("versionState", dv.getVersionState().toString()); - if (this.isPublishedState()) { - nullSafeJsonBuilder.add("majorVersion", dv.getVersionNumber()); - nullSafeJsonBuilder.add("minorVersion", dv.getMinorVersionNumber()); - } - - nullSafeJsonBuilder.add("createdAt", ds.getCreateDate()); - nullSafeJsonBuilder.add("updatedAt", ds.getModificationTime()); - - if (!dv.getDatasetContacts().isEmpty()) { - JsonArrayBuilder contacts = Json.createArrayBuilder(); - NullSafeJsonBuilder nullSafeJsonBuilderInner = jsonObjectBuilder(); - for (String contact[] : dv.getDatasetContacts(false)) { - nullSafeJsonBuilderInner.add("name", contact[0]); - nullSafeJsonBuilderInner.add("affiliation", contact[1]); - contacts.add(nullSafeJsonBuilderInner); - } - nullSafeJsonBuilder.add("contacts", contacts); - } - if (!dv.getRelatedPublications().isEmpty()) { - JsonArrayBuilder relPub = Json.createArrayBuilder(); - NullSafeJsonBuilder inner = jsonObjectBuilder(); - for (DatasetRelPublication dsRelPub : dv.getRelatedPublications()) { - inner.add("title", dsRelPub.getTitle()); - inner.add("citation", dsRelPub.getText()); - inner.add("url", dsRelPub.getUrl()); - relPub.add(inner); - } - nullSafeJsonBuilder.add("publications", relPub); - } - - if (!dv.getDatasetProducers().isEmpty()) { - JsonArrayBuilder producers = Json.createArrayBuilder(); - for (String[] producer : dv.getDatasetProducers()) { - producers.add(producer[0]); - } - nullSafeJsonBuilder.add("producers", producers); - } - if (!dv.getRelatedMaterial().isEmpty()) { - JsonArrayBuilder relatedMaterials = Json.createArrayBuilder(); - for (String relatedMaterial : dv.getRelatedMaterial()) { - relatedMaterials.add(relatedMaterial); - } - nullSafeJsonBuilder.add("relatedMaterial", relatedMaterials); - } - - if (!dv.getGeographicCoverage().isEmpty()) { - JsonArrayBuilder geoCov = Json.createArrayBuilder(); - NullSafeJsonBuilder inner = jsonObjectBuilder(); - for (String ind[] : dv.getGeographicCoverage()) { - inner.add("country", ind[0]); - inner.add("state", ind[1]); - inner.add("city", ind[2]); - inner.add("other", ind[3]); - geoCov.add(inner); - } - nullSafeJsonBuilder.add("geographicCoverage", geoCov); - } - if (!dv.getDataSource().isEmpty()) { - JsonArrayBuilder dataSources = Json.createArrayBuilder(); - for (String dsource : dv.getDataSource()) { - dataSources.add(dsource); - } - 
nullSafeJsonBuilder.add("dataSources", dataSources); - } - - if (CollectionUtils.isNotEmpty(metadataFields)) { - // create metadata fields map names - Map> metadataFieldMapNames = computeRequestedMetadataFieldMapNames( - metadataFields); - - // add metadatafields objet to wrap all requeested fields - NullSafeJsonBuilder metadataFieldBuilder = jsonObjectBuilder(); - - Map> groupedFields = DatasetField - .groupByBlock(dv.getFlatDatasetFields()); - json(metadataFieldMapNames, groupedFields, metadataFieldBuilder); - - nullSafeJsonBuilder.add("metadataBlocks", metadataFieldBuilder); - } - } - } - - if (showApiUrls) { - /** - * @todo We should probably have a metadata_url or api_url concept enabled by default, not hidden behind an undocumented boolean. For datasets, this - * would be http://example.com/api/datasets/10 or whatever (to get more detailed JSON), but right now this requires an API token. Discuss at - * https://docs.google.com/document/d/1d8sT2GLSavgiAuMTVX8KzTCX0lROEET1edhvHHRDZOs/edit?usp=sharing"; - */ - if (getApiUrl() != null) { - nullSafeJsonBuilder.add("api_url", getApiUrl()); - } - } - // NullSafeJsonBuilder is awesome but can't build null safe arrays. :( - if (!datasetAuthors.isEmpty()) { - JsonArrayBuilder authors = Json.createArrayBuilder(); - for (String datasetAuthor : datasetAuthors) { - authors.add(datasetAuthor); - } - nullSafeJsonBuilder.add("authors", authors); - } - return nullSafeJsonBuilder; - } - - private void json(Map> metadataFieldMapNames, - Map> groupedFields, NullSafeJsonBuilder metadataFieldBuilder) { - for (Map.Entry> metadataFieldNamesEntry : metadataFieldMapNames.entrySet()) { - String metadataBlockName = metadataFieldNamesEntry.getKey(); - List metadataBlockFieldNames = metadataFieldNamesEntry.getValue(); - for (MetadataBlock metadataBlock : groupedFields.keySet()) { - if (metadataBlockName.equals(metadataBlock.getName())) { - // create metadataBlock object - NullSafeJsonBuilder metadataBlockBuilder = jsonObjectBuilder(); - metadataBlockBuilder.add("displayName", metadataBlock.getDisplayName()); - JsonArrayBuilder fieldsArray = Json.createArrayBuilder(); - - List datasetFields = groupedFields.get(metadataBlock); - for (DatasetField datasetField : datasetFields) { - if (metadataBlockFieldNames.contains("*") - || metadataBlockFieldNames.contains(datasetField.getDatasetFieldType().getName())) { - if (datasetField.getDatasetFieldType().isCompound() || !datasetField.getDatasetFieldType().isHasParent()) { - JsonObject item = JsonPrinter.json(datasetField); - if (item != null) { - fieldsArray.add(item); - } - } - } - } - // with a fields to hold all requested properties - metadataBlockBuilder.add("fields", fieldsArray); - - metadataFieldBuilder.add(metadataBlock.getName(), metadataBlockBuilder); - } - } - } - } - - private Map> computeRequestedMetadataFieldMapNames(List metadataFields) { - Map> metadataFieldMapNames = new HashMap<>(); - for (String metadataField : metadataFields) { - String parts[] = metadataField.split(":"); - if (parts.length == 2) { - List metadataFieldNames = metadataFieldMapNames.get(parts[0]); - if (metadataFieldNames == null) { - metadataFieldNames = new ArrayList<>(); - metadataFieldMapNames.put(parts[0], metadataFieldNames); - } - metadataFieldNames.add(parts[1]); - } - } - return metadataFieldMapNames; - } - - private String getDateTimePublished() { - String datePublished = null; - if (draftState == false) { - datePublished = releaseOrCreateDate == null ? 
null : Util.getDateTimeFormat().format(releaseOrCreateDate); - } - return datePublished; - } - - public String getId() { - return id; - } - - public void setId(String id) { - this.id = id; - } - - public Long getEntityId() { - return entityId; - } - - public void setEntityId(Long entityId) { - this.entityId = entityId; - } - - public DvObject getEntity() { - return entity; - } - - public void setEntity(DvObject entity) { - this.entity = entity; - } - - public String getIdentifier() { - return identifier; - } - - public void setIdentifier(String identifier) { - this.identifier = identifier; - } - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } - - public String getHtmlUrl() { - return htmlUrl; - } - - public void setHtmlUrl(String htmlUrl) { - this.htmlUrl = htmlUrl; - } - - public String getPersistentUrl() { - return persistentUrl; - } - - public void setPersistentUrl(String persistentUrl) { - this.persistentUrl = persistentUrl; - } - - public String getDownloadUrl() { - return downloadUrl; - } - - public void setDownloadUrl(String downloadUrl) { - this.downloadUrl = downloadUrl; - } - - public String getApiUrl() { - return apiUrl; - } - - public void setApiUrl(String apiUrl) { - this.apiUrl = apiUrl; - } - - public String getImageUrl() { - return imageUrl; - } - - public void setImageUrl(String imageUrl) { - this.imageUrl = imageUrl; - } - - public DatasetThumbnail getDatasetThumbnail() { - return datasetThumbnail; - } - - public void setDatasetThumbnail(DatasetThumbnail datasetThumbnail) { - this.datasetThumbnail = datasetThumbnail; - } - - public String getQuery() { - return query; - } - - public void setQuery(String query) { - this.query = query; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getTitle() { - return title; - } - - public void setTitle(String title) { - this.title = title; - } - - public String getDescriptionNoSnippet() { - return descriptionNoSnippet; - } - - public void setDescriptionNoSnippet(String descriptionNoSnippet) { - this.descriptionNoSnippet = descriptionNoSnippet; - } - - public List getDatasetAuthors() { - return datasetAuthors; - } - - public void setDatasetAuthors(List datasetAuthors) { - this.datasetAuthors = datasetAuthors; - } - - public String getDeaccessionReason() { - return deaccessionReason; - } - - public void setDeaccessionReason(String deaccessionReason) { - this.deaccessionReason = deaccessionReason; - } - - public List getHighlightsAsListOrig() { - return highlightsAsList; - } - - public List getHighlightsAsList() { - List filtered = new ArrayList<>(); - for (Highlight highlight : highlightsAsList) { - String field = highlight.getSolrField().getNameSearchable(); - /** - * @todo don't hard code "title" here. And should we collapse name and title together anyway? 
- */ - if (!field.equals(SearchFields.NAME) && !field.equals(SearchFields.DESCRIPTION) - && !field.equals(SearchFields.DATASET_DESCRIPTION) && !field.equals(SearchFields.AFFILIATION) - && !field.equals("title")) { - filtered.add(highlight); - } - } - return filtered; - } - - public void setHighlightsAsList(List highlightsAsList) { - this.highlightsAsList = highlightsAsList; - } - - public List getFileCategories() { - return fileCategories; - } - - public void setFileCategories(List fileCategories) { - this.fileCategories = fileCategories; - } - - public List getTabularDataTags() { - return tabularDataTags; - } - - public void setTabularDataTags(List tabularDataTags) { - this.tabularDataTags = tabularDataTags; - } - - public Map getParent() { - return parent; - } - - public Long getParentIdAsLong() { - - if (this.getParent() == null) { - return null; - } - if (!this.getParent().containsKey("id")) { - return null; - } - - String parentIdString = getParent().get("id"); - if (parentIdString == null) { - return null; - } - - try { - return Long.parseLong(parentIdString); - } catch (NumberFormatException ex) { - return null; - } - } - - public void setParent(Map parent) { - this.parent = parent; - } - - public String getDataverseAffiliation() { - return dataverseAffiliation; - } - - public void setDataverseAffiliation(String dataverseAffiliation) { - this.dataverseAffiliation = dataverseAffiliation; - } - - public String getCitation() { - return citation; - } - - public void setCitation(String citation) { - this.citation = citation; - } - - public String getCitationHtml() { - return citationHtml; - } - - public void setCitationHtml(String citationHtml) { - this.citationHtml = citationHtml; - } + matchedFieldObject.add(solrField.getNameSearchable(), matchedFieldDetails); + matchedFieldsArray.add(matchedFieldObject); + } + return matchedFieldsArray; + } + + /** + * Add additional fields for the MyData page + * + * @return + */ + public JsonObjectBuilder getJsonForMyData(boolean isValid) { + + JsonObjectBuilder myDataJson = json(true, true, true);// boolean showRelevance, boolean showEntityIds, boolean showApiUrls) + + myDataJson.add("publication_statuses", this.getPublicationStatusesAsJSON()) + .add("is_draft_state", this.isDraftState()).add("is_in_review_state", this.isInReviewState()) + .add("is_unpublished_state", this.isUnpublishedState()).add("is_published", this.isPublishedState()) + .add("is_deaccesioned", this.isDeaccessionedState()) + .add("is_valid", isValid) + .add("date_to_display_on_card", getDateToDisplayOnCard()); + + // Add is_deaccessioned attribute, even though MyData currently screens any deaccessioned info out + // + if ((this.isDeaccessionedState()) && (this.getPublicationStatuses().size() == 1)) { + myDataJson.add("deaccesioned_is_only_pubstatus", true); + } + + if ((this.getParent() != null) && (!this.getParent().isEmpty())) { + // System.out.println("keys:" + parent.keySet().toString()); + if (this.entity != null && this.entity.isInstanceofDataFile()) { + myDataJson.add("parentIdentifier", this.getParent().get(SolrSearchResult.PARENT_IDENTIFIER)) + .add("parentName", this.getParent().get("name")); + + } else { + // for Dataverse and Dataset, get parent which is a Dataverse + myDataJson.add("parentId", this.getParent().get("id")).add("parentName", this.getParent().get("name")); + } + } + + return myDataJson; + } // getJsonForMydata + + public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, boolean showApiUrls) { + return json(showRelevance, 
showEntityIds, showApiUrls, null, null); + } + + public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, boolean showApiUrls, List metadataFields, Long datasetFileCount) { + if (this.type == null) { + return jsonObjectBuilder(); + } + + String displayName = null; + + String identifierLabel = null; + String datasetCitation = null; + String datasetName = null; + String datasetId = null; + String datasetPersistentId = null; + String filePersistentId = null; + String preferredUrl = null; + String apiUrl = null; + String publisherName = null; + + if (this.type.equals(SearchConstants.DATAVERSES)) { + displayName = this.name; + identifierLabel = "identifier"; + preferredUrl = getHtmlUrl(); + } else if (this.type.equals(SearchConstants.DATASETS)) { + displayName = this.title; + identifierLabel = "global_id"; + preferredUrl = getPersistentUrl(); + publisherName = this.parent.get("name"); + // if + /** + * @todo Should we show the name of the parent dataverse? + */ + } else if (this.type.equals(SearchConstants.FILES)) { + displayName = this.name; + identifierLabel = "file_id"; + preferredUrl = getDownloadUrl(); + /** + * @todo show more information for a file's parent, such as the + * title of the dataset it belongs to. + */ + datasetCitation = parent.get("citation"); + datasetName = parent.get("name"); + datasetId = parent.get("id"); + datasetPersistentId = parent.get(SolrSearchResult.PARENT_IDENTIFIER); + } + + // displayName = null; // testing NullSafeJsonBuilder + // because we are using NullSafeJsonBuilder key/value pairs will be dropped if the value is null + NullSafeJsonBuilder nullSafeJsonBuilder = jsonObjectBuilder().add("name", displayName) + .add("type", getDisplayType(getType())).add("url", preferredUrl).add("image_url", getImageUrl()) + // .add("persistent_url", this.persistentUrl) + // .add("download_url", this.downloadUrl) + /** + * @todo How much value is there in exposing the identifier for + * dataverses? For + */ + .add(identifierLabel, this.identifier) + /** + * @todo Get dataset description from dsDescriptionValue. Also, + * is descriptionNoSnippet the right field to use generally? + * + * @todo What about the fact that datasets can now have multiple + * descriptions? Should we create an array called + * "additional_descriptions" that gets populated if there is + * more than one dataset description? + * + * @todo Why aren't file descriptions ever null? They always + * have an empty string at least. + */ + .add("description", this.descriptionNoSnippet) + /** + * @todo In the future we'd like to support non-public datasets + * per https://github.com/IQSS/dataverse/issues/1299 but for now + * we are only supporting non-public searches. + */ + .add("published_at", getDateTimePublished()) + /** + * @todo Expose MIME Type: + * https://github.com/IQSS/dataverse/issues/1595 + */ + .add("file_type", this.filetype).add("file_content_type", this.fileContentType) + .add("size_in_bytes", getFileSizeInBytes()) + /** + * "md5" was the only possible value so it's hard-coded here but + * we might want to deprecate it someday since we now put the + * MD5 or SHA-1 in "checksum". 
+ */ + .add("md5", getFileMd5()) + .add("checksum", JsonPrinter.getChecksumTypeAndValue(getFileChecksumType(), getFileChecksumValue())) + .add("unf", getUnf()).add("file_persistent_id", this.filePersistentId).add("dataset_name", datasetName) + .add("dataset_id", datasetId).add("publisher", publisherName) + .add("dataset_persistent_id", datasetPersistentId).add("dataset_citation", datasetCitation) + .add("deaccession_reason", this.deaccessionReason).add("citationHtml", this.citationHtml) + .add("identifier_of_dataverse", this.identifierOfDataverse) + .add("name_of_dataverse", this.nameOfDataverse).add("citation", this.citation); + // Now that nullSafeJsonBuilder has been instatiated, check for null before adding to it! + if (showRelevance) { + nullSafeJsonBuilder.add("matches", getRelevance()); + nullSafeJsonBuilder.add("score", getScore()); + } + if (showEntityIds) { + if (this.entityId != null) { + nullSafeJsonBuilder.add("entity_id", this.entityId); + } + } + if (!getPublicationStatuses().isEmpty()) { + nullSafeJsonBuilder.add("publicationStatuses", getPublicationStatusesAsJSON()); + } + + if (this.entity == null) { + + } else { + if (this.entity.isInstanceofDataset()) { + nullSafeJsonBuilder.add("storageIdentifier", this.entity.getStorageIdentifier()); + Dataset ds = (Dataset) this.entity; + DatasetVersion dv = ds.getVersionFromId(this.datasetVersionId); + + if (!dv.getKeywords().isEmpty()) { + JsonArrayBuilder keyWords = Json.createArrayBuilder(); + for (String keyword : dv.getKeywords()) { + keyWords.add(keyword); + } + nullSafeJsonBuilder.add("keywords", keyWords); + } + + JsonArrayBuilder subjects = Json.createArrayBuilder(); + for (String subject : dv.getDatasetSubjects()) { + subjects.add(subject); + } + nullSafeJsonBuilder.add("subjects", subjects); + nullSafeJsonBuilder.add("fileCount", datasetFileCount); + nullSafeJsonBuilder.add("versionId", dv.getId()); + nullSafeJsonBuilder.add("versionState", dv.getVersionState().toString()); + if (this.isPublishedState()) { + nullSafeJsonBuilder.add("majorVersion", dv.getVersionNumber()); + nullSafeJsonBuilder.add("minorVersion", dv.getMinorVersionNumber()); + } + + nullSafeJsonBuilder.add("createdAt", ds.getCreateDate()); + nullSafeJsonBuilder.add("updatedAt", ds.getModificationTime()); + + if (!dv.getDatasetContacts().isEmpty()) { + JsonArrayBuilder contacts = Json.createArrayBuilder(); + NullSafeJsonBuilder nullSafeJsonBuilderInner = jsonObjectBuilder(); + for (String contact[] : dv.getDatasetContacts(false)) { + nullSafeJsonBuilderInner.add("name", contact[0]); + nullSafeJsonBuilderInner.add("affiliation", contact[1]); + contacts.add(nullSafeJsonBuilderInner); + } + nullSafeJsonBuilder.add("contacts", contacts); + } + if (!dv.getRelatedPublications().isEmpty()) { + JsonArrayBuilder relPub = Json.createArrayBuilder(); + NullSafeJsonBuilder inner = jsonObjectBuilder(); + for (DatasetRelPublication dsRelPub : dv.getRelatedPublications()) { + inner.add("title", dsRelPub.getTitle()); + inner.add("citation", dsRelPub.getText()); + inner.add("url", dsRelPub.getUrl()); + relPub.add(inner); + } + nullSafeJsonBuilder.add("publications", relPub); + } + + if (!dv.getDatasetProducers().isEmpty()) { + JsonArrayBuilder producers = Json.createArrayBuilder(); + for (String[] producer : dv.getDatasetProducers()) { + producers.add(producer[0]); + } + nullSafeJsonBuilder.add("producers", producers); + } + if (!dv.getRelatedMaterial().isEmpty()) { + JsonArrayBuilder relatedMaterials = Json.createArrayBuilder(); + for (String relatedMaterial : 
dv.getRelatedMaterial()) {
+ relatedMaterials.add(relatedMaterial);
+ }
+ nullSafeJsonBuilder.add("relatedMaterial", relatedMaterials);
+ }
+
+ if (!dv.getGeographicCoverage().isEmpty()) {
+ JsonArrayBuilder geoCov = Json.createArrayBuilder();
+ NullSafeJsonBuilder inner = jsonObjectBuilder();
+ for (String ind[] : dv.getGeographicCoverage()) {
+ inner.add("country", ind[0]);
+ inner.add("state", ind[1]);
+ inner.add("city", ind[2]);
+ inner.add("other", ind[3]);
+ geoCov.add(inner);
+ }
+ nullSafeJsonBuilder.add("geographicCoverage", geoCov);
+ }
+ if (!dv.getDataSource().isEmpty()) {
+ JsonArrayBuilder dataSources = Json.createArrayBuilder();
+ for (String dsource : dv.getDataSource()) {
+ dataSources.add(dsource);
+ }
+ nullSafeJsonBuilder.add("dataSources", dataSources);
+ }
+
+ if (CollectionUtils.isNotEmpty(metadataFields)) {
+ // map of metadata block name -> requested field names
+ Map<String, List<String>> metadataFieldMapNames = computeRequestedMetadataFieldMapNames(
+ metadataFields);
+
+ // add a metadataBlocks object to wrap all requested fields
+ NullSafeJsonBuilder metadataFieldBuilder = jsonObjectBuilder();
+
+ Map<MetadataBlock, List<DatasetField>> groupedFields = DatasetField
+ .groupByBlock(dv.getFlatDatasetFields());
+ json(metadataFieldMapNames, groupedFields, metadataFieldBuilder);
+
+ nullSafeJsonBuilder.add("metadataBlocks", metadataFieldBuilder);
+ }
+ }
+ }
+
+ if (showApiUrls) {
+ /**
+ * @todo We should probably have a metadata_url or api_url concept
+ * enabled by default, not hidden behind an undocumented boolean.
+ * For datasets, this would be http://example.com/api/datasets/10 or
+ * whatever (to get more detailed JSON), but right now this requires
+ * an API token. Discuss at
+ * https://docs.google.com/document/d/1d8sT2GLSavgiAuMTVX8KzTCX0lROEET1edhvHHRDZOs/edit?usp=sharing";
+ */
+ if (getApiUrl() != null) {
+ nullSafeJsonBuilder.add("api_url", getApiUrl());
+ }
+ }
+ // NullSafeJsonBuilder is awesome but can't build null safe arrays.
:(
+ if (!datasetAuthors.isEmpty()) {
+ JsonArrayBuilder authors = Json.createArrayBuilder();
+ for (String datasetAuthor : datasetAuthors) {
+ authors.add(datasetAuthor);
+ }
+ nullSafeJsonBuilder.add("authors", authors);
+ }
+ return nullSafeJsonBuilder;
+ }
+
+ private void json(Map<String, List<String>> metadataFieldMapNames,
+ Map<MetadataBlock, List<DatasetField>> groupedFields, NullSafeJsonBuilder metadataFieldBuilder) {
+ for (Map.Entry<String, List<String>> metadataFieldNamesEntry : metadataFieldMapNames.entrySet()) {
+ String metadataBlockName = metadataFieldNamesEntry.getKey();
+ List<String> metadataBlockFieldNames = metadataFieldNamesEntry.getValue();
+ for (MetadataBlock metadataBlock : groupedFields.keySet()) {
+ if (metadataBlockName.equals(metadataBlock.getName())) {
+ // create metadataBlock object
+ NullSafeJsonBuilder metadataBlockBuilder = jsonObjectBuilder();
+ metadataBlockBuilder.add("displayName", metadataBlock.getDisplayName());
+ JsonArrayBuilder fieldsArray = Json.createArrayBuilder();
+
+ List<DatasetField> datasetFields = groupedFields.get(metadataBlock);
+ for (DatasetField datasetField : datasetFields) {
+ if (metadataBlockFieldNames.contains("*")
+ || metadataBlockFieldNames.contains(datasetField.getDatasetFieldType().getName())) {
+ if (datasetField.getDatasetFieldType().isCompound() || !datasetField.getDatasetFieldType().isHasParent()) {
+ JsonObject item = JsonPrinter.json(datasetField);
+ if (item != null) {
+ fieldsArray.add(item);
+ }
+ }
+ }
+ }
+ // with a "fields" array to hold all requested properties
+ metadataBlockBuilder.add("fields", fieldsArray);
+
+ metadataFieldBuilder.add(metadataBlock.getName(), metadataBlockBuilder);
+ }
+ }
+ }
+ }
+
+ private Map<String, List<String>> computeRequestedMetadataFieldMapNames(List<String> metadataFields) {
+ Map<String, List<String>> metadataFieldMapNames = new HashMap<>();
+ for (String metadataField : metadataFields) {
+ String parts[] = metadataField.split(":");
+ if (parts.length == 2) {
+ List<String> metadataFieldNames = metadataFieldMapNames.get(parts[0]);
+ if (metadataFieldNames == null) {
+ metadataFieldNames = new ArrayList<>();
+ metadataFieldMapNames.put(parts[0], metadataFieldNames);
+ }
+ metadataFieldNames.add(parts[1]);
+ }
+ }
+ return metadataFieldMapNames;
+ }
+
+ private String getDateTimePublished() {
+ String datePublished = null;
+ if (draftState == false) {
+ datePublished = releaseOrCreateDate == null ?
null : Util.getDateTimeFormat().format(releaseOrCreateDate);
+ }
+ return datePublished;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public Long getEntityId() {
+ return entityId;
+ }
+
+ public void setEntityId(Long entityId) {
+ this.entityId = entityId;
+ }
+
+ public DvObject getEntity() {
+ return entity;
+ }
+
+ public void setEntity(DvObject entity) {
+ this.entity = entity;
+ }
+
+ public String getIdentifier() {
+ return identifier;
+ }
+
+ public void setIdentifier(String identifier) {
+ this.identifier = identifier;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public String getHtmlUrl() {
+ return htmlUrl;
+ }
+
+ public void setHtmlUrl(String htmlUrl) {
+ this.htmlUrl = htmlUrl;
+ }
+
+ public String getPersistentUrl() {
+ return persistentUrl;
+ }
+
+ public void setPersistentUrl(String persistentUrl) {
+ this.persistentUrl = persistentUrl;
+ }
+
+ public String getDownloadUrl() {
+ return downloadUrl;
+ }
+
+ public void setDownloadUrl(String downloadUrl) {
+ this.downloadUrl = downloadUrl;
+ }
+
+ public String getApiUrl() {
+ return apiUrl;
+ }
+
+ public void setApiUrl(String apiUrl) {
+ this.apiUrl = apiUrl;
+ }
+
+ public String getImageUrl() {
+ return imageUrl;
+ }
+
+ public void setImageUrl(String imageUrl) {
+ this.imageUrl = imageUrl;
+ }
+
+ public DatasetThumbnail getDatasetThumbnail() {
+ return datasetThumbnail;
+ }
+
+ public void setDatasetThumbnail(DatasetThumbnail datasetThumbnail) {
+ this.datasetThumbnail = datasetThumbnail;
+ }
+
+ public String getQuery() {
+ return query;
+ }
+
+ public void setQuery(String query) {
+ this.query = query;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getTitle() {
+ return title;
+ }
+
+ public void setTitle(String title) {
+ this.title = title;
+ }
+
+ public String getDescriptionNoSnippet() {
+ return descriptionNoSnippet;
+ }
+
+ public void setDescriptionNoSnippet(String descriptionNoSnippet) {
+ this.descriptionNoSnippet = descriptionNoSnippet;
+ }
+
+ public List<String> getDatasetAuthors() {
+ return datasetAuthors;
+ }
+
+ public void setDatasetAuthors(List<String> datasetAuthors) {
+ this.datasetAuthors = datasetAuthors;
+ }
+
+ public String getDeaccessionReason() {
+ return deaccessionReason;
+ }
+
+ public void setDeaccessionReason(String deaccessionReason) {
+ this.deaccessionReason = deaccessionReason;
+ }
+
+ public List<Highlight> getHighlightsAsListOrig() {
+ return highlightsAsList;
+ }
+
+ public List<Highlight> getHighlightsAsList() {
+ List<Highlight> filtered = new ArrayList<>();
+ for (Highlight highlight : highlightsAsList) {
+ String field = highlight.getSolrField().getNameSearchable();
+ /**
+ * @todo don't hard code "title" here. And should we collapse name
+ * and title together anyway?
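+ * (The filter below drops highlights on the name, description,
+ * dataset description, affiliation, and title fields.)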
+ */
+ if (!field.equals(SearchFields.NAME) && !field.equals(SearchFields.DESCRIPTION)
+ && !field.equals(SearchFields.DATASET_DESCRIPTION) && !field.equals(SearchFields.AFFILIATION)
+ && !field.equals("title")) {
+ filtered.add(highlight);
+ }
+ }
+ return filtered;
+ }
+
+ public void setHighlightsAsList(List<Highlight> highlightsAsList) {
+ this.highlightsAsList = highlightsAsList;
+ }
+
+ public List<String> getFileCategories() {
+ return fileCategories;
+ }
+
+ public void setFileCategories(List<String> fileCategories) {
+ this.fileCategories = fileCategories;
+ }
+
+ public List<String> getTabularDataTags() {
+ return tabularDataTags;
+ }
+
+ public void setTabularDataTags(List<String> tabularDataTags) {
+ this.tabularDataTags = tabularDataTags;
+ }
+
+ public Map<String, String> getParent() {
+ return parent;
+ }
+
+ public Long getParentIdAsLong() {
+
+ if (this.getParent() == null) {
+ return null;
+ }
+ if (!this.getParent().containsKey("id")) {
+ return null;
+ }
+
+ String parentIdString = getParent().get("id");
+ if (parentIdString == null) {
+ return null;
+ }
+
+ try {
+ return Long.parseLong(parentIdString);
+ } catch (NumberFormatException ex) {
+ return null;
+ }
+ }
+
+ public void setParent(Map<String, String> parent) {
+ this.parent = parent;
+ }
+
+ public String getDataverseAffiliation() {
+ return dataverseAffiliation;
+ }
+
+ public void setDataverseAffiliation(String dataverseAffiliation) {
+ this.dataverseAffiliation = dataverseAffiliation;
+ }
+
+ public String getCitation() {
+ return citation;
+ }
+
+ public void setCitation(String citation) {
+ this.citation = citation;
+ }
+
+ public String getCitationHtml() {
+ return citationHtml;
+ }
+
+ public void setCitationHtml(String citationHtml) {
+ this.citationHtml = citationHtml;
+ }
 public String getDatasetType() {
 return datasetType;
@@ -961,317 +990,319 @@ public void setDatasetType(String datasetType) {
 this.datasetType = datasetType;
 }
- public String getFiletype() {
- return filetype;
- }
+ public String getFiletype() {
+ return filetype;
+ }
+
+ public void setFiletype(String filetype) {
+ this.filetype = filetype;
+ }
+
+ public String getFileContentType() {
+ return fileContentType;
+ }
+
+ public void setFileContentType(String fileContentType) {
+ this.fileContentType = fileContentType;
+ }
+
+ public String getUnf() {
+ return unf;
+ }
+
+ public void setUnf(String unf) {
+ this.unf = unf;
+ }
+
+ public Long getFileSizeInBytes() {
+ return fileSizeInBytes;
+ }
+
+ public void setFileSizeInBytes(Long fileSizeInBytes) {
+ this.fileSizeInBytes = fileSizeInBytes;
+ }
+
+ public String getFileMd5() {
+ if (DataFile.ChecksumType.MD5.equals(getFileChecksumType())) {
+ return fileMd5;
+ } else {
+ return null;
+ }
+ }
- public void setFiletype(String filetype) {
- this.filetype = filetype;
- }
+ public void setFileMd5(String fileMd5) {
+ this.fileMd5 = fileMd5;
+ }
+
+ public DataFile.ChecksumType getFileChecksumType() {
+ return fileChecksumType;
+ }
+
+ public void setFileChecksumType(DataFile.ChecksumType fileChecksumType) {
+ this.fileChecksumType = fileChecksumType;
+ }
+
+ public String getFileChecksumValue() {
+ return fileChecksumValue;
+ }
+
+ public void setFileChecksumValue(String fileChecksumValue) {
+ this.fileChecksumValue = fileChecksumValue;
+ }
+
+ public String getNameSort() {
+ return nameSort;
+ }
+
+ public void setNameSort(String nameSort) {
+ this.nameSort = nameSort;
+ }
+
+ public String getStatus() {
+ return status;
+ }
- public String getFileContentType() {
- return fileContentType;
- }
-
- public void setFileContentType(String fileContentType) {
-
this.fileContentType = fileContentType; - } - - public String getUnf() { - return unf; - } - - public void setUnf(String unf) { - this.unf = unf; - } - - public Long getFileSizeInBytes() { - return fileSizeInBytes; - } - - public void setFileSizeInBytes(Long fileSizeInBytes) { - this.fileSizeInBytes = fileSizeInBytes; - } - - public String getFileMd5() { - if (DataFile.ChecksumType.MD5.equals(getFileChecksumType())) { - return fileMd5; - } else { - return null; - } - } - - public void setFileMd5(String fileMd5) { - this.fileMd5 = fileMd5; - } - - public DataFile.ChecksumType getFileChecksumType() { - return fileChecksumType; - } - - public void setFileChecksumType(DataFile.ChecksumType fileChecksumType) { - this.fileChecksumType = fileChecksumType; - } - - public String getFileChecksumValue() { - return fileChecksumValue; - } - - public void setFileChecksumValue(String fileChecksumValue) { - this.fileChecksumValue = fileChecksumValue; - } - - public String getNameSort() { - return nameSort; - } - - public void setNameSort(String nameSort) { - this.nameSort = nameSort; - } - - public String getStatus() { - return status; - } - - void setStatus(String status) { - this.status = status; - } - - public Date getReleaseOrCreateDate() { - return releaseOrCreateDate; - } - - public void setReleaseOrCreateDate(Date releaseOrCreateDate) { - this.releaseOrCreateDate = releaseOrCreateDate; - } - - public String getDateToDisplayOnCard() { - return DateUtil.formatDate(this.releaseOrCreateDate); - } - - public long getDatasetVersionId() { - return datasetVersionId; - } - - public void setDatasetVersionId(long datasetVersionId) { - this.datasetVersionId = datasetVersionId; - } - - public String getVersionNumberFriendly() { - return versionNumberFriendly; - } - - public void setVersionNumberFriendly(String versionNumberFriendly) { - this.versionNumberFriendly = versionNumberFriendly; - } - - public String getDatasetUrl() { - String failSafeUrl = "/dataset.xhtml?id=" + entityId + "&versionId=" + datasetVersionId; - if (identifier != null) { - /** - * Unfortunately, colons in the globalId (doi:10...) are converted to %3A (doi%3A10...). To prevent this we switched many JSF tags to a plain "a" tag - * with an href as suggested at http://stackoverflow.com/questions/24733959/houtputlink-value-escaped - */ - String badString = "null"; - if (!identifier.contains(badString)) { - if (entity != null && entity instanceof Dataset) { - if (this.isHarvested() && ((Dataset) entity).getHarvestedFrom() != null) { - String remoteArchiveUrl = ((Dataset) entity).getRemoteArchiveURL(); - if (remoteArchiveUrl != null) { - return remoteArchiveUrl; - } - return null; - } - } - if (isDraftState()) { - return "/dataset.xhtml?persistentId=" + identifier + "&version=DRAFT"; - } - return "/dataset.xhtml?persistentId=" + identifier; - } else { - logger.info("Dataset identifier/globalId contains \"" + badString - + "\" perhaps due to https://github.com/IQSS/dataverse/issues/1147 . Fix data in database and reindex. Returning failsafe URL: " - + failSafeUrl); - return failSafeUrl; - } - } else { - logger.info("Dataset identifier/globalId was null. 
Returning failsafe URL: " + failSafeUrl); - return failSafeUrl; - } - } - - public String getFileParentIdentifier() { - if (entity == null) { - return null; - } - if (entity instanceof DataFile) { - return parent.get(PARENT_IDENTIFIER); // Dataset globalID - } - - return null; - // if (entity) - } - - public String getFilePersistentId() { - return filePersistentId; - } - - public void setFilePersistentId(String pid) { - filePersistentId = pid; - } - - public String getFileUrl() { - // Nothing special needs to be done for harvested file URLs: - // simply directing these to the local dataset.xhtml for this dataset - // will take care of it - because DatasetPage will issue a redirect - // to the remote archive URL. - // This is true AS OF 4.2.4, FEB. 2016! - We'll probably want to make - // .getRemoteArchiveURL() methods, both in DataFile and Dataset objects, - // work again at some point in the future. - /* + void setStatus(String status) { + this.status = status; + } + + public Date getReleaseOrCreateDate() { + return releaseOrCreateDate; + } + + public void setReleaseOrCreateDate(Date releaseOrCreateDate) { + this.releaseOrCreateDate = releaseOrCreateDate; + } + + public String getDateToDisplayOnCard() { + return DateUtil.formatDate(this.releaseOrCreateDate); + } + + public long getDatasetVersionId() { + return datasetVersionId; + } + + public void setDatasetVersionId(long datasetVersionId) { + this.datasetVersionId = datasetVersionId; + } + + public String getVersionNumberFriendly() { + return versionNumberFriendly; + } + + public void setVersionNumberFriendly(String versionNumberFriendly) { + this.versionNumberFriendly = versionNumberFriendly; + } + + public String getDatasetUrl() { + String failSafeUrl = "/dataset.xhtml?id=" + entityId + "&versionId=" + datasetVersionId; + if (identifier != null) { + /** + * Unfortunately, colons in the globalId (doi:10...) are converted + * to %3A (doi%3A10...). To prevent this we switched many JSF tags + * to a plain "a" tag with an href as suggested at + * http://stackoverflow.com/questions/24733959/houtputlink-value-escaped + */ + String badString = "null"; + if (!identifier.contains(badString)) { + if (entity != null && entity instanceof Dataset) { + if (this.isHarvested() && ((Dataset) entity).getHarvestedFrom() != null) { + String remoteArchiveUrl = ((Dataset) entity).getRemoteArchiveURL(); + if (remoteArchiveUrl != null) { + return remoteArchiveUrl; + } + return null; + } + } + if (isDraftState()) { + return "/dataset.xhtml?persistentId=" + identifier + "&version=DRAFT"; + } + return "/dataset.xhtml?persistentId=" + identifier; + } else { + logger.info("Dataset identifier/globalId contains \"" + badString + + "\" perhaps due to https://github.com/IQSS/dataverse/issues/1147 . Fix data in database and reindex. Returning failsafe URL: " + + failSafeUrl); + return failSafeUrl; + } + } else { + logger.info("Dataset identifier/globalId was null. 
Returning failsafe URL: " + failSafeUrl); + return failSafeUrl; + } + } + + public String getFileParentIdentifier() { + if (entity == null) { + return null; + } + if (entity instanceof DataFile) { + return parent.get(PARENT_IDENTIFIER); // Dataset globalID + } + + return null; + // if (entity) + } + + public String getFilePersistentId() { + return filePersistentId; + } + + public void setFilePersistentId(String pid) { + filePersistentId = pid; + } + + public String getFileUrl() { + // Nothing special needs to be done for harvested file URLs: + // simply directing these to the local dataset.xhtml for this dataset + // will take care of it - because DatasetPage will issue a redirect + // to the remote archive URL. + // This is true AS OF 4.2.4, FEB. 2016! - We'll probably want to make + // .getRemoteArchiveURL() methods, both in DataFile and Dataset objects, + // work again at some point in the future. + /* * if (entity != null && entity instanceof DataFile && this.isHarvested()) { String remoteArchiveUrl = ((DataFile) entity).getRemoteArchiveURL(); if * (remoteArchiveUrl != null) { return remoteArchiveUrl; } return null; } - */ + */ if (entity.getIdentifier() != null) { GlobalId entityPid = entity.getGlobalId(); return "/file.xhtml?persistentId=" + ((entityPid != null) ? entityPid.asString() : null); } - return "/file.xhtml?fileId=" + entity.getId() + "&datasetVersionId=" + datasetVersionId; + return "/file.xhtml?fileId=" + entity.getId() + "&datasetVersionId=" + datasetVersionId; - /* + /* * if (parentDatasetGlobalId != null) { return "/dataset.xhtml?persistentId=" + parentDatasetGlobalId; } else { return "/dataset.xhtml?id=" + * parent.get(SearchFields.ID) + "&versionId=" + datasetVersionId; } - */ - } + */ + } - public String getFileDatasetUrl() { - // See the comment in the getFileUrl() method above. -- L.A. 4.2.4 - /* + public String getFileDatasetUrl() { + // See the comment in the getFileUrl() method above. -- L.A. 
4.2.4 + /* * if (entity != null && entity instanceof DataFile && this.isHarvested()) { String remoteArchiveUrl = ((DataFile) entity).getRemoteArchiveURL(); if * (remoteArchiveUrl != null) { return remoteArchiveUrl; } return null; } - */ - - String parentDatasetGlobalId = parent.get(PARENT_IDENTIFIER); - - if (parentDatasetGlobalId != null) { - if (isDraftState()) { - return "/dataset.xhtml?persistentId=" + parentDatasetGlobalId + "&version=DRAFT"; - } else { - return "/dataset.xhtml?persistentId=" + parentDatasetGlobalId; - } - } else { - return "/dataset.xhtml?id=" + parent.get(SearchFields.ID) + "&versionId=" + datasetVersionId; - } - } - - /** - * @return the dataverseAlias - */ - public String getDataverseAlias() { - return dataverseAlias; - } - - /** - * @param dataverseAlias the dataverseAlias to set - */ - public void setDataverseAlias(String dataverseAlias) { - this.dataverseAlias = dataverseAlias; - } - - /** - * @return the dataverseParentAlias - */ - public String getDataverseParentAlias() { - return dataverseParentAlias; - } - - /** - * @param dataverseParentAlias the dataverseParentAlias to set - */ - public void setDataverseParentAlias(String dataverseParentAlias) { - this.dataverseParentAlias = dataverseParentAlias; - } - - public float getScore() { - return score; - } - - public void setScore(float score) { - this.score = score; - } - - private String getDisplayType(String type) { - if (type.equals(SearchConstants.DATAVERSES)) { - return SearchConstants.DATAVERSE; - } else if (type.equals(SearchConstants.DATASETS)) { - return SearchConstants.DATASET; - } else if (type.equals(SearchConstants.FILES)) { - return SearchConstants.FILE; - } else { - return null; - } - } - - /* + */ + + String parentDatasetGlobalId = parent.get(PARENT_IDENTIFIER); + + if (parentDatasetGlobalId != null) { + if (isDraftState()) { + return "/dataset.xhtml?persistentId=" + parentDatasetGlobalId + "&version=DRAFT"; + } else { + return "/dataset.xhtml?persistentId=" + parentDatasetGlobalId; + } + } else { + return "/dataset.xhtml?id=" + parent.get(SearchFields.ID) + "&versionId=" + datasetVersionId; + } + } + + /** + * @return the dataverseAlias + */ + public String getDataverseAlias() { + return dataverseAlias; + } + + /** + * @param dataverseAlias the dataverseAlias to set + */ + public void setDataverseAlias(String dataverseAlias) { + this.dataverseAlias = dataverseAlias; + } + + /** + * @return the dataverseParentAlias + */ + public String getDataverseParentAlias() { + return dataverseParentAlias; + } + + /** + * @param dataverseParentAlias the dataverseParentAlias to set + */ + public void setDataverseParentAlias(String dataverseParentAlias) { + this.dataverseParentAlias = dataverseParentAlias; + } + + public float getScore() { + return score; + } + + public void setScore(float score) { + this.score = score; + } + + private String getDisplayType(String type) { + if (type.equals(SearchConstants.DATAVERSES)) { + return SearchConstants.DATAVERSE; + } else if (type.equals(SearchConstants.DATASETS)) { + return SearchConstants.DATASET; + } else if (type.equals(SearchConstants.FILES)) { + return SearchConstants.FILE; + } else { + return null; + } + } + + /* * public JsonArrayBuilder getUserRolesAsJson() { * * JsonArrayBuilder jsonRoleStrings = Json.createArrayBuilder(); for (String role : this.getUserRole()) { jsonRoleStrings.add(role); } return * jsonRoleStrings; } - */ - public List getUserRole() { - return userRole; - } + */ + public List getUserRole() { + return userRole; + } - public void 
setUserRole(List userRole) { - this.userRole = userRole; - } + public void setUserRole(List userRole) { + this.userRole = userRole; + } - public String getIdentifierOfDataverse() { - return identifierOfDataverse; - } + public String getIdentifierOfDataverse() { + return identifierOfDataverse; + } - public void setIdentifierOfDataverse(String id) { - this.identifierOfDataverse = id; - } + public void setIdentifierOfDataverse(String id) { + this.identifierOfDataverse = id; + } - public String getNameOfDataverse() { - return nameOfDataverse; - } + public String getNameOfDataverse() { + return nameOfDataverse; + } - public void setNameOfDataverse(String id) { - this.nameOfDataverse = id; - } + public void setNameOfDataverse(String id) { + this.nameOfDataverse = id; + } - public String getExternalStatus() { - return externalStatus; - } + public String getExternalStatus() { + return externalStatus; + } - public void setExternalStatus(String externalStatus) { - this.externalStatus = externalStatus; + public void setExternalStatus(String externalStatus) { + this.externalStatus = externalStatus; - } + } - public Long getEmbargoEndDate() { - return embargoEndDate; - } + public Long getEmbargoEndDate() { + return embargoEndDate; + } - public void setEmbargoEndDate(Long embargoEndDate) { - this.embargoEndDate = embargoEndDate; - } + public void setEmbargoEndDate(Long embargoEndDate) { + this.embargoEndDate = embargoEndDate; + } - public Long getRetentionEndDate() { - return retentionEndDate; - } + public Long getRetentionEndDate() { + return retentionEndDate; + } - public void setRetentionEndDate(Long retentionEndDate) { - this.retentionEndDate = retentionEndDate; - } + public void setRetentionEndDate(Long retentionEndDate) { + this.retentionEndDate = retentionEndDate; + } - public void setDatasetValid(Boolean datasetValid) { - this.datasetValid = datasetValid == null || Boolean.valueOf(datasetValid); - } + public void setDatasetValid(Boolean datasetValid) { + this.datasetValid = datasetValid == null || Boolean.valueOf(datasetValid); + } - public boolean isValid(Predicate canUpdateDataset) { + public boolean isValid(Predicate canUpdateDataset) { if (this.datasetValid) { return true; } @@ -1284,6 +1315,6 @@ public boolean isValid(Predicate canUpdateDataset) { if (!JvmSettings.UI_SHOW_VALIDITY_LABEL_WHEN_PUBLISHED.lookupOptional(Boolean.class).orElse(true)) { return true; } - return !canUpdateDataset.test(this); + return !canUpdateDataset.test(this); } } From 9c44b3066f44cac7afefaa7a8fdb7cca07d8f9fc Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 22 Aug 2024 16:53:56 -0400 Subject: [PATCH 37/45] simplify logic #10517 --- .../iq/dataverse/util/json/JsonParser.java | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 438ef954300..2f01c9bc2f2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -332,18 +332,13 @@ public Dataset parseDataset(JsonObject obj) throws JsonParseException { }else { throw new JsonParseException("Specified metadatalanguage not allowed."); } - DatasetType defaultDatasetType = datasetTypeService.getByName(DatasetType.DEFAULT_DATASET_TYPE); - String datasetTypeIn = obj.getString("datasetType", null); + String datasetTypeIn = obj.getString("datasetType", DatasetType.DEFAULT_DATASET_TYPE); logger.fine("datasetTypeIn: 
" + datasetTypeIn); - if (datasetTypeIn == null) { - dataset.setDatasetType(defaultDatasetType); + DatasetType datasetType = datasetTypeService.getByName(datasetTypeIn); + if (datasetType != null) { + dataset.setDatasetType(datasetType); } else { - DatasetType datasetType = datasetTypeService.getByName(datasetTypeIn); - if (datasetType != null) { - dataset.setDatasetType(datasetType); - } else { - throw new JsonParseException("Invalid dataset type: " + datasetTypeIn); - } + throw new JsonParseException("Invalid dataset type: " + datasetTypeIn); } DatasetVersion dsv = new DatasetVersion(); From 68e4a605563bbfb60d6049590b1c23979d4152a3 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 22 Aug 2024 16:57:49 -0400 Subject: [PATCH 38/45] remove unused import #10517 --- .../java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java index 04a6ebdb6c4..f45a9058e7c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java @@ -6,7 +6,6 @@ import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; -import edu.harvard.iq.dataverse.dataset.DatasetType; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.json.JsonLDTerm; From 04f1c7cb5b8511f0d13db3062f737041e14cb7b0 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 26 Aug 2024 16:38:52 -0400 Subject: [PATCH 39/45] name of dataset type cannot be only digits #10517 --- doc/sphinx-guides/source/api/native-api.rst | 2 +- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 4 ++++ .../java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java | 5 +++++ 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 31afb042f29..b161fa7e6ce 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3123,7 +3123,7 @@ Add Dataset Type Note: Before you add any types of your own, there should be a single type called "dataset". If you add "software" or "workflow", these types will be sent to DataCite (if you use DataCite). Otherwise, the only functionality you gain currently from adding types is an entry in the "Dataset Type" facet but be advised that if you add a type other than "software" or "workflow", you will need to add your new type to your Bundle.properties file for it to appear in Title Case rather than lower case in the "Dataset Type" facet. -With all that said, we'll add a "software" type in the example below. This API endpoint is superuser only. +With all that said, we'll add a "software" type in the example below. This API endpoint is superuser only. The "name" of a type cannot be only digits. .. 
code-block:: bash

diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 2ebfd4aa308..034ba4536a1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -5139,6 +5139,10 @@ public Response addDatasetType(@Context ContainerRequestContext crc, String json
 if (nameIn == null) {
 return error(BAD_REQUEST, "A name for the dataset type is required");
 }
+ if (StringUtils.isNumeric(nameIn)) {
+ // getDatasetTypes supports lookup by id or name, so we don't want a name that looks like an id
+ return error(BAD_REQUEST, "The name of the type cannot be only digits.");
+ }
 try {
 DatasetType datasetType = new DatasetType();
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java
index 072e4878663..35a354c6beb 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java
@@ -217,6 +217,11 @@ public void testAddAndDeleteDatasetType() {
 badJson.prettyPrint();
 badJson.then().assertThat().statusCode(BAD_REQUEST.getStatusCode());

+ String numbersOnlyIn = Json.createObjectBuilder().add("name", "12345").build().toString();
+ Response numbersOnly = UtilIT.addDatasetType(numbersOnlyIn, apiToken);
+ numbersOnly.prettyPrint();
+ numbersOnly.then().assertThat().statusCode(BAD_REQUEST.getStatusCode());
+
 String randomName = UUID.randomUUID().toString().substring(0, 8);
 String jsonIn = Json.createObjectBuilder().add("name", randomName).build().toString();

From 42ff5043b458199a83f759db1976ab6a16249be4 Mon Sep 17 00:00:00 2001
From: Stephen Kraffmiller
Date: Thu, 29 Aug 2024 09:54:22 -0400
Subject: [PATCH 40/45] #10517 fix typo

---
 src/main/java/propertyFiles/Bundle.properties | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties
index e7862118f7d..fc13cc1d987 100644
--- a/src/main/java/propertyFiles/Bundle.properties
+++ b/src/main/java/propertyFiles/Bundle.properties
@@ -8,7 +8,7 @@ passwd=Password
 # we need to add the rest of the types here for two reasons. First, we want
 # translators to be able to translate these types. Second, in English it looks
 # weird to have only "Dataset" capitalized in the facet but not "software" and
-# "workflow". This capitalization (looking up here in the bundel) is done by
+# "workflow". This capitalization (looking up here in the bundle) is done by
 # SearchServiceBean near the comment "This is where facets are capitalized".
dataset=Dataset
 software=Software

From 4842d9227f35a05eea550b68311fcb807d86e203 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Wed, 4 Sep 2024 09:45:55 -0400
Subject: [PATCH 41/45] remove unused imports #10517

---
 src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java | 2 --
 1 file changed, 2 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
index f1e6e5bc950..abf6541b9ba 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
@@ -18,13 +18,11 @@
 import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
-import edu.harvard.iq.dataverse.dataset.DatasetType;
 import edu.harvard.iq.dataverse.dataverse.DataverseUtil;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.impl.*;
 import edu.harvard.iq.dataverse.pidproviders.PidProvider;
 import edu.harvard.iq.dataverse.pidproviders.PidUtil;
-import edu.harvard.iq.dataverse.settings.FeatureFlags;
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;

From 673d775cd54989f9a29fd6cfbe1722a45e84b861 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Wed, 4 Sep 2024 10:02:03 -0400
Subject: [PATCH 42/45] add test to assert capitalization of Dataset and Software #10517

---
 .../iq/dataverse/api/DatasetTypesIT.java | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java
index 35a354c6beb..1dec51cc3ef 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java
@@ -87,6 +87,24 @@ public void testCreateSoftwareDatasetNative() {
 UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode());
 UtilIT.publishDatasetViaNativeApi(datasetPid, "major", apiToken).then().assertThat().statusCode(OK.getStatusCode());

+ Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+ createDataset.prettyPrint();
+ createDataset.then().assertThat().statusCode(CREATED.getStatusCode());
+
+ String dataset2Pid = JsonPath.from(createDataset.getBody().asString()).getString("data.persistentId");
+
+ UtilIT.publishDatasetViaNativeApi(dataset2Pid, "major", apiToken).then().assertThat().statusCode(OK.getStatusCode());
+
+ Response searchCollection = UtilIT.searchAndShowFacets("parentName:" + dataverseAlias, null);
+ searchCollection.prettyPrint();
+ searchCollection.then().assertThat()
+ .body("data.total_count", CoreMatchers.is(2))
+ .body("data.count_in_response", CoreMatchers.is(2))
+ .body("data.facets[0].datasetType.friendly", CoreMatchers.is("Dataset Type"))
+ .body("data.facets[0].datasetType.labels[0].Dataset", CoreMatchers.is(1))
+ .body("data.facets[0].datasetType.labels[1].Software", CoreMatchers.is(1))
+ .statusCode(OK.getStatusCode());
+
 // Response searchAsGuest = UtilIT.search(SearchFields.DATASET_TYPE + ":software", null);
 // searchAsGuest.prettyPrint();
 // searchAsGuest.then().assertThat()

From 78ca1a59570fa1158f9b08c3ab6ff979d9ee5afd
From: Philip Durbin
Date: Wed, 4 Sep 2024 15:21:32 -0400
Subject: [PATCH 43/45] bump sql script version #10517

---
 src/main/resources/db/migration/{V6.3.0.2.sql => V6.3.0.3.sql} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename src/main/resources/db/migration/{V6.3.0.2.sql => V6.3.0.3.sql} (100%)

diff --git a/src/main/resources/db/migration/V6.3.0.2.sql b/src/main/resources/db/migration/V6.3.0.3.sql
similarity index 100%
rename from src/main/resources/db/migration/V6.3.0.2.sql
rename to src/main/resources/db/migration/V6.3.0.3.sql

From 486dd555c339d2d42ec2ae0f04b03bf860ed6112 Mon Sep 17 00:00:00 2001
From: sbondka <145585953+sbondka@users.noreply.github.com>
Date: Wed, 4 Sep 2024 22:51:52 +0200
Subject: [PATCH 44/45] JDD Metrics: Label KO #10123 (#10124)

* remove parentheses

* Correction of the parenthesis display
---
 src/main/webapp/dataset.xhtml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml
index 0118e6decb2..910cce405dd 100644
--- a/src/main/webapp/dataset.xhtml
+++ b/src/main/webapp/dataset.xhtml
@@ -595,13 +595,13 @@
[hunk body unrecoverable: the JSF markup on these lines was stripped during text extraction; per the commit message and the surviving "- )" fragment, the change corrects the display of parentheses around the dataset metrics labels]
From b7b9b7dddecdfc8318bd19f3f302145e3c3a9afd Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 4 Sep 2024 16:54:53 -0400 Subject: [PATCH 45/45] conditional INSERT of dataset type #10517 --- src/main/resources/db/migration/V6.3.0.3.sql | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/src/main/resources/db/migration/V6.3.0.3.sql b/src/main/resources/db/migration/V6.3.0.3.sql index 437572f3f0c..ece87767bcb 100644 --- a/src/main/resources/db/migration/V6.3.0.3.sql +++ b/src/main/resources/db/migration/V6.3.0.3.sql @@ -1,7 +1,14 @@ -- Dataset types have been added. See #10517 and #10694 -- --- Insert the default dataset type: dataset. -INSERT INTO datasettype (name) VALUES ('dataset'); +-- Insert the default dataset type: dataset (if not present). +-- Inspired by https://stackoverflow.com/questions/4069718/postgres-insert-if-does-not-exist-already/13342031#13342031 +INSERT INTO datasettype + (name) +SELECT 'dataset' +WHERE + NOT EXISTS ( + SELECT name FROM datasettype WHERE name = 'dataset' + ); -- -- Add the new column (if it doesn't exist). ALTER TABLE dataset ADD COLUMN IF NOT EXISTS datasettype_id bigint;
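
A note on the idempotent INSERT in the patch above: on PostgreSQL 9.5 and later, the same effect is often written with ``INSERT ... ON CONFLICT DO NOTHING``. The sketch below is only an alternative shown for comparison, not what the migration uses, and it assumes a unique constraint (or unique index) on ``datasettype.name``; without one, PostgreSQL rejects the ``(name)`` conflict target.

.. code-block:: sql

    -- Hypothetical alternative to the INSERT ... SELECT ... WHERE NOT EXISTS
    -- pattern in V6.3.0.3.sql; assumes datasettype.name is declared UNIQUE.
    INSERT INTO datasettype (name)
    VALUES ('dataset')
    ON CONFLICT (name) DO NOTHING;

Either form is safe to re-run. The ``WHERE NOT EXISTS`` form used in the patch has the advantage of not depending on a constraint, and concurrency is not a concern here because Flyway runs migrations one at a time.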