diff --git a/conf/solr/8.11.1/schema.xml b/conf/solr/8.11.1/schema.xml index 63312ab5d40..655cf1bc3cc 100644 --- a/conf/solr/8.11.1/schema.xml +++ b/conf/solr/8.11.1/schema.xml @@ -228,6 +228,11 @@ + + + + + + + diff --git a/doc/JAVADOC_GUIDE.md b/doc/JAVADOC_GUIDE.md index 8001abda248..997c40e1624 100644 --- a/doc/JAVADOC_GUIDE.md +++ b/doc/JAVADOC_GUIDE.md @@ -88,7 +88,7 @@ Here's a better approach: /** The dataverse we move the dataset from */ private Dataverse sourceDataverse; - /** The dataverse we movet the dataset to */ + /** The dataverse we move the dataset to */ private Dataverse destinationDataverse; diff --git a/doc/mergeParty/readme.md b/doc/mergeParty/readme.md index 061673fffa0..6f3af8511dc 100644 --- a/doc/mergeParty/readme.md +++ b/doc/mergeParty/readme.md @@ -73,10 +73,10 @@ Note that before we were asking `isGuest` and now we ask `isAuthenticated`, so t ## Other Added Things ### Settings bean -Settings (in `edu.harvard.iq.dataverse.settings`) are where the application stores its more complex, admin-editable configuration. Technically, its a persistent `Map`, that can be accessed via API (`edu.harvard.iq.dataverse.api.Admin`, on path `{server}/api/s/settings`). Currenly used for the signup mechanism. +Settings (in `edu.harvard.iq.dataverse.settings`) are where the application stores its more complex, admin-editable configuration. Technically, its a persistent `Map`, that can be accessed via API (`edu.harvard.iq.dataverse.api.Admin`, on path `{server}/api/s/settings`). Currently used for the signup mechanism. ### Admin API -Accessible under url `{server}/api/s/`, API calls to this bean should be editing confugurations, allowing full indexing and more. The idea behing putting all of them under the `/s/` path is that we can later block these calls using a filter. This way, we could, say, allow access from localhost only. Or, we could block this completely based on some environemnt variable. +Accessible under url `{server}/api/s/`, API calls to this bean should be editing configurations, allowing full indexing and more. The idea behind putting all of them under the `/s/` path is that we can later block these calls using a filter. This way, we could, say, allow access from localhost only. Or, we could block this completely based on some environment variable. ### `setup-all.sh` script A new script that sets up the users and the dataverses, sets the system up for built-in signup, and then indexes the dataverses using solr. Requires the [jq utility](http://stedolan.github.io/jq/). On Macs with [homebrew](http://brew.sh) installed, getting this utility is a `brew install jq` command away. diff --git a/doc/release-notes/5.12.1-release-notes.md b/doc/release-notes/5.12.1-release-notes.md new file mode 100644 index 00000000000..aa8896660f3 --- /dev/null +++ b/doc/release-notes/5.12.1-release-notes.md @@ -0,0 +1,115 @@ +# Dataverse Software 5.12.1 + +This release brings new features, enhancements, and bug fixes to the Dataverse Software. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. + +## Release Highlights + +### Bug Fix for "Internal Server Error" When Creating a New Remote Account + +Unfortunately, as of 5.11 new remote users have seen "Internal Server Error" when creating an account (or checking notifications just after creating an account). Remote users are those who log in with institutional (Shibboleth), OAuth (ORCID, GitHub, or Google) or OIDC providers. 
+ +This is a transient error that can be worked around by reloading the browser (or logging out and back in again) but it's obviously a very poor user experience and a bad first impression. This bug is the primary reason we are putting out this patch release. Other features and bug fixes are coming along for the ride. + +### Ability to Disable OAuth Sign Up While Allowing Existing Accounts to Log In + +A new option called `:AllowRemoteAuthSignUp` has been added providing a mechanism for disabling new account signups for specific OAuth2 authentication providers (Orcid, GitHub, Google etc.) while still allowing logins for already-existing accounts using this authentication method. + +See the [Installation Guide](https://guides.dataverse.org/en/5.12.1/installation/config.html#allowremoteauthsignup) for more information on the setting. + +### Production Date Now Used for Harvested Datasets in Addition to Distribution Date (`oai_dc` format) + +Fix the year displayed in citation for harvested dataset, especially for `oai_dc` format. + +For normal datasets, the date used is the "citation date" which is by default the publication date (the first release date) unless you [change it](https://guides.dataverse.org/en/5.12.1/api/native-api.html#set-citation-date-field-type-for-a-dataset). + +However, for a harvested dataset, the distribution date was used instead and this date is not always present in the harvested metadata. + +Now, the production date is used for harvested dataset in addition to distribution date when harvesting with the `oai_dc` format. + +### Publication Date Now Used for Harvested Dataset if Production Date is Not Set (`oai_dc` format) + +For exports and harvesting in `oai_dc` format, if "Production Date" is not set, "Publication Date" is now used instead. This change is reflected in the [Dataverse 4+ Metadata Crosswalk][] linked from the [Appendix][] of the User Guide. + +[Dataverse 4+ Metadata Crosswalk]: https://docs.google.com/spreadsheets/d/10Luzti7svVTVKTA-px27oq3RxCUM-QbiTkm8iMd5C54/edit#gid=1901625433&range=K7 +[Appendix]: https://guides.dataverse.org/en/5.12.1/user/appendix.html + +## Major Use Cases and Infrastructure Enhancements + +Changes and fixes in this release include: + +- Users creating an account by logging in with Shibboleth, OAuth, or OIDC should not see errors. (Issue 9029, PR #9030) +- When harvesting datasets, I want the Production Date if I can't get the Distribution Date (PR #8732) +- When harvesting datasets, I want the Publication Date if I can't get the Production Date (PR #8733) +- As a sysadmin I'd like to disable (temporarily or permanently) sign ups from OAuth providers while allowing existing users to continue to log in from that provider (PR #9112) +- As a C/C++ developer I want to use Dataverse APIs (PR #9070) + +## New DB Settings + +The following DB settings have been added: + +- `:AllowRemoteAuthSignUp` + +See the [Database Settings](https://guides.dataverse.org/en/5.12.1/installation/config.html#database-settings) section of the Guides for more information. + +## Complete List of Changes + +For the complete list of code changes in this release, see the [5.12.1 Milestone](https://github.com/IQSS/dataverse/milestone/106?closed=1) in GitHub. + +For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org. 
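For admins who want to try the new `:AllowRemoteAuthSignUp` option right away, database settings are normally applied with a simple PUT to the admin settings API. The snippet below is only a hedged sketch: the value shown is a placeholder, and the exact value format this setting expects (for example, how individual providers are referenced) should be taken from the Installation Guide entry linked above.

```shell
# Placeholder value -- check the :AllowRemoteAuthSignUp documentation for the exact format.
export SERVER_URL=http://localhost:8080

curl -X PUT -d 'false' "$SERVER_URL/api/admin/settings/:AllowRemoteAuthSignUp"
```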
+ +## Installation + +If this is a new installation, please see our [Installation Guide](https://guides.dataverse.org/en/5.12.1/installation/). Please also contact us to get added to the [Dataverse Project Map](https://guides.dataverse.org/en/5.10/installation/config.html#putting-your-dataverse-installation-on-the-map-at-dataverse-org) if you have not done so already. + +## Upgrade Instructions + +Upgrading requires a maintenance window and downtime. Please plan ahead, create backups of your database, etc. + +0\. These instructions assume that you've already successfully upgraded from Dataverse Software 4.x to Dataverse Software 5 following the instructions in the [Dataverse Software 5 Release Notes](https://github.com/IQSS/dataverse/releases/tag/v5.0). After upgrading from the 4.x series to 5.0, you should progress through the other 5.x releases before attempting the upgrade to 5.12.1. + +If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user. + +```shell +export PAYARA=/usr/local/payara5 +``` + +(or `setenv PAYARA /usr/local/payara5` if you are using a `csh`-like shell) + +1\. Undeploy the previous version + +```shell + $PAYARA/bin/asadmin list-applications + $PAYARA/bin/asadmin undeploy dataverse<-version> +``` + +2\. Stop Payara + +```shell + service payara stop + rm -rf $PAYARA/glassfish/domains/domain1/generated +``` + +6\. Start Payara + +```shell + service payara start +``` + +7\. Deploy this version. + +```shell + $PAYARA/bin/asadmin deploy dataverse-5.12.1.war +``` + +8\. Restart payara + +```shell + service payara stop + service payara start +``` + +## Upcoming Versions of Payara + +With the recent release of Payara 6 ([Payara 6.2022.1](https://github.com/payara/Payara/releases/tag/payara-server-6.2022.1) being the first version), the days of free-to-use Payara 5.x Platform Community versions [are numbered](https://blog.payara.fish/whats-new-in-the-november-2022-payara-platform-release). Specifically, Payara's blog post says, "Payara Platform Community 5.2022.4 has been released today as the penultimate Payara 5 Community release." + +Given the end of free-to-use Payara 5 versions, we plan to get the Dataverse software working on Payara 6 (#8305), which will require substantial efforts from the IQSS team and community members, as this also means shifting our app to be a [Jakarta EE 10](https://jakarta.ee/release/10/) application (upgrading from EE 8). We are currently working out the details and will share news as soon as we can. Rest assured we will do our best to provide you with a smooth transition. You can follow along in Issue #8305 and related pull requests and you are, of course, very welcome to participate by testing and otherwise contributing, as always. 
diff --git a/doc/release-notes/6656-file-uploads.md b/doc/release-notes/6656-file-uploads.md new file mode 100644 index 00000000000..a2430a5d0a8 --- /dev/null +++ b/doc/release-notes/6656-file-uploads.md @@ -0,0 +1 @@ +new JVM option: dataverse.files.uploads diff --git a/doc/release-notes/7715-signed-urls-for-external-tools.md b/doc/release-notes/7715-signed-urls-for-external-tools.md new file mode 100644 index 00000000000..c2d3859c053 --- /dev/null +++ b/doc/release-notes/7715-signed-urls-for-external-tools.md @@ -0,0 +1,3 @@ +# Improved Security for External Tools + +This release adds support for configuring external tools to use signed URLs to access the Dataverse API. This eliminates the need for tools to have access to the user's apiToken in order to access draft or restricted datasets and datafiles. Signed URLS can be transferred via POST or via a callback when triggering a tool via GET. \ No newline at end of file diff --git a/doc/release-notes/7940-stop-harvest-in-progress b/doc/release-notes/7940-stop-harvest-in-progress new file mode 100644 index 00000000000..cb27a900f15 --- /dev/null +++ b/doc/release-notes/7940-stop-harvest-in-progress @@ -0,0 +1,4 @@ +## Mechanism added for stopping a harvest in progress + +It is now possible for an admin to stop a long-running harvesting job. See [Harvesting Clients](https://guides.dataverse.org/en/latest/admin/harvestclients.html) guide for more information. + diff --git a/doc/release-notes/8239-geospatial-indexing.md b/doc/release-notes/8239-geospatial-indexing.md new file mode 100644 index 00000000000..165cb9031ba --- /dev/null +++ b/doc/release-notes/8239-geospatial-indexing.md @@ -0,0 +1,5 @@ +Support for indexing the "Geographic Bounding Box" fields ("West Longitude", "East Longitude", "North Latitude", and "South Latitude") from the Geospatial metadata block has been added. + +Geospatial search is supported but only via API using two new parameters: `geo_point` and `geo_radius`. + +A Solr schema update is required. diff --git a/doc/release-notes/8671-sorting-licenses.md b/doc/release-notes/8671-sorting-licenses.md new file mode 100644 index 00000000000..4ceb9ec056f --- /dev/null +++ b/doc/release-notes/8671-sorting-licenses.md @@ -0,0 +1,7 @@ +## License sorting + +Licenses as shown in the dropdown in UI can be now sorted by the superusers. See [Configuring Licenses](https://guides.dataverse.org/en/5.10/installation/config.html#configuring-licenses) section of the Installation Guide for reference. + +## Backward Incompatibilities + +License files are now required to contain the new "sortOrder" column. When attempting to create a new license without this field, an error would be returned. See [Configuring Licenses](https://guides.dataverse.org/en/5.10/installation/config.html#configuring-licenses) section of the Installation Guide for reference. \ No newline at end of file diff --git a/doc/release-notes/8732-date-in-citation-harvested-datasets.md b/doc/release-notes/8732-date-in-citation-harvested-datasets.md deleted file mode 100644 index 85f2d24a8a9..00000000000 --- a/doc/release-notes/8732-date-in-citation-harvested-datasets.md +++ /dev/null @@ -1,7 +0,0 @@ -Fix the year displayed in citation for harvested dataset, specialy for oai_dc format. - -For normal datasets, the date used is the "citation date" which is by default the publication date (the first release date) (https://guides.dataverse.org/en/latest/api/native-api.html?highlight=citationdate#set-citation-date-field-type-for-a-dataset). 
- -But for a harvested dataset, the distribution date is used instead and this date is not always present in the harvested metadata. With oai_dc format the date tag if used as production date. - -Now, the production date is used for harvested dataset in addition to distribution date. \ No newline at end of file diff --git a/doc/release-notes/8733-oai_dc-date.md b/doc/release-notes/8733-oai_dc-date.md deleted file mode 100644 index a2a09f361d3..00000000000 --- a/doc/release-notes/8733-oai_dc-date.md +++ /dev/null @@ -1,4 +0,0 @@ -For exports and harvesting in `oai_dc` format, if "Production Date" is not set, "Publication Date" is now used instead. This change is reflected in the [Dataverse 4+ Metadata Crosswalk][] linked from the [Appendix][] of the User Guide. - -[Dataverse 4+ Metadata Crosswalk]: https://docs.google.com/spreadsheets/d/10Luzti7svVTVKTA-px27oq3RxCUM-QbiTkm8iMd5C54/edit#gid=1901625433&range=K7 -[Appendix]: https://guides.dataverse.org/en/latest/user/appendix.html diff --git a/doc/release-notes/8838-cstr.md b/doc/release-notes/8838-cstr.md new file mode 100644 index 00000000000..d6bcd33f412 --- /dev/null +++ b/doc/release-notes/8838-cstr.md @@ -0,0 +1,13 @@ +### CSRT PID Types Added to Related Publication ID Type field + +A persistent identifier, [CSRT](https://www.cstr.cn/search/specification/), is added to the Related Publication field's ID Type child field. For datasets published with CSRT IDs, Dataverse will also include them in the datasets' Schema.org metadata exports. + +The CSRT + +### Required Upgrade Steps + +Update the Citation metadata block: + +- `wget https://github.com/IQSS/dataverse/releases/download/v#.##/citation.tsv` +- `curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values"` +- Add the updated citation.properties file to the appropriate directory \ No newline at end of file diff --git a/doc/release-notes/8944-metadatablocks.md b/doc/release-notes/8944-metadatablocks.md new file mode 100644 index 00000000000..35bb7808e59 --- /dev/null +++ b/doc/release-notes/8944-metadatablocks.md @@ -0,0 +1,5 @@ +The API endpoint `/api/metadatablocks/{block_id}` has been extended to include the following fields: + +- `controlledVocabularyValues` - All possible values for fields with a controlled vocabulary. For example, the values "Agricultural Sciences", "Arts and Humanities", etc. for the "Subject" field. +- `isControlledVocabulary`: Whether or not this field has a controlled vocabulary. +- `multiple`: Whether or not the field supports multiple values. diff --git a/doc/release-notes/9117-file-type-detection.md b/doc/release-notes/9117-file-type-detection.md new file mode 100644 index 00000000000..462eaace8ed --- /dev/null +++ b/doc/release-notes/9117-file-type-detection.md @@ -0,0 +1,5 @@ +NetCDF and HDF5 files are now detected based on their content rather than just their file extension. + +Both "classic" NetCDF 3 files and more modern NetCDF 4 files are detected based on content. + +Detection for HDF4 files is only done through the file extension ".hdf", as before. 
diff --git a/doc/sphinx-guides/SphinxRSTCheatSheet.md b/doc/sphinx-guides/SphinxRSTCheatSheet.md index 1ccd293080c..300260cb5b1 100755 --- a/doc/sphinx-guides/SphinxRSTCheatSheet.md +++ b/doc/sphinx-guides/SphinxRSTCheatSheet.md @@ -10,7 +10,7 @@ RST Cheat Sheet for Sphinx v 1.2.2 | Bold text | **text** | | | Italics/emphasis | *text* | | | literal | ``literal`` | | -| Internal cross-reference link | See section 5.3.1 of Sphinx documentationand example below | See section 5.3.1 of Sphinx documentationand example below | +| Internal cross-reference link | See section 5.3.1 of Sphinx documentation and example below | See section 5.3.1 of Sphinx documentation and example below | | code block | .. code-block:: guess | Allows for code blocks to be displayed properly | For more cheats please visit the [RST cheat sheet google doc] (https://docs.google.com/document/d/105H3iwPwgnPqwuMJI7q-h6FLtXV_EUCiwq2P13lADgA/edit?usp=sharing) \ No newline at end of file diff --git a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv index 61db5dfed93..fd1f0f27bc5 100644 --- a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv +++ b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv @@ -1,5 +1,5 @@ Tool Type Scope Description Data Explorer explore file A GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. See the README.md file at https://github.com/scholarsportal/dataverse-data-explorer-v2 for the instructions on adding Data Explorer to your Dataverse. Whole Tale explore dataset A platform for the creation of reproducible research packages that allows users to launch containerized interactive analysis environments based on popular tools such as Jupyter and RStudio. Using this integration, Dataverse users can launch Jupyter and RStudio environments to analyze published datasets. For more information, see the `Whole Tale User Guide `_. -File Previewers explore file A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, text, video, tabular data, spreadsheets, and GeoJSON - allowing them to be viewed without downloading. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers +File Previewers explore file A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, text, video, tabular data, spreadsheets, GeoJSON, and ZipFiles - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. 
https://github.com/gdcc/dataverse-previewers Data Curation Tool configure file A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions. diff --git a/doc/sphinx-guides/source/_static/api/add-license.json b/doc/sphinx-guides/source/_static/api/add-license.json index 969d6d58dab..a9d5dd34093 100644 --- a/doc/sphinx-guides/source/_static/api/add-license.json +++ b/doc/sphinx-guides/source/_static/api/add-license.json @@ -3,5 +3,6 @@ "uri": "http://creativecommons.org/licenses/by/4.0", "shortDescription": "Creative Commons Attribution 4.0 International License.", "iconUrl": "https://i.creativecommons.org/l/by/4.0/88x31.png", - "active": true + "active": true, + "sortOrder": 2 } diff --git a/doc/sphinx-guides/source/_static/api/dataset-add-subject-metadata.json b/doc/sphinx-guides/source/_static/api/dataset-add-subject-metadata.json index ea0922dadc8..c81c5b32aab 100644 --- a/doc/sphinx-guides/source/_static/api/dataset-add-subject-metadata.json +++ b/doc/sphinx-guides/source/_static/api/dataset-add-subject-metadata.json @@ -2,7 +2,7 @@ "typeName": "subject", "value": ["Astronomy and Astrophysics", "Agricultural Sciences", -"Arts and Humanities", "Physics"] +"Arts and Humanities", "Physics", "Mathematical Sciences"] } diff --git a/doc/sphinx-guides/source/_static/api/ddi_dataset.xml b/doc/sphinx-guides/source/_static/api/ddi_dataset.xml index 05eaadc3458..014ebb8c581 100644 --- a/doc/sphinx-guides/source/_static/api/ddi_dataset.xml +++ b/doc/sphinx-guides/source/_static/api/ddi_dataset.xml @@ -88,12 +88,12 @@ 10 20 - 30 - 40 + 40 + 30 - 80 - 70 + 70 + 80 60 50 diff --git a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/dynamicDatasetTool.json b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/dynamicDatasetTool.json index e30c067a86b..47413c8a625 100644 --- a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/dynamicDatasetTool.json +++ b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/dynamicDatasetTool.json @@ -12,8 +12,16 @@ "PID": "{datasetPid}" }, { - "apiToken": "{apiToken}" + "locale":"{localeCode}" } - ] + ], + "allowedApiCalls": [ + { + "name":"retrieveDatasetJson", + "httpMethod":"GET", + "urlTemplate":"/api/v1/datasets/{datasetId}", + "timeOut":10 + } + ] } } diff --git a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/fabulousFileTool.json b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/fabulousFileTool.json index 14f71a280b3..1c132576099 100644 --- a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/fabulousFileTool.json +++ b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/fabulousFileTool.json @@ -1,6 +1,6 @@ { "displayName": "Fabulous File Tool", - "description": "Fabulous Fun for Files!", + "description": "A non-existent tool that is fabulous fun for files!", "toolName": "fabulous", "scope": "file", "types": [ @@ -9,13 +9,25 @@ ], "toolUrl": "https://fabulousfiletool.com", "contentType": "text/tab-separated-values", + "httpMethod":"GET", "toolParameters": { "queryParameters": [ { "fileid": "{fileId}" }, { - "key": "{apiToken}" + "datasetPid": "{datasetPid}" + }, + { + "locale":"{localeCode}" + } + ], + "allowedApiCalls": [ + { + "name":"retrieveDataFile", + "httpMethod":"GET", + 
"urlTemplate":"/api/v1/access/datafile/{fileId}", + "timeOut":270 } ] } diff --git a/doc/sphinx-guides/source/admin/harvestclients.rst b/doc/sphinx-guides/source/admin/harvestclients.rst index c655d5af763..e94a6aa1730 100644 --- a/doc/sphinx-guides/source/admin/harvestclients.rst +++ b/doc/sphinx-guides/source/admin/harvestclients.rst @@ -21,6 +21,21 @@ Clients are managed on the "Harvesting Clients" page accessible via the :doc:`da The process of creating a new, or editing an existing client, is largely self-explanatory. It is split into logical steps, in a way that allows the user to go back and correct the entries made earlier. The process is interactive and guidance text is provided. For example, the user is required to enter the URL of the remote OAI server. When they click *Next*, the application will try to establish a connection to the server in order to verify that it is working, and to obtain the information about the sets of metadata records and the metadata formats it supports. The choices offered to the user on the next page will be based on this extra information. If the application fails to establish a connection to the remote archive at the address specified, or if an invalid response is received, the user is given an opportunity to check and correct the URL they entered. +How to Stop a Harvesting Run in Progress +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Some harvesting jobs, especially the initial full harvest of a very large set - such as the default set of public datasets at IQSS - can take many hours. In case it is necessary to terminate such a long-running job, the following mechanism is provided (note that it is only available to a sysadmin with shell access to the application server): Create an empty file in the domain logs directory with the following name: ``stopharvest_.``, where ```` is the nickname of the harvesting client and ```` is the process id of the Application Server (Payara). This flag file needs to be owned by the same user that's running Payara, so that the application can remove it after stopping the job in progress. + +For example: + +.. code-block:: bash + + sudo touch /usr/local/payara5/glassfish/domains/domain1/logs/stopharvest_bigarchive.70916 + sudo chown dataverse /usr/local/payara5/glassfish/domains/domain1/logs/stopharvest_bigarchive.70916 + +Note: If the application server is stopped and restarted, any running harvesting jobs will be killed but may remain marked as in progress in the database. We thus recommend using the mechanism here to stop ongoing harvests prior to a server restart. + + What if a Run Fails? ~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst index 5f7cf85f714..9fb8626d4c4 100644 --- a/doc/sphinx-guides/source/admin/metadatacustomization.rst +++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst @@ -386,12 +386,16 @@ Metadata Block Setup Now that you understand the TSV format used for metadata blocks, the next step is to attempt to make improvements to existing metadata blocks or create entirely new metadata blocks. For either task, you should have a Dataverse Software development environment set up for testing where you can drop the database frequently while you make edits to TSV files. Once you have tested your TSV files, you should consider making a pull request to contribute your improvement back to the community. +.. 
_exploring-metadata-blocks: + Exploring Metadata Blocks ~~~~~~~~~~~~~~~~~~~~~~~~~ -In addition to studying the TSV files themselves you might find the following highly experimental and subject-to-change API endpoints useful to understand the metadata blocks that have already been loaded into your Dataverse installation: +In addition to studying the TSV files themselves you will probably find the :ref:`metadata-blocks-api` API helpful in getting a structured dump of metadata blocks in JSON format. + +There are also a few older, highly experimental, and subject-to-change API endpoints under the "admin" API documented below but the public API above is preferred. -You can get a dump of metadata fields (yes, the output is odd, please open a issue) like this: +You can get a dump of metadata fields like this: ``curl http://localhost:8080/api/admin/datasetfield`` diff --git a/doc/sphinx-guides/source/admin/user-administration.rst b/doc/sphinx-guides/source/admin/user-administration.rst index 608a8ab2b72..a21263f6f17 100644 --- a/doc/sphinx-guides/source/admin/user-administration.rst +++ b/doc/sphinx-guides/source/admin/user-administration.rst @@ -57,9 +57,9 @@ See :ref:`deactivate-a-user` Confirm Email ------------- -A Dataverse installation encourages builtin/local users to verify their email address upon signup or email change so that sysadmins can be assured that users can be contacted. +A Dataverse installation encourages builtin/local users to verify their email address upon sign up or email change so that sysadmins can be assured that users can be contacted. -The app will send a standard welcome email with a URL the user can click, which, when activated, will store a ``lastconfirmed`` timestamp in the ``authenticateduser`` table of the database. Any time this is "null" for a user (immediately after signup and/or changing of their Dataverse installation email address), their current email on file is considered to not be verified. The link that is sent expires after a time (the default is 24 hours), but this is configurable by a superuser via the ``:MinutesUntilConfirmEmailTokenExpires`` config option. +The app will send a standard welcome email with a URL the user can click, which, when activated, will store a ``lastconfirmed`` timestamp in the ``authenticateduser`` table of the database. Any time this is "null" for a user (immediately after sign up and/or changing of their Dataverse installation email address), their current email on file is considered to not be verified. The link that is sent expires after a time (the default is 24 hours), but this is configurable by a superuser via the ``:MinutesUntilConfirmEmailTokenExpires`` config option. Should users' URL token expire, they will see a "Verify Email" button on the account information page to send another URL. diff --git a/doc/sphinx-guides/source/api/external-tools.rst b/doc/sphinx-guides/source/api/external-tools.rst index d72a6f62004..4f6c9a8015c 100644 --- a/doc/sphinx-guides/source/api/external-tools.rst +++ b/doc/sphinx-guides/source/api/external-tools.rst @@ -92,7 +92,9 @@ Terminology contentType File level tools operate on a specific **file type** (content type or MIME type such as "application/pdf") and this must be specified. Dataset level tools do not use contentType. - toolParameters **Query parameters** are supported and described below. + toolParameters **httpMethod**, **queryParameters**, and **allowedApiCalls** are supported and described below. + + httpMethod Either ``GET`` or ``POST``. 
queryParameters **Key/value combinations** that can be appended to the toolUrl. For example, once substitution takes place (described below) the user may be redirected to ``https://fabulousfiletool.com?fileId=42&siteUrl=http://demo.dataverse.org``. @@ -102,6 +104,16 @@ Terminology reserved words A **set of strings surrounded by curly braces** such as ``{fileId}`` or ``{datasetId}`` that will be inserted into query parameters. See the table below for a complete list. + allowedApiCalls An array of objects defining callbacks the tool is allowed to make to the Dataverse API. If the dataset or file being accessed is not public, the callback URLs will be signed to allow the tool access for a defined time. + + allowedApiCalls name A name the tool will use to identify this callback URL such as ``retrieveDataFile``. + + allowedApiCalls urlTemplate The relative URL for the callback using reserved words to indicate where values should be dynamically substituted such as ``/api/v1/datasets/{datasetId}``. + + allowedApiCalls httpMethod Which HTTP method the specified callback uses such as ``GET`` or ``POST``. + + allowedApiCalls timeOut For non-public datasets and datafiles, how many minutes the signed URLs given to the tool should be valid for. Must be an integer. + toolName A **name** of an external tool that is used to differentiate between external tools and also used in bundle.properties for localization in the Dataverse installation web interface. For example, the toolName for Data Explorer is ``explorer``. For the Data Curation Tool the toolName is ``dct``. This is an optional parameter in the manifest JSON file. =========================== ========== @@ -131,6 +143,25 @@ Reserved Words ``{localeCode}`` optional The code for the language ("en" for English, "fr" for French, etc.) that user has selected from the language toggle in a Dataverse installation. See also :ref:`i18n`. =========================== ========== =========== +.. _api-exttools-auth: + +Authorization Options ++++++++++++++++++++++ + +When called for datasets or data files that are not public (i.e. in a draft dataset or for a restricted file), external tools are allowed access via the user's credentials. This is accomplished by one of two mechanisms: + +* Signed URLs (more secure, recommended) + + - Configured via the ``allowedApiCalls`` section of the manifest. The tool will be provided with signed URLs allowing the specified access to the given dataset or datafile for the specified amount of time. The tool will not be able to access any other datasets or files the user may have access to and will not be able to make calls other than those specified. + - For tools invoked via a GET call, Dataverse will include a callback query parameter with a Base64 encoded value. The decoded value is a signed URL that can be called to retrieve a JSON response containing all of the queryParameters and allowedApiCalls specified in the manifest. + - For tools invoked via POST, Dataverse will send a JSON body including the requested queryParameters and allowedApiCalls. Dataverse expects the response to the POST to indicate a redirect which Dataverse will use to open the tool. + +* API Token (deprecated, less secure, not recommended) + + - Configured via the ``queryParameters`` by including an ``{apiToken}`` value. When this is present Dataverse will send the user's apiToken to the tool. With the user's API token, the tool can perform any action via the Dataverse API that the user could.
External tools configured via this method should be assessed for their trustworthiness. + - For tools invoked via GET, this will be done via a query parameter in the request URL which could be cached in the browser's history. Dataverse expects the response to the POST to indicate a redirect which Dataverse will use to open the tool. + - For tools invoked via POST, Dataverse will send a JSON body including the apiToken. + Internationalization of Your External Tool ++++++++++++++++++++++++++++++++++++++++++ diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 6d68d648cb3..76ca38fdc70 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -526,7 +526,7 @@ To create a dataset, you must supply a JSON file that contains at least the foll - Description Text - Subject -As a starting point, you can download :download:`dataset-finch1.json <../../../../scripts/search/tests/data/dataset-finch1.json>` and modify it to meet your needs. (:download:`dataset-create-new-all-default-fields.json <../../../../scripts/api/data/dataset-finch1_fr.json>` is a variant of this file that includes setting the metadata language (see :ref:`:MetadataLanguages`) to French (fr). In addition to this minimal example, you can download :download:`dataset-create-new-all-default-fields.json <../../../../scripts/api/data/dataset-create-new-all-default-fields.json>` which populates all of the metadata fields that ship with a Dataverse installation.) +As a starting point, you can download :download:`dataset-finch1.json <../../../../scripts/search/tests/data/dataset-finch1.json>` and modify it to meet your needs. (:download:`dataset-finch1_fr.json <../../../../scripts/api/data/dataset-finch1_fr.json>` is a variant of this file that includes setting the metadata language (see :ref:`:MetadataLanguages`) to French (fr). In addition to this minimal example, you can download :download:`dataset-create-new-all-default-fields.json <../../../../scripts/api/data/dataset-create-new-all-default-fields.json>` which populates all of the metadata fields that ship with a Dataverse installation.) The curl command below assumes you have kept the name "dataset-finch1.json" and that this file is in your current working directory. @@ -2029,6 +2029,24 @@ Archiving is an optional feature that may be configured for a Dataverse installa curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/:persistentId/$VERSION/archivalStatus?persistentId=$PERSISTENT_IDENTIFIER" +Get External Tool Parameters +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This API call is intended as a callback that can be used by :doc:`/installation/external-tools` to retrieve signed Urls necessary for their interaction with Dataverse. +It can be called directly as well. + +The response is a JSON object described in the :doc:`/api/external-tools` section of the API guide. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/7U7YBV + export VERSION=1.0 + export TOOL_ID=1 + + + curl -H "X-Dataverse-key: $API_TOKEN" -H "Accept:application/json" "$SERVER_URL/api/datasets/:persistentId/versions/$VERSION/toolparams/$TOOL_ID?persistentId=$PERSISTENT_IDENTIFIER" Files ----- @@ -2689,6 +2707,24 @@ Note the optional "limit" parameter. Without it, the API will attempt to populat By default, the admin API calls are blocked and can only be called from localhost. 
See more details in :ref:`:BlockedApiEndpoints <:BlockedApiEndpoints>` and :ref:`:BlockedApiPolicy <:BlockedApiPolicy>` settings in :doc:`/installation/config`. +Get External Tool Parameters +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This API call is intended as a callback that can be used by :doc:`/installation/external-tools` to retrieve signed Urls necessary for their interaction with Dataverse. +It can be called directly as well. (Note that the required FILEMETADATA_ID is the "id" returned in the JSON response from the /api/files/$FILE_ID/metadata call.) + +The response is a JSON object described in the :doc:`/api/external-tools` section of the API guide. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export FILE_ID=3 + export FILEMETADATA_ID=1 + export TOOL_ID=1 + + curl -H "X-Dataverse-key: $API_TOKEN" -H "Accept:application/json" "$SERVER_URL/api/files/$FILE_ID/metadata/$FILEMETADATA_ID/toolparams/$TOOL_ID + Users Token Management ---------------------- @@ -2971,22 +3007,47 @@ The fully expanded example above (without environment variables) looks like this curl https://demo.dataverse.org/api/info/apiTermsOfUse +.. _metadata-blocks-api: + Metadata Blocks --------------- +See also :ref:`exploring-metadata-blocks`. + Show Info About All Metadata Blocks ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -|CORS| Lists brief info about all metadata blocks registered in the system:: +|CORS| Lists brief info about all metadata blocks registered in the system. + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + + curl $SERVER_URL/api/metadatablocks + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash - GET http://$SERVER/api/metadatablocks + curl https://demo.dataverse.org/api/metadatablocks Show Info About Single Metadata Block ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -|CORS| Return data about the block whose ``identifier`` is passed. ``identifier`` can either be the block's id, or its name:: +|CORS| Return data about the block whose ``identifier`` is passed, including allowed controlled vocabulary values. ``identifier`` can either be the block's database id, or its name (i.e. "citation"). + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export IDENTIFIER=citation + + curl $SERVER_URL/api/metadatablocks/$IDENTIFIER + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash - GET http://$SERVER/api/metadatablocks/$identifier + curl https://demo.dataverse.org/api/metadatablocks/citation .. _Notifications: @@ -3200,6 +3261,147 @@ The fully expanded example above (without the environment variables) looks like Only users with superuser permissions may delete harvesting sets. +Managing Harvesting Clients +--------------------------- + +The following API can be used to create and manage "Harvesting Clients". A Harvesting Client is a configuration entry that allows your Dataverse installation to harvest and index metadata from a specific remote location, either regularly, on a configured schedule, or on a one-off basis. For more information, see the :doc:`/admin/harvestclients` section of the Admin Guide. 
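A minimal example of the simplest of these calls, listing every configured client, is shown below; it assumes a local installation, and you may need to add an API token header depending on how your installation restricts the endpoint.

.. code-block:: bash

    # List all harvesting clients; add -H "X-Dataverse-key:$API_TOKEN" if required.
    curl "http://localhost:8080/api/harvest/clients"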
+ +List All Configured Harvesting Clients +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Shows all the Harvesting Clients configured:: + + GET http://$SERVER/api/harvest/clients/ + +Show a Specific Harvesting Client +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Shows a Harvesting Client with a defined nickname:: + + GET http://$SERVER/api/harvest/clients/$nickname + +.. code-block:: bash + + curl "http://localhost:8080/api/harvest/clients/myclient" + + { + "status":"OK", + { + "data": { + "lastDatasetsFailed": "22", + "lastDatasetsDeleted": "0", + "metadataFormat": "oai_dc", + "archiveDescription": "This Dataset is harvested from our partners. Clicking the link will take you directly to the archival source of the data.", + "archiveUrl": "https://dataverse.foo.edu", + "harvestUrl": "https://dataverse.foo.edu/oai", + "style": "dataverse", + "type": "oai", + "dataverseAlias": "fooData", + "nickName": "myClient", + "set": "fooSet", + "schedule": "none", + "status": "inActive", + "lastHarvest": "Thu Oct 13 14:48:57 EDT 2022", + "lastResult": "SUCCESS", + "lastSuccessful": "Thu Oct 13 14:48:57 EDT 2022", + "lastNonEmpty": "Thu Oct 13 14:48:57 EDT 2022", + "lastDatasetsHarvested": "137" + } + } + + +Create a Harvesting Client +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To create a new harvesting client:: + + POST http://$SERVER/api/harvest/clients/$nickname + +``nickName`` is the name identifying the new client. It should be alpha-numeric and may also contain -, _, or %, but no spaces. Must also be unique in the installation. + +You must supply a JSON file that describes the configuration, similarly to the output of the GET API above. The following fields are mandatory: + +- dataverseAlias: The alias of an existing collection where harvested datasets will be deposited +- harvestUrl: The URL of the remote OAI archive +- archiveUrl: The URL of the remote archive that will be used in the redirect links pointing back to the archival locations of the harvested records. It may or may not be on the same server as the harvestUrl above. If this OAI archive is another Dataverse installation, it will be the same URL as harvestUrl minus the "/oai". For example: https://demo.dataverse.org/ vs. https://demo.dataverse.org/oai +- metadataFormat: A supported metadata format. As of writing this the supported formats are "oai_dc", "oai_ddi" and "dataverse_json". + +The following optional fields are supported: + +- archiveDescription: What the name suggests. If not supplied, will default to "This Dataset is harvested from our partners. Clicking the link will take you directly to the archival source of the data." +- set: The OAI set on the remote server. If not supplied, will default to none, i.e., "harvest everything". +- style: Defaults to "default" - a generic OAI archive. (Make sure to use "dataverse" when configuring harvesting from another Dataverse installation). + +Generally, the API will accept the output of the GET version of the API for an existing client as valid input, but some fields will be ignored. For example, as of writing this there is no way to configure a harvesting schedule via this API. + +An example JSON file would look like this:: + + { + "nickName": "zenodo", + "dataverseAlias": "zenodoHarvested", + "harvestUrl": "https://zenodo.org/oai2d", + "archiveUrl": "https://zenodo.org", + "archiveDescription": "Moissonné depuis la collection LMOPS de l'entrepôt Zenodo. En cliquant sur ce jeu de données, vous serez redirigé vers Zenodo.", + "metadataFormat": "oai_dc", + "set": "user-lmops" + } + +.. 
note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of export below. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=http://localhost:8080 + + curl -H X-Dataverse-key:$API_TOKEN -X POST -H "Content-Type: application/json" "$SERVER_URL/api/harvest/clients/zenodo" --upload-file client.json + +The fully expanded example above (without the environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -H "Content-Type: application/json" "http://localhost:8080/api/harvest/clients/zenodo" --upload-file "client.json" + + { + "status": "OK", + "data": { + "metadataFormat": "oai_dc", + "archiveDescription": "Moissonné depuis la collection LMOPS de l'entrepôt Zenodo. En cliquant sur ce jeu de données, vous serez redirigé vers Zenodo.", + "archiveUrl": "https://zenodo.org", + "harvestUrl": "https://zenodo.org/oai2d", + "style": "default", + "type": "oai", + "dataverseAlias": "zenodoHarvested", + "nickName": "zenodo", + "set": "user-lmops", + "schedule": "none", + "status": "inActive", + "lastHarvest": "N/A", + "lastSuccessful": "N/A", + "lastNonEmpty": "N/A", + "lastDatasetsHarvested": "N/A", + "lastDatasetsDeleted": "N/A" + } + } + +Only users with superuser permissions may create or configure harvesting clients. + +Modify a Harvesting Client +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Similar to the API above, using the same JSON format, but run on an existing client and using the PUT method instead of POST. + +Delete a Harvesting Client +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Self-explanatory: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "http://localhost:8080/api/harvest/clients/$nickName" + +Only users with superuser permissions may delete harvesting clients. + + PIDs ---- @@ -4015,7 +4217,7 @@ View the details of the standard license with the database ID specified in ``$ID curl $SERVER_URL/api/licenses/$ID -Superusers can add a new license by posting a JSON file adapted from this example :download:`add-license.json <../_static/api/add-license.json>`. The ``name`` and ``uri`` of the new license must be unique. If you are interested in adding a Creative Commons license, you are encouarged to use the JSON files under :ref:`adding-creative-commons-licenses`: +Superusers can add a new license by posting a JSON file adapted from this example :download:`add-license.json <../_static/api/add-license.json>`. The ``name`` and ``uri`` of the new license must be unique. Sort order field is mandatory. If you are interested in adding a Creative Commons license, you are encouarged to use the JSON files under :ref:`adding-creative-commons-licenses`: .. code-block:: bash @@ -4040,6 +4242,13 @@ Superusers can delete a license, provided it is not in use, by the license ``$ID .. code-block:: bash curl -X DELETE -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/licenses/$ID + +Superusers can change the sorting order of a license specified by the license ``$ID``: + +.. code-block:: bash + + export SORT_ORDER=100 + curl -X PUT -H 'Content-Type: application/json' -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/licenses/$ID/:sortOrder/$SORT_ORDER List Dataset Templates ~~~~~~~~~~~~~~~~~~~~~~ @@ -4070,6 +4279,33 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash curl -X DELETE https://demo.dataverse.org/api/admin/template/24 + +.. 
_api-native-signed-url: - - +Request Signed URL +~~~~~~~~~~~~~~~~~~ + +Dataverse has the ability to create signed URLs for it's API calls. +A signature, which is valid only for the specific API call and only for a specified duration, allows the call to proceed with the authentication of the specified user. +It is intended as an alternative to the use of an API key (which is valid for a long time period and can be used with any API call). +Signed URLs were developed to support External Tools but may be useful in other scenarios where Dataverse or a third-party tool needs to delegate limited access to another user or tool. +This API call allows a Dataverse superUser to generate a signed URL for such scenarios. +The JSON input parameter required is an object with the following keys: + +- ``url`` - the exact URL to sign, including api version number and all query parameters +- ``timeOut`` - how long in minutes the signature should be valid for, default is 10 minutes +- ``httpMethod`` - which HTTP method is required, default is GET +- ``user`` - the user identifier for the account associated with this signature, the default is the superuser making the call. The API call will succeed/fail based on whether the specified user has the required permissions. + +A curl example using allowing access to a dataset's metadata + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export API_KEY=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export JSON='{"url":"https://demo.dataverse.org/api/v1/datasets/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB","timeOut":5,"user":"alberteinstein"}' + + curl -H "X-Dataverse-key:$API_KEY" -H 'Content-Type:application/json' -d "$JSON" $SERVER_URL/api/admin/requestSignedUrl + +Please see :ref:`dataverse.api.signature-secret` for the configuration option to add a shared secret, enabling extra +security. diff --git a/doc/sphinx-guides/source/api/search.rst b/doc/sphinx-guides/source/api/search.rst index d5e56543fb1..b941064f173 100755 --- a/doc/sphinx-guides/source/api/search.rst +++ b/doc/sphinx-guides/source/api/search.rst @@ -35,6 +35,8 @@ show_relevance boolean Whether or not to show details of which fields were ma show_facets boolean Whether or not to show facets that can be operated on by the "fq" parameter. False by default. See :ref:`advanced search example `. fq string A filter query on the search term. Multiple "fq" parameters can be used. See :ref:`advanced search example `. show_entity_ids boolean Whether or not to show the database IDs of the search results (for developer use). +geo_point string Latitude and longitude in the form ``geo_point=42.3,-71.1``. You must supply ``geo_radius`` as well. See also :ref:`geospatial-search`. +geo_radius string Radial distance in kilometers from ``geo_point`` (which must be supplied as well) such as ``geo_radius=1.5``. metadata_fields string Includes the requested fields for each dataset in the response. Multiple "metadata_fields" parameters can be used to include several fields. The value must be in the form "{metadata_block_name}:{field_name}" to include a specific field from a metadata block (see :ref:`example `) or "{metadata_field_set_name}:\*" to include all the fields for a metadata block (see :ref:`example `). "{field_name}" cannot be a subfield of a compound field. If "{field_name}" is a compound field, all subfields are included. 
=============== ======= =========== diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py index 880ed561720..590eee4bd9d 100755 --- a/doc/sphinx-guides/source/conf.py +++ b/doc/sphinx-guides/source/conf.py @@ -66,9 +66,9 @@ # built documents. # # The short X.Y version. -version = '5.12' +version = '5.12.1' # The full version, including alpha/beta/rc tags. -release = '5.12' +release = '5.12.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/sphinx-guides/source/developers/making-releases.rst b/doc/sphinx-guides/source/developers/making-releases.rst index 53fc11a5915..55f5f550dd9 100755 --- a/doc/sphinx-guides/source/developers/making-releases.rst +++ b/doc/sphinx-guides/source/developers/making-releases.rst @@ -10,7 +10,7 @@ Introduction See :doc:`version-control` for background on our branching strategy. -The steps below describe making both normal releases and hotfix releases. +The steps below describe making both regular releases and hotfix releases. Write Release Notes ------------------- @@ -43,49 +43,110 @@ Increment the version number to the milestone (e.g. 5.10.1) in the following two - modules/dataverse-parent/pom.xml -> ```` -> ```` (e.g. `pom.xml commit `_) - doc/sphinx-guides/source/conf.py (two places, e.g. `conf.py commit `_) -Add the version being released to the lists in the following two files: +Add the version being released to the lists in the following file: - doc/sphinx-guides/source/versions.rst (e.g. `versions.rst commit `_) Check in the Changes Above into a Release Branch and Merge It ------------------------------------------------------------- -For any ordinary release, make the changes above in the release branch you created, make a pull request, and merge it into the "develop" branch. Like usual, you can safely delete the branch after the merge is complete. +For a regular release, make the changes above in the release branch you created, make a pull request, and merge it into the "develop" branch. Like usual, you can safely delete the branch after the merge is complete. If you are making a hotfix release, make the pull request against the "master" branch. Do not delete the branch after merging because we will later merge it into the "develop" branch to pick up the hotfix. More on this later. -Either way, as usual, you should ensure that all tests are passing. Please note that you might need to bump the version in `jenkins.yml `_ in dataverse-ansible to get the tests to run. +Either way, as usual, you should ensure that all tests are passing. Please note that you will need to bump the version in `jenkins.yml `_ in dataverse-ansible to get the tests to pass. Consider doing this before making the pull request. Alternatively, you can bump jenkins.yml after making the pull request and re-run the Jenkins job to make sure tests pass. Merge "develop" into "master" ----------------------------- -Note: If you are making a hotfix release, the "develop" branch is not involved so you can skip this step. +If this is a regular (non-hotfix) release, create a pull request to merge the "develop" branch into the "master" branch using this "compare" link: https://github.com/IQSS/dataverse/compare/master...develop -The "develop" branch should be merged into "master" before tagging. +Once important tests have passed (compile, unit tests, etc.), merge the pull request. Don't worry about style tests failing such as for shell scripts. 
+ +If this is a hotfix release, skip this whole "merge develop to master" step (the "develop" branch is not involved until later). + +Build the Guides for the Release +-------------------------------- + +Go to https://jenkins.dataverse.org/job/guides.dataverse.org/ and make the following adjustments to the config: + +- Repository URL: ``https://github.com/IQSS/dataverse.git`` +- Branch Specifier (blank for 'any'): ``*/master`` +- ``VERSION`` (under "Build Steps"): ``5.10.1`` (for example) + +Click "Save" then "Build Now". + +Make sure the guides directory appears in the expected location such as https://guides.dataverse.org/en/5.10.1/ + +As described below, we'll soon point the "latest" symlink to that new directory. Create a Draft Release on GitHub -------------------------------- -Create a draft release at https://github.com/IQSS/dataverse/releases/new +Go to https://github.com/IQSS/dataverse/releases/new to start creating a draft release. + +- Under "Choose a tag" you will be creating a new tag. Have it start with a "v" such as ``v5.10.1``. Click "Create new tag on publish". +- Under "Target" go to "Recent Commits" and select the merge commit from when you merged ``develop`` into ``master`` above. This commit will appear in ``/api/info/version`` from a running installation. +- Under "Release title" use the same name as the tag such as ``v5.10.1``. +- In the description, copy and paste the content from the release notes .md file created in the "Write Release Notes" steps above. +- Click "Save draft" because we do not want to publish the release yet. + +At this point you can send around the draft release for any final feedback. Links to the guides for this release should be working now, since you build them above. + +Make corrections to the draft, if necessary. It will be out of sync with the .md file, but that's ok (`#7988 `_ is tracking this). + +Run a Build to Create the War File +---------------------------------- -The "tag version" and "title" should be the number of the milestone with a "v" in front (i.e. v5.10.1). +ssh into the dataverse-internal server and undeploy the current war file. -Copy in the content from the .md file created in the "Write Release Notes" steps above. +Go to https://jenkins.dataverse.org/job/IQSS_Dataverse_Internal/ and make the following adjustments to the config: + +- Repository URL: ``https://github.com/IQSS/dataverse.git`` +- Branch Specifier (blank for 'any'): ``*/master`` +- Execute shell: Update version in filenames to ``dataverse-5.10.1.war`` (for example) + +Click "Save" then "Build Now". + +The build number will appear in ``/api/info/version`` (along with the commit mentioned above) from a running installation (e.g. ``{"version":"5.10.1","build":"907-b844672``). + +Build Installer (dvinstall.zip) +------------------------------- + +ssh into the dataverse-internal server and do the following: + +- In a git checkout of the dataverse source switch to the master branch and pull the latest. +- Copy the war file from the previous step to the ``target`` directory in the root of the repo (create it, if necessary). +- ``cd scripts/installer`` +- ``make`` + +A zip file called ``dvinstall.zip`` should be produced. Make Artifacts Available for Download ------------------------------------- Upload the following artifacts to the draft release you created: -- war file (``mvn package`` from Jenkins) -- installer (``cd scripts/installer && make``) -- other files as needed, such as updated Solr schema and config files +- the war file (e.g. 
``dataverse-5.10.1.war``, from above) +- the installer (``dvinstall.zip``, from above) +- other files as needed: + + - updated Solr schema + - metadata block tsv files + - config files Publish the Release ------------------- Click the "Publish release" button. +Update Guides Link +------------------ + +"latest" at https://guides.dataverse.org/en/latest/ is a symlink to the directory with the latest release. That directory (e.g. ``5.10.1``) was put into place by the Jenkins "guides" job described above. + +ssh into the guides server and update the symlink to point to the latest release. + Close Milestone on GitHub and Create a New One ---------------------------------------------- @@ -115,7 +176,7 @@ For Hotfixes, Merge Hotfix Branch into "develop" and Rename SQL Scripts Note: this only applies to hotfixes! -We've merged the hotfix into the "master" branch but now we need the fixes (and version bump) in the "develop" branch. Make a new branch off the hotfix branch and create a pull request against develop. Merge conflicts are possible and this pull request should go through review and QA like normal. Afterwards it's fine to delete this branch and the hotfix brach that was merged into master. +We've merged the hotfix into the "master" branch but now we need the fixes (and version bump) in the "develop" branch. Make a new branch off the hotfix branch and create a pull request against develop. Merge conflicts are possible and this pull request should go through review and QA like normal. Afterwards it's fine to delete this branch and the hotfix branch that was merged into master. Because of the hotfix version, any SQL scripts in "develop" should be renamed (from "5.11.0" to "5.11.1" for example). To read more about our naming conventions for SQL scripts, see :doc:`sql-upgrade-scripts`. diff --git a/doc/sphinx-guides/source/developers/troubleshooting.rst b/doc/sphinx-guides/source/developers/troubleshooting.rst index 0463a68d8c8..832785f9860 100755 --- a/doc/sphinx-guides/source/developers/troubleshooting.rst +++ b/doc/sphinx-guides/source/developers/troubleshooting.rst @@ -41,7 +41,7 @@ This command helps verify what host your domain is using to send mail. Even if i 2. From the left-side panel, select **JavaMail Sessions** 3. You should see one session named **mail/notifyMailSession** -- click on that. -From this window you can modify certain fields of your Dataverse installation's notifyMailSession, which is the JavaMail session for outgoing system email (such as on user signup or data publication). Two of the most important fields we need are: +From this window you can modify certain fields of your Dataverse installation's notifyMailSession, which is the JavaMail session for outgoing system email (such as on user sign up or data publication). Two of the most important fields we need are: - **Mail Host:** The DNS name of the default mail server (e.g. smtp.gmail.com) - **Default User:** The username provided to your Mail Host when you connect to it (e.g. 
johndoe@gmail.com) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 3cdac253cb3..2c576b03989 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -580,8 +580,7 @@ Optionally, you may provide static credentials for each S3 storage using MicroPr - ``dataverse.files..access-key`` for this storage's "access key ID" - ``dataverse.files..secret-key`` for this storage's "secret access key" -You may provide the values for these via any of the -`supported config sources `_. +You may provide the values for these via any `supported MicroProfile Config API source`_. **WARNING:** @@ -700,6 +699,26 @@ Once you have configured a trusted remote store, you can point your users to the =========================================== ================== ========================================================================== =================== +.. _temporary-file-storage: + +Temporary Upload File Storage ++++++++++++++++++++++++++++++ + +When uploading files via the API or Web UI, you need to be aware that multiple steps are involved to enable +features like ingest processing, transfer to permanent storage, checking for duplicates, unzipping, etc. + +All of these processes are triggered after finishing transfers over the wire and moving the data into a temporary +(configurable) location on disk at :ref:`${dataverse.files.directory} `\ ``/temp``. + +Before being moved there, + +- JSF Web UI uploads are stored at :ref:`${dataverse.files.uploads} `, defaulting to + the ``/usr/local/payara5/glassfish/domains/domain1/uploads`` folder in a standard installation. This location is + configurable and might be set to a separate disk volume where stale uploads are purged periodically. +- API uploads are stored at the system's temporary files location indicated by the Java system property + ``java.io.tmpdir``, defaulting to ``/tmp`` on Linux. If this location is backed by a `tmpfs `_ + on your machine, large file uploads via API will cause RAM and/or swap usage bursts. You might want to point this to + a different location, restrict its maximum size, and monitor for stale uploads. .. _Branding Your Installation: @@ -800,7 +819,7 @@ Refer to :ref:`:NavbarSupportUrl` for setting to a fully-qualified URL which wil Sign Up ####### -Refer to :ref:`:SignUpUrl` and :ref:`conf-allow-signup` for setting a relative path URL to which users will be sent for signup and for controlling the ability for creating local user accounts. +Refer to :ref:`:SignUpUrl` and :ref:`conf-allow-signup` for setting a relative path URL to which users will be sent for sign up and for controlling the ability to create local user accounts. Custom Header ^^^^^^^^^^^^^ @@ -1044,6 +1063,14 @@ On a new Dataverse installation, users may select from the following licenses or (Note that existing Dataverse installations which are upgraded from 5.9 or previous will only offer CC0 1.0, added automatically during the upgrade to version 5.10.) +If the Dataverse installation supports multiple languages, the license name/description translations should be added to the ``License`` properties files. (See :ref:`i18n` for more on properties files and internationalization in general.) +To create the key, the license name has to be converted to lowercase, with spaces replaced by underscores. + +Example:: + + license.cc0_1.0.description=Creative Commons CC0 1.0 Universal Public Domain Dedication. + license.cc0_1.0.name=CC0 1.0
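As a further illustration of the key naming rule above (license name lowercased, spaces replaced with underscores), a hypothetical entry for a license named "CC BY 4.0" would look like this::

    license.cc_by_4.0.description=Creative Commons Attribution 4.0 International License.
    license.cc_by_4.0.name=CC BY 4.0

(The exact keys depend on the license names configured in your installation.)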
+ You have a lot of control over which licenses and terms are available. You can remove licenses and add new ones. You can decide which license is the default. You can remove "Custom Dataset Terms" as an option. You can remove all licenses and make "Custom Dataset Terms" the only option. Before making changes, you are encouraged to read the :ref:`license-terms` section of the User Guide about why CC0 is the default and what the "Custom Dataset Terms" option allows. @@ -1092,6 +1119,29 @@ Disabling Custom Dataset Terms See :ref:`:AllowCustomTermsOfUse` for how to disable the "Custom Dataset Terms" option. +.. _ChangeLicenseSortOrder: + +Sorting licenses +---------------- + +The default order of licenses in the dropdown in the user interface is as follows: + +* The default license is shown first +* Followed by the remaining installed licenses in the order of installation +* The custom license is at the end + +Only the order of the installed licenses can be changed with the API calls. The default license always remains first and the custom license last. + +The order of licenses can be changed by setting the ``sortOrder`` property of a license. For the purpose of making sorting easier and to allow grouping of the licenses, the ``sortOrder`` property does not have to be unique. Licenses with the same ``sortOrder`` are sorted by their ID, i.e., first by the sortOrder, then by the ID. Nevertheless, you can set a unique ``sortOrder`` for every license in order to sort them fully manually. + +The ``sortOrder`` is a whole number and is used to sort licenses in ascending order. + +Changing the sorting order of a license specified by the license ``$ID`` is done by superusers using the following API call: + +.. code-block:: bash + + export SORT_ORDER=100 + curl -X PUT -H 'Content-Type: application/json' -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/licenses/$ID/:sortOrder/$SORT_ORDER .. _BagIt File Handler: BagIt File Handler @@ -1412,11 +1462,27 @@ Note that it's also possible to use the ``dataverse.fqdn`` as a variable, if you We are absolutely aware that it's confusing to have both ``dataverse.fqdn`` and ``dataverse.siteUrl``. https://github.com/IQSS/dataverse/issues/6636 is about resolving this confusion. +.. _dataverse.files.directory: + dataverse.files.directory +++++++++++++++++++++++++ This is how you configure the path Dataverse uses for temporary files. (File store specific dataverse.files.\.directory options set the permanent data storage locations.) +.. _dataverse.files.uploads: + +dataverse.files.uploads ++++++++++++++++++++++++ + +Configure a folder to store the incoming file stream during uploads (before transferring to `${dataverse.files.directory}/temp`). +Please also see :ref:`temporary-file-storage` for more details. +You can use an absolute path or a relative path, which is relative to the application server domain directory. + +Defaults to ``./uploads``, which resolves to ``/usr/local/payara5/glassfish/domains/domain1/uploads`` in a default +installation. + +Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_FILES_UPLOADS``. + dataverse.auth.password-reset-timeout-in-minutes ++++++++++++++++++++++++++++++++++++++++++++++++ @@ -1670,6 +1736,39 @@ This setting is useful in cases such as running your Dataverse installation behi "HTTP_VIA", "REMOTE_ADDR" +
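(A brief, illustrative sketch for the ``dataverse.files.uploads`` option documented above; the ``/mnt/dv-uploads`` path is only an example of a dedicated volume for incoming uploads.)

.. code-block:: bash

   # set it as a JVM option (takes effect after a restart of the application server)
   ./asadmin create-jvm-options "-Ddataverse.files.uploads=/mnt/dv-uploads"
   # or supply it via the MicroProfile Config environment variable
   export DATAVERSE_FILES_UPLOADS=/mnt/dv-uploads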
+.. _dataverse.api.signature-secret: + +dataverse.api.signature-secret +++++++++++++++++++++++++++++++ + +Context: Dataverse has the ability to create "Signed URLs" for its API calls. Using signed URLs is more secure than +providing API tokens, which are long-lived and give the holder all of the permissions of the user. In contrast, signed URLs +are time-limited and only allow the action of the API call in the URL. See :ref:`api-exttools-auth` and +:ref:`api-native-signed-url` for more details. + +The key used to sign a URL is created from the API token of the creating user plus a signature-secret provided by an administrator. +**Using a signature-secret is highly recommended.** This setting defaults to an empty string. Using a non-empty +signature-secret makes it impossible for someone who knows an API token to forge signed URLs and provides extra security by +making the overall signing key longer. + +Since the signature-secret is sensitive, you should treat it like a password. Here is an example of how to set your shared secret +with the secure method "password alias": + +.. code-block:: shell + + echo "AS_ADMIN_ALIASPASSWORD=change-me-super-secret" > /tmp/password.txt + asadmin create-password-alias --passwordfile /tmp/password.txt dataverse.api.signature-secret + rm /tmp/password.txt + +Can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable +``DATAVERSE_API_SIGNATURE_SECRET``. + +**WARNING:** For security, do not use the sources "environment variable" or "system property" (JVM option) in a +production context! Rely on password alias, secrets directory or cloud-based sources instead! + + + .. _:ApplicationServerSettings: Application Server Settings @@ -2197,7 +2296,7 @@ If ``:SolrFullTextIndexing`` is set to true, the content of files of any size wi :SignUpUrl ++++++++++ -The relative path URL to which users will be sent for signup. The default setting is below. +The relative path URL to which users will be sent for sign up. The default setting is below. ``curl -X PUT -d '/dataverseuser.xhtml?editMode=CREATE' http://localhost:8080/api/admin/settings/:SignUpUrl`` @@ -2273,6 +2372,31 @@ Set to false to disallow local accounts from being created. See also the section ``curl -X PUT -d 'false' http://localhost:8080/api/admin/settings/:AllowSignUp`` +.. _:AllowRemoteAuthSignUp: + +:AllowRemoteAuthSignUp ++++++++++++++++++++++ + +This is a **compound** setting that enables or disables sign up for new accounts for individual OAuth2 authentication methods (such as Orcid, Google and GitHub). This way it is possible to continue allowing logins via an OAuth2 provider for already existing accounts, without letting new users create accounts with this method. + +By default, if the setting is not present, all configured OAuth sign ups are allowed. If the setting is present, but the value for this specific method is not specified, it is assumed that the sign ups are allowed for it. + +Examples: + +This curl command... + +``curl -X PUT -d '{"default":"false"}' http://localhost:8080/api/admin/settings/:AllowRemoteAuthSignUp`` + +...disables all OAuth sign ups. + +This curl command... + +``curl -X PUT -d '{"default":"true","google":"false"}' http://localhost:8080/api/admin/settings/:AllowRemoteAuthSignUp`` + +...keeps sign ups open for all the OAuth login providers except google. (That said, note that the ``"default":"true"`` part in this example is redundant, since it would default to true anyway for all the methods other than google.)
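One more illustration of the compound behavior (this assumes OAuth providers registered with the ids ``github`` and ``orcid``, following the same lowercase convention as ``google`` above):

``curl -X PUT -d '{"github":"false","orcid":"false"}' http://localhost:8080/api/admin/settings/:AllowRemoteAuthSignUp``

...closes sign ups for GitHub and ORCID only, while every provider not listed (including Google) stays open, since unspecified providers default to true.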
+ +See also :doc:`oauth2`. + :FileFixityChecksumAlgorithm ++++++++++++++++++++++++++++ @@ -3042,3 +3166,7 @@ The interval in seconds between Dataverse calls to Globus to check on upload pro +++++++++++++++++++++++++ A true/false option to add a Globus transfer option to the file download menu which is not yet fully supported in the dataverse-globus app. See :ref:`globus-support` for details. + + + +.. _supported MicroProfile Config API source: https://docs.payara.fish/community/docs/Technical%20Documentation/MicroProfile/Config/Overview.html diff --git a/doc/sphinx-guides/source/installation/oauth2.rst b/doc/sphinx-guides/source/installation/oauth2.rst index 0dfdb0393e0..8dffde87cc2 100644 --- a/doc/sphinx-guides/source/installation/oauth2.rst +++ b/doc/sphinx-guides/source/installation/oauth2.rst @@ -78,6 +78,11 @@ This template can be used for configuring this setting (**this is not something - :download:`orcid-sandbox.json <../_static/installation/files/root/auth-providers/orcid-sandbox.json>` +Disabling Sign Up +~~~~~~~~~~~~~~~~~ + +See :ref:`:AllowRemoteAuthSignUp`. + Converting Local Users to OAuth ------------------------------- diff --git a/doc/sphinx-guides/source/user/find-use-data.rst b/doc/sphinx-guides/source/user/find-use-data.rst index 42e1a2b23d4..2e82a1482b4 100755 --- a/doc/sphinx-guides/source/user/find-use-data.rst +++ b/doc/sphinx-guides/source/user/find-use-data.rst @@ -39,6 +39,13 @@ enter search terms for Dataverse collections, dataset metadata (citation and dom metadata. If you are searching for tabular data files you can also search at the variable level for name and label. To find out more about what each field searches, hover over the field name for a detailed description of the field. +.. _geospatial-search: + +Geospatial Search +----------------- + +Geospatial search is available from the :doc:`/api/search` (look for "geo" parameters). The metadata fields that are geospatially indexed are "West Longitude", "East Longitude", "North Latitude", and "South Latitude" from the "Geographic Bounding Box" field in the "Geospatial Metadata" block. + Browsing a Dataverse Installation --------------------------------- diff --git a/doc/sphinx-guides/source/versions.rst b/doc/sphinx-guides/source/versions.rst index 1cbd785b5dd..e0a344de9a1 100755 --- a/doc/sphinx-guides/source/versions.rst +++ b/doc/sphinx-guides/source/versions.rst @@ -6,7 +6,8 @@ Dataverse Software Documentation Versions This list provides a way to refer to the documentation for previous versions of the Dataverse Software. In order to learn more about the updates delivered from one version to another, visit the `Releases `__ page in our GitHub repo. -- 5.12 +- 5.12.1 +- `5.12 `__ - `5.11.1 `__ - `5.11 `__ - `5.10.1 `__ diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index ccc0a9a7f60..bf37299f2df 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -129,7 +129,7 @@ - 5.12 + 5.12.1 11 UTF-8 @@ -163,7 +163,7 @@ 4.4.14 - 5.0.0-RC1 + 5.0.0-RC2 1.15.0 @@ -299,6 +299,11 @@ true + + unidata-all + Unidata All + https://artifacts.unidata.ucar.edu/repository/unidata-all/ + dvn.private Local repository for hosting jars not available from network repositories. 
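(To illustrate the Geospatial Search addition to the User Guide above: a Search API query could look like the sketch below. The ``geo_point`` (latitude,longitude) and ``geo_radius`` (in kilometers) parameter names are assumed from the "geo" parameters referenced in the Search API guide for this release; consult that page for the authoritative syntax.)

.. code-block:: bash

   # find items whose geographic bounding box falls within 5 km of the given point
   curl "https://demo.dataverse.org/api/search?q=*&geo_point=42.3,-71.1&geo_radius=5"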
diff --git a/pom.xml b/pom.xml index c6459cfc55c..8b6f98c5896 100644 --- a/pom.xml +++ b/pom.xml @@ -25,6 +25,7 @@ 0.8.7 5.2.1 2.4.1 + 5.5.3 org.junit.jupiter diff --git a/scripts/api/data/licenses/licenseCC-BY-4.0.json b/scripts/api/data/licenses/licenseCC-BY-4.0.json index 5596e65e947..59201b8d08e 100644 --- a/scripts/api/data/licenses/licenseCC-BY-4.0.json +++ b/scripts/api/data/licenses/licenseCC-BY-4.0.json @@ -3,5 +3,6 @@ "uri": "http://creativecommons.org/licenses/by/4.0", "shortDescription": "Creative Commons Attribution 4.0 International License.", "iconUrl": "https://licensebuttons.net/l/by/4.0/88x31.png", - "active": true + "active": true, + "sortOrder": 2 } diff --git a/scripts/api/data/licenses/licenseCC-BY-NC-4.0.json b/scripts/api/data/licenses/licenseCC-BY-NC-4.0.json index 8154c9ec5df..c19087664db 100644 --- a/scripts/api/data/licenses/licenseCC-BY-NC-4.0.json +++ b/scripts/api/data/licenses/licenseCC-BY-NC-4.0.json @@ -3,5 +3,6 @@ "uri": "http://creativecommons.org/licenses/by-nc/4.0", "shortDescription": "Creative Commons Attribution-NonCommercial 4.0 International License.", "iconUrl": "https://licensebuttons.net/l/by-nc/4.0/88x31.png", - "active": true + "active": true, + "sortOrder": 4 } diff --git a/scripts/api/data/licenses/licenseCC-BY-NC-ND-4.0.json b/scripts/api/data/licenses/licenseCC-BY-NC-ND-4.0.json index 247ce52f6ea..2e374917d28 100644 --- a/scripts/api/data/licenses/licenseCC-BY-NC-ND-4.0.json +++ b/scripts/api/data/licenses/licenseCC-BY-NC-ND-4.0.json @@ -3,5 +3,6 @@ "uri": "http://creativecommons.org/licenses/by-nc-nd/4.0", "shortDescription": "Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.", "iconUrl": "https://licensebuttons.net/l/by-nc-nd/4.0/88x31.png", - "active": true + "active": true, + "sortOrder": 7 } diff --git a/scripts/api/data/licenses/licenseCC-BY-NC-SA-4.0.json b/scripts/api/data/licenses/licenseCC-BY-NC-SA-4.0.json index e9726fb6374..5018884f65e 100644 --- a/scripts/api/data/licenses/licenseCC-BY-NC-SA-4.0.json +++ b/scripts/api/data/licenses/licenseCC-BY-NC-SA-4.0.json @@ -3,5 +3,6 @@ "uri": "http://creativecommons.org/licenses/by-nc-sa/4.0", "shortDescription": "Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License.", "iconUrl": "https://licensebuttons.net/l/by-nc-sa/4.0/88x31.png", - "active": true + "active": true, + "sortOrder": 3 } diff --git a/scripts/api/data/licenses/licenseCC-BY-ND-4.0.json b/scripts/api/data/licenses/licenseCC-BY-ND-4.0.json index 7ae81bacc10..317d459a7ae 100644 --- a/scripts/api/data/licenses/licenseCC-BY-ND-4.0.json +++ b/scripts/api/data/licenses/licenseCC-BY-ND-4.0.json @@ -3,5 +3,6 @@ "uri": "http://creativecommons.org/licenses/by-nd/4.0", "shortDescription": "Creative Commons Attribution-NoDerivatives 4.0 International License.", "iconUrl": "https://licensebuttons.net/l/by-nd/4.0/88x31.png", - "active": true + "active": true, + "sortOrder": 6 } diff --git a/scripts/api/data/licenses/licenseCC-BY-SA-4.0.json b/scripts/api/data/licenses/licenseCC-BY-SA-4.0.json index e9a02880885..0d28c9423aa 100644 --- a/scripts/api/data/licenses/licenseCC-BY-SA-4.0.json +++ b/scripts/api/data/licenses/licenseCC-BY-SA-4.0.json @@ -3,5 +3,6 @@ "uri": "http://creativecommons.org/licenses/by-sa/4.0", "shortDescription": "Creative Commons Attribution-ShareAlike 4.0 International License.", "iconUrl": "https://licensebuttons.net/l/by-sa/4.0/88x31.png", - "active": true + "active": true, + "sortOrder": 5 } diff --git a/scripts/api/data/licenses/licenseCC0-1.0.json 
b/scripts/api/data/licenses/licenseCC0-1.0.json index 396ba133327..216260a5de8 100644 --- a/scripts/api/data/licenses/licenseCC0-1.0.json +++ b/scripts/api/data/licenses/licenseCC0-1.0.json @@ -3,5 +3,6 @@ "uri": "http://creativecommons.org/publicdomain/zero/1.0", "shortDescription": "Creative Commons CC0 1.0 Universal Public Domain Dedication.", "iconUrl": "https://licensebuttons.net/p/zero/1.0/88x31.png", - "active": true + "active": true, + "sortOrder": 1 } diff --git a/scripts/api/data/metadatablocks/citation.tsv b/scripts/api/data/metadatablocks/citation.tsv index 29d121aae16..1b1ff0ae819 100644 --- a/scripts/api/data/metadatablocks/citation.tsv +++ b/scripts/api/data/metadatablocks/citation.tsv @@ -96,22 +96,23 @@ subject Other D12 13 publicationIDType ark 0 publicationIDType arXiv 1 - publicationIDType bibcode 2 - publicationIDType doi 3 - publicationIDType ean13 4 - publicationIDType eissn 5 - publicationIDType handle 6 - publicationIDType isbn 7 - publicationIDType issn 8 - publicationIDType istc 9 - publicationIDType lissn 10 - publicationIDType lsid 11 - publicationIDType pmid 12 - publicationIDType purl 13 - publicationIDType upc 14 - publicationIDType url 15 - publicationIDType urn 16 - publicationIDType DASH-NRS 17 + publicationIDType bibcode 2 + publicationIDType cstr 3 + publicationIDType doi 4 + publicationIDType ean13 5 + publicationIDType eissn 6 + publicationIDType handle 7 + publicationIDType isbn 8 + publicationIDType issn 9 + publicationIDType istc 10 + publicationIDType lissn 11 + publicationIDType lsid 12 + publicationIDType pmid 13 + publicationIDType purl 14 + publicationIDType upc 15 + publicationIDType url 16 + publicationIDType urn 17 + publicationIDType DASH-NRS 18 contributorType Data Collector 0 contributorType Data Curator 1 contributorType Data Manager 2 diff --git a/src/main/java/edu/harvard/iq/dataverse/DashboardPage.java b/src/main/java/edu/harvard/iq/dataverse/DashboardPage.java index 5b6cdd23775..99c7951c96e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DashboardPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DashboardPage.java @@ -97,12 +97,8 @@ public int getNumberOfConfiguredHarvestClients() { } public long getNumberOfHarvestedDatasets() { - List configuredHarvestingClients = harvestingClientService.getAllHarvestingClients(); - if (configuredHarvestingClients == null || configuredHarvestingClients.isEmpty()) { - return 0L; - } - Long numOfDatasets = harvestingClientService.getNumberOfHarvestedDatasetByClients(configuredHarvestingClients); + Long numOfDatasets = harvestingClientService.getNumberOfHarvestedDatasetsByAllClients(); if (numOfDatasets != null && numOfDatasets > 0L) { return numOfDatasets; @@ -142,7 +138,7 @@ public String getHarvestClientsInfoLabel() { infoLabel = configuredHarvestingClients.size() + " harvesting clients configured; "; } - Long numOfDatasets = harvestingClientService.getNumberOfHarvestedDatasetByClients(configuredHarvestingClients); + Long numOfDatasets = harvestingClientService.getNumberOfHarvestedDatasetsByAllClients(); if (numOfDatasets != null && numOfDatasets > 0L) { return infoLabel + numOfDatasets + " harvested datasets"; diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index a4f82d41bac..e91221ce36c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -391,19 +391,21 @@ private DatasetVersion createNewDatasetVersion(Template template, FileMetadata f /** * The 
"edit version" is the most recent *draft* of a dataset, and if the - * latest version of a dataset is published, a new draft will be created. - * + * latest version of a dataset is published, a new draft will be created. If + * you don't want to create a new version, you should be using + * getLatestVersion. + * * @return The edit version {@code this}. */ - public DatasetVersion getEditVersion() { - return getEditVersion(null, null); + public DatasetVersion getOrCreateEditVersion() { + return getOrCreateEditVersion(null, null); } - public DatasetVersion getEditVersion(FileMetadata fm) { - return getEditVersion(null, fm); + public DatasetVersion getOrCreateEditVersion(FileMetadata fm) { + return getOrCreateEditVersion(null, fm); } - public DatasetVersion getEditVersion(Template template, FileMetadata fm) { + public DatasetVersion getOrCreateEditVersion(Template template, FileMetadata fm) { DatasetVersion latestVersion = this.getLatestVersion(); if (!latestVersion.isWorkingCopy() || template != null) { // if the latest version is released or archived, create a new version for editing diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 0a8db69bf5b..6e71f6c5042 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -2067,7 +2067,7 @@ private String init(boolean initFull) { } //Initalize with the default if there is one dataset.setTemplate(selectedTemplate); - workingVersion = dataset.getEditVersion(selectedTemplate, null); + workingVersion = dataset.getOrCreateEditVersion(selectedTemplate, null); updateDatasetFieldInputLevels(); } else { workingVersion = dataset.getCreateVersion(licenseServiceBean.getDefault()); @@ -2401,7 +2401,7 @@ private void resetVersionUI() { AuthenticatedUser au = (AuthenticatedUser) session.getUser(); //On create set pre-populated fields - for (DatasetField dsf : dataset.getEditVersion().getDatasetFields()) { + for (DatasetField dsf : dataset.getOrCreateEditVersion().getDatasetFields()) { if (dsf.getDatasetFieldType().getName().equals(DatasetFieldConstant.depositor) && dsf.isEmpty()) { dsf.getDatasetFieldValues().get(0).setValue(au.getLastName() + ", " + au.getFirstName()); } @@ -2458,7 +2458,7 @@ private void refreshSelectedFiles(List filesToRefresh){ } String termsOfAccess = workingVersion.getTermsOfUseAndAccess().getTermsOfAccess(); boolean requestAccess = workingVersion.getTermsOfUseAndAccess().isFileAccessRequest(); - workingVersion = dataset.getEditVersion(); + workingVersion = dataset.getOrCreateEditVersion(); workingVersion.getTermsOfUseAndAccess().setTermsOfAccess(termsOfAccess); workingVersion.getTermsOfUseAndAccess().setFileAccessRequest(requestAccess); List newSelectedFiles = new ArrayList<>(); @@ -2521,7 +2521,7 @@ public void edit(EditMode editMode) { if (this.readOnly) { dataset = datasetService.find(dataset.getId()); } - workingVersion = dataset.getEditVersion(); + workingVersion = dataset.getOrCreateEditVersion(); clone = workingVersion.cloneDatasetVersion(); if (editMode.equals(EditMode.METADATA)) { datasetVersionUI = datasetVersionUI.initDatasetVersionUI(workingVersion, true); @@ -3452,7 +3452,7 @@ private void deleteFiles(List filesToDelete) { if (markedForDelete.getId() != null) { // This FileMetadata has an id, i.e., it exists in the database. 
// We are going to remove this filemetadata from the version: - dataset.getEditVersion().getFileMetadatas().remove(markedForDelete); + dataset.getOrCreateEditVersion().getFileMetadatas().remove(markedForDelete); // But the actual delete will be handled inside the UpdateDatasetCommand // (called later on). The list "filesToBeDeleted" is passed to the // command as a parameter: @@ -3678,7 +3678,7 @@ public String save() { // have been created in the dataset. dataset = datasetService.find(dataset.getId()); - List filesAdded = ingestService.saveAndAddFilesToDataset(dataset.getEditVersion(), newFiles, null, true); + List filesAdded = ingestService.saveAndAddFilesToDataset(dataset.getOrCreateEditVersion(), newFiles, null, true); newFiles.clear(); // and another update command: diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 6cf294ffd6d..fc8df8681af 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -539,7 +539,7 @@ public String init() { return permissionsWrapper.notFound(); } - workingVersion = dataset.getEditVersion(); + workingVersion = dataset.getOrCreateEditVersion(); //TODO: review if we we need this check; // as getEditVersion should either return the exisiting draft or create a new one @@ -890,7 +890,7 @@ private void deleteFiles(List filesForDelete) { // ToDo - FileMetadataUtil.removeFileMetadataFromList should handle these two // removes so they could be put after this if clause and the else clause could // be removed. - dataset.getEditVersion().getFileMetadatas().remove(markedForDelete); + dataset.getOrCreateEditVersion().getFileMetadatas().remove(markedForDelete); fileMetadatas.remove(markedForDelete); filesToBeDeleted.add(markedForDelete); @@ -907,7 +907,7 @@ private void deleteFiles(List filesForDelete) { // 1. delete the filemetadata from the local display list: FileMetadataUtil.removeFileMetadataFromList(fileMetadatas, markedForDelete); // 2. delete the filemetadata from the version: - FileMetadataUtil.removeFileMetadataFromList(dataset.getEditVersion().getFileMetadatas(), markedForDelete); + FileMetadataUtil.removeFileMetadataFromList(dataset.getOrCreateEditVersion().getFileMetadatas(), markedForDelete); } if (markedForDelete.getDataFile().getId() == null) { @@ -1201,7 +1201,7 @@ public String save() { */ } - workingVersion = dataset.getEditVersion(); + workingVersion = dataset.getOrCreateEditVersion(); logger.fine("working version id: " + workingVersion.getId()); if (FileEditMode.EDIT == mode && Referrer.FILE == referrer) { diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java index 7f2c6dfca5c..85eb79d2ddc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -365,7 +365,7 @@ public String saveProvFreeform(String freeformTextInput, DataFile dataFileFromPo file.setProvEntityName(dataFileFromPopup.getProvEntityName()); //passing this value into the file being saved here is pretty hacky. 
Command cmd; - for (FileMetadata fmw : editDataset.getEditVersion().getFileMetadatas()) { + for (FileMetadata fmw : editDataset.getOrCreateEditVersion().getFileMetadatas()) { if (fmw.getDataFile().equals(this.fileMetadata.getDataFile())) { cmd = new PersistProvFreeFormCommand(dvRequestService.getDataverseRequest(), file, freeformTextInput); commandEngine.submit(cmd); @@ -381,15 +381,15 @@ public String restrictFile(boolean restricted) throws CommandException{ String fileNames = null; editDataset = this.file.getOwner(); if (restricted) { // get values from access popup - editDataset.getEditVersion().getTermsOfUseAndAccess().setTermsOfAccess(termsOfAccess); - editDataset.getEditVersion().getTermsOfUseAndAccess().setFileAccessRequest(fileAccessRequest); + editDataset.getOrCreateEditVersion().getTermsOfUseAndAccess().setTermsOfAccess(termsOfAccess); + editDataset.getOrCreateEditVersion().getTermsOfUseAndAccess().setFileAccessRequest(fileAccessRequest); } //using this method to update the terms for datasets that are out of compliance // with Terms of Access requirement - may get her with a file that is already restricted // we'll allow it try { Command cmd; - for (FileMetadata fmw : editDataset.getEditVersion().getFileMetadatas()) { + for (FileMetadata fmw : editDataset.getOrCreateEditVersion().getFileMetadatas()) { if (fmw.getDataFile().equals(this.fileMetadata.getDataFile())) { fileNames += fmw.getLabel(); cmd = new RestrictFileCommand(fmw.getDataFile(), dvRequestService.getDataverseRequest(), restricted); @@ -424,7 +424,7 @@ public String deleteFile() { FileMetadata markedForDelete = null; - for (FileMetadata fmd : editDataset.getEditVersion().getFileMetadatas()) { + for (FileMetadata fmd : editDataset.getOrCreateEditVersion().getFileMetadatas()) { if (fmd.getDataFile().getId().equals(fileId)) { markedForDelete = fmd; @@ -435,17 +435,17 @@ public String deleteFile() { // the file already exists as part of this dataset // so all we remove is the file from the fileMetadatas (for display) // and let the delete be handled in the command (by adding it to the filesToBeDeleted list - editDataset.getEditVersion().getFileMetadatas().remove(markedForDelete); + editDataset.getOrCreateEditVersion().getFileMetadatas().remove(markedForDelete); filesToBeDeleted.add(markedForDelete); } else { List filesToKeep = new ArrayList<>(); - for (FileMetadata fmo : editDataset.getEditVersion().getFileMetadatas()) { + for (FileMetadata fmo : editDataset.getOrCreateEditVersion().getFileMetadatas()) { if (!fmo.getDataFile().getId().equals(this.getFile().getId())) { filesToKeep.add(fmo); } } - editDataset.getEditVersion().setFileMetadatas(filesToKeep); + editDataset.getOrCreateEditVersion().setFileMetadatas(filesToKeep); } fileDeleteInProgress = true; @@ -612,7 +612,7 @@ public void setTermsMet(boolean termsMet) { public String save() { // Validate - Set constraintViolations = editDataset.getEditVersion().validate(); + Set constraintViolations = editDataset.getOrCreateEditVersion().validate(); if (!constraintViolations.isEmpty()) { //JsfHelper.addFlashMessage(JH.localize("dataset.message.validationError")); fileDeleteInProgress = false; @@ -629,7 +629,7 @@ public String save() { if (!filesToBeDeleted.isEmpty()) { // We want to delete the file (there's always only one file with this page) - editDataset.getEditVersion().getFileMetadatas().remove(filesToBeDeleted.get(0)); + editDataset.getOrCreateEditVersion().getFileMetadatas().remove(filesToBeDeleted.get(0)); deleteFileId = filesToBeDeleted.get(0).getDataFile().getId(); 
deleteStorageLocation = datafileService.getPhysicalFileToDelete(filesToBeDeleted.get(0).getDataFile()); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index ed9a544e726..e919ecf786d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -46,9 +46,11 @@ import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; import edu.harvard.iq.dataverse.locality.StorageSiteServiceBean; import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.UrlSignerUtil; import edu.harvard.iq.dataverse.util.json.JsonParser; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean; @@ -362,7 +364,7 @@ protected AuthenticatedUser findUserByApiToken( String apiKey ) { protected User findUserOrDie() throws WrappedResponse { final String requestApiKey = getRequestApiKey(); final String requestWFKey = getRequestWorkflowInvocationID(); - if (requestApiKey == null && requestWFKey == null) { + if (requestApiKey == null && requestWFKey == null && getRequestParameter(UrlSignerUtil.SIGNED_URL_TOKEN)==null) { return GuestUser.get(); } PrivateUrlUser privateUrlUser = privateUrlSvc.getPrivateUrlUserFromToken(requestApiKey); @@ -419,10 +421,36 @@ private AuthenticatedUser findAuthenticatedUserOrDie( String key, String wfid ) } else { throw new WrappedResponse(badWFKey(wfid)); } + } else if (getRequestParameter(UrlSignerUtil.SIGNED_URL_TOKEN) != null) { + AuthenticatedUser authUser = getAuthenticatedUserFromSignedUrl(); + if (authUser != null) { + return authUser; + } } //Just send info about the apiKey - workflow users will learn about invocationId elsewhere throw new WrappedResponse(badApiKey(null)); } + + private AuthenticatedUser getAuthenticatedUserFromSignedUrl() { + AuthenticatedUser authUser = null; + // The signedUrl contains a param telling which user this is supposed to be for. + // We don't trust this. So we lookup that user, and get their API key, and use + // that as a secret in validating the signedURL. If the signature can't be + // validated with their key, the user (or their API key) has been changed and + // we reject the request. + // ToDo - add null checks/ verify that calling methods catch things. + String user = httpRequest.getParameter("user"); + AuthenticatedUser targetUser = authSvc.getAuthenticatedUser(user); + String key = JvmSettings.API_SIGNING_SECRET.lookupOptional().orElse("") + + authSvc.findApiTokenByUser(targetUser).getTokenString(); + String signedUrl = httpRequest.getRequestURL().toString() + "?" 
+ httpRequest.getQueryString(); + String method = httpRequest.getMethod(); + boolean validated = UrlSignerUtil.isValidUrl(signedUrl, user, method, key); + if (validated) { + authUser = targetUser; + } + return authUser; + } protected Dataverse findDataverseOrDie( String dvIdtf ) throws WrappedResponse { Dataverse dv = findDataverse(dvIdtf); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index abeedf23b59..75aa57a0d2b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -1384,7 +1384,7 @@ public Response allowAccessRequest(@PathParam("id") String datasetToAllowAccessI return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.fileAccess.failure.noUser", args)); } - dataset.getEditVersion().getTermsOfUseAndAccess().setFileAccessRequest(allowRequest); + dataset.getOrCreateEditVersion().getTermsOfUseAndAccess().setFileAccessRequest(allowRequest); try { engineSvc.submit(new UpdateDatasetVersionCommand(dataset, dataverseRequest)); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index ef08444af69..2c147b94243 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -14,6 +14,7 @@ import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.DataverseSession; import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.validation.EMailValidator; import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.GlobalId; @@ -34,6 +35,7 @@ import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProvider; import edu.harvard.iq.dataverse.authorization.providers.shib.ShibServiceBean; import edu.harvard.iq.dataverse.authorization.providers.shib.ShibUtil; +import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailData; import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailException; @@ -47,6 +49,7 @@ import javax.json.Json; import javax.json.JsonArrayBuilder; import javax.json.JsonObjectBuilder; +import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; @@ -90,6 +93,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.DeleteRoleCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteTemplateCommand; import edu.harvard.iq.dataverse.engine.command.impl.RegisterDvObjectCommand; +import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.userdata.UserListMaker; @@ -98,6 +102,7 @@ import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.UrlSignerUtil; import java.io.IOException; import java.io.OutputStream; @@ -1813,6 +1818,9 @@ public Response submitDatasetVersionToArchive(@PathParam("id") String dsid, Dataset ds = findDatasetOrDie(dsid); DatasetVersion dv = datasetversionService.findByFriendlyVersionNumber(ds.getId(), versionNumber); + if(dv==null) { + return error(Status.BAD_REQUEST, 
"Requested version not found."); + } if (dv.getArchivalCopyLocation() == null) { String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName); // Note - the user is being sent via the createDataverseRequest(au) call to the @@ -1858,7 +1866,7 @@ public void run() { return error(Status.BAD_REQUEST, "Version was already submitted for archiving."); } } catch (WrappedResponse e1) { - return error(Status.UNAUTHORIZED, "api key required"); + return e1.getResponse(); } } @@ -1949,7 +1957,7 @@ public void run() { return error(Status.BAD_REQUEST, "No unarchived published dataset versions found"); } } catch (WrappedResponse e1) { - return error(Status.UNAUTHORIZED, "api key required"); + return e1.getResponse(); } } @@ -2241,4 +2249,52 @@ public Response getBannerMessages(@PathParam("id") Long id) throws WrappedRespon .collect(toJsonArray())); } + + @POST + @Consumes("application/json") + @Path("/requestSignedUrl") + public Response getSignedUrl(JsonObject urlInfo) { + AuthenticatedUser superuser = null; + try { + superuser = findAuthenticatedUserOrDie(); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + if (superuser == null || !superuser.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Requesting signed URLs is restricted to superusers."); + } + + String userId = urlInfo.getString("user"); + String key=null; + if (userId != null) { + AuthenticatedUser user = authSvc.getAuthenticatedUser(userId); + // If a user param was sent, we sign the URL for them, otherwise on behalf of + // the superuser who made this api call + if (user != null) { + ApiToken apiToken = authSvc.findApiTokenByUser(user); + if (apiToken != null && !apiToken.isExpired() && !apiToken.isDisabled()) { + key = apiToken.getTokenString(); + } + } else { + userId = superuser.getUserIdentifier(); + // We ~know this exists - the superuser just used it and it was unexpired/not + // disabled. 
(ToDo - if we want this to work with workflow tokens (or as a + // signed URL), we should do more checking as for the user above)) + key = authSvc.findApiTokenByUser(superuser).getTokenString(); + } + if (key == null) { + return error(Response.Status.CONFLICT, "Do not have a valid user with apiToken"); + } + key = JvmSettings.API_SIGNING_SECRET.lookupOptional().orElse("") + key; + } + + String baseUrl = urlInfo.getString("url"); + int timeout = urlInfo.getInt(ExternalToolHandler.TIMEOUT, 10); + String method = urlInfo.getString(ExternalToolHandler.HTTP_METHOD, "GET"); + + String signedUrl = UrlSignerUtil.signUrl(baseUrl, timeout, userId, method, key); + + return ok(Json.createObjectBuilder().add(ExternalToolHandler.SIGNED_URL, signedUrl)); + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index aff543e643c..7695a00833e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -57,10 +57,11 @@ import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetThumbnailCommand; import edu.harvard.iq.dataverse.export.DDIExportServiceBean; import edu.harvard.iq.dataverse.export.ExportService; +import edu.harvard.iq.dataverse.externaltools.ExternalTool; +import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.privateurl.PrivateUrl; - -import edu.harvard.iq.dataverse.S3PackageImporter; +import edu.harvard.iq.dataverse.api.AbstractApiBean.WrappedResponse; import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO; import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.dataaccess.DataAccess; @@ -142,7 +143,6 @@ import javax.ws.rs.core.*; import javax.ws.rs.core.Response.Status; import static javax.ws.rs.core.Response.Status.BAD_REQUEST; -import javax.ws.rs.core.UriInfo; import org.apache.commons.lang3.StringUtils; import org.apache.solr.client.solrj.SolrServerException; @@ -630,7 +630,7 @@ public Response updateDraftVersion( String jsonBody, @PathParam("id") String id, DatasetVersion managedVersion; if (updateDraft) { - final DatasetVersion editVersion = ds.getEditVersion(); + final DatasetVersion editVersion = ds.getOrCreateEditVersion(); editVersion.setDatasetFields(incomingVersion.getDatasetFields()); editVersion.setTermsOfUseAndAccess(incomingVersion.getTermsOfUseAndAccess()); editVersion.getTermsOfUseAndAccess().setDatasetVersion(editVersion); @@ -639,7 +639,7 @@ public Response updateDraftVersion( String jsonBody, @PathParam("id") String id, return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid")); } Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req)); - managedVersion = managedDataset.getEditVersion(); + managedVersion = managedDataset.getOrCreateEditVersion(); } else { boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(incomingVersion.getTermsOfUseAndAccess(), null); if (!hasValidTerms) { @@ -698,7 +698,7 @@ public Response updateVersionMetadata(String jsonLDBody, @PathParam("id") String try { Dataset ds = findDatasetOrDie(id); DataverseRequest req = createDataverseRequest(findUserOrDie()); - DatasetVersion dsv = ds.getEditVersion(); + DatasetVersion dsv = ds.getOrCreateEditVersion(); boolean updateDraft = ds.getLatestVersion().isDraft(); dsv = JSONLDUtil.updateDatasetVersionMDFromJsonLD(dsv, jsonLDBody, 
metadataBlockService, datasetFieldSvc, !replaceTerms, false, licenseSvc); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); @@ -709,7 +709,7 @@ public Response updateVersionMetadata(String jsonLDBody, @PathParam("id") String DatasetVersion managedVersion; if (updateDraft) { Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req)); - managedVersion = managedDataset.getEditVersion(); + managedVersion = managedDataset.getOrCreateEditVersion(); } else { managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, dsv)); } @@ -731,14 +731,14 @@ public Response deleteMetadata(String jsonLDBody, @PathParam("id") String id) { try { Dataset ds = findDatasetOrDie(id); DataverseRequest req = createDataverseRequest(findUserOrDie()); - DatasetVersion dsv = ds.getEditVersion(); + DatasetVersion dsv = ds.getOrCreateEditVersion(); boolean updateDraft = ds.getLatestVersion().isDraft(); dsv = JSONLDUtil.deleteDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, licenseSvc); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); DatasetVersion managedVersion; if (updateDraft) { Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req)); - managedVersion = managedDataset.getEditVersion(); + managedVersion = managedDataset.getOrCreateEditVersion(); } else { managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, dsv)); } @@ -769,7 +769,7 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav Dataset ds = findDatasetOrDie(id); JsonObject json = Json.createReader(rdr).readObject(); - DatasetVersion dsv = ds.getEditVersion(); + DatasetVersion dsv = ds.getOrCreateEditVersion(); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); List fields = new LinkedList<>(); DatasetField singleField = null; @@ -882,7 +882,7 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav boolean updateDraft = ds.getLatestVersion().isDraft(); DatasetVersion managedVersion = updateDraft - ? execCommand(new UpdateDatasetVersionCommand(ds, req)).getEditVersion() + ? 
execCommand(new UpdateDatasetVersionCommand(ds, req)).getOrCreateEditVersion() : execCommand(new CreateDatasetVersionCommand(req, ds, dsv)); return ok(json(managedVersion)); @@ -932,7 +932,7 @@ private Response processDatasetUpdate(String jsonBody, String id, DataverseReque Dataset ds = findDatasetOrDie(id); JsonObject json = Json.createReader(rdr).readObject(); - DatasetVersion dsv = ds.getEditVersion(); + DatasetVersion dsv = ds.getOrCreateEditVersion(); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); List fields = new LinkedList<>(); DatasetField singleField = null; @@ -986,6 +986,7 @@ private Response processDatasetUpdate(String jsonBody, String id, DataverseReque dsf.setSingleValue(""); dsf.setSingleControlledVocabularyValue(null); } + cvvDisplay=""; } if (updateField.getDatasetFieldType().isControlledVocabulary()) { if (dsf.getDatasetFieldType().isAllowMultiples()) { @@ -1037,7 +1038,7 @@ private Response processDatasetUpdate(String jsonBody, String id, DataverseReque DatasetVersion managedVersion; if (updateDraft) { - managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getEditVersion(); + managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getOrCreateEditVersion(); } else { managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, dsv)); } @@ -3581,4 +3582,42 @@ private boolean isSingleVersionArchiving() { } return false; } + + // This method provides a callback for an external tool to retrieve it's + // parameters/api URLs. If the request is authenticated, e.g. by it being + // signed, the api URLs will be signed. If a guest request is made, the URLs + // will be plain/unsigned. + // This supports the cases where a tool is accessing a restricted resource (e.g. + // for a draft dataset), or public case. 
+ @GET + @Path("{id}/versions/{version}/toolparams/{tid}") + public Response getExternalToolDVParams(@PathParam("tid") long externalToolId, + @PathParam("id") String datasetId, @PathParam("version") String version, @QueryParam(value = "locale") String locale) { + try { + DataverseRequest req = createDataverseRequest(findUserOrDie()); + DatasetVersion target = getDatasetVersionOrDie(req, version, findDatasetOrDie(datasetId), null, null); + if (target == null) { + return error(BAD_REQUEST, "DatasetVersion not found."); + } + + ExternalTool externalTool = externalToolService.findById(externalToolId); + if(externalTool==null) { + return error(BAD_REQUEST, "External tool not found."); + } + if (!ExternalTool.Scope.DATASET.equals(externalTool.getScope())) { + return error(BAD_REQUEST, "External tool does not have dataset scope."); + } + ApiToken apiToken = null; + User u = findUserOrDie(); + if (u instanceof AuthenticatedUser) { + apiToken = authSvc.findApiTokenByUser((AuthenticatedUser) u); + } + + + ExternalToolHandler eth = new ExternalToolHandler(externalTool, target.getDataset(), apiToken, locale); + return ok(eth.createPostBody(eth.getParams(JsonUtil.getJsonObject(externalTool.getToolParameters())))); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java index 01f627ea23b..2410da04072 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java @@ -217,37 +217,37 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] } } } - if (redirect_url_str!=null) { + } + if (redirect_url_str != null) { - logger.fine("Data Access API: redirect url: " + redirect_url_str); - URI redirect_uri; + logger.fine("Data Access API: redirect url: " + redirect_url_str); + URI redirect_uri; - try { - redirect_uri = new URI(redirect_url_str); - } catch (URISyntaxException ex) { - logger.info("Data Access API: failed to create redirect url (" + redirect_url_str + ")"); - redirect_uri = null; - } - if (redirect_uri != null) { - // increment the download count, if necessary: - if (di.getGbr() != null && !(isThumbnailDownload(di) || isPreprocessedMetadataDownload(di))) { - try { - logger.fine("writing guestbook response, for a download redirect."); - Command cmd = new CreateGuestbookResponseCommand(di.getDataverseRequestService().getDataverseRequest(), di.getGbr(), di.getGbr().getDataFile().getOwner()); - di.getCommand().submit(cmd); - MakeDataCountEntry entry = new MakeDataCountEntry(di.getRequestUriInfo(), di.getRequestHttpHeaders(), di.getDataverseRequestService(), di.getGbr().getDataFile()); - mdcLogService.logEntry(entry); - } catch (CommandException e) { - } + try { + redirect_uri = new URI(redirect_url_str); + } catch (URISyntaxException ex) { + logger.info("Data Access API: failed to create redirect url (" + redirect_url_str + ")"); + redirect_uri = null; + } + if (redirect_uri != null) { + // increment the download count, if necessary: + if (di.getGbr() != null && !(isThumbnailDownload(di) || isPreprocessedMetadataDownload(di))) { + try { + logger.fine("writing guestbook response, for a download redirect."); + Command cmd = new CreateGuestbookResponseCommand(di.getDataverseRequestService().getDataverseRequest(), di.getGbr(), di.getGbr().getDataFile().getOwner()); + di.getCommand().submit(cmd); + MakeDataCountEntry 
entry = new MakeDataCountEntry(di.getRequestUriInfo(), di.getRequestHttpHeaders(), di.getDataverseRequestService(), di.getGbr().getDataFile()); + mdcLogService.logEntry(entry); + } catch (CommandException e) { } - - // finally, issue the redirect: - Response response = Response.seeOther(redirect_uri).build(); - logger.fine("Issuing redirect to the file location."); - throw new RedirectionException(response); } - throw new ServiceUnavailableException(); + + // finally, issue the redirect: + Response response = Response.seeOther(redirect_uri).build(); + logger.fine("Issuing redirect to the file location."); + throw new RedirectionException(response); } + throw new ServiceUnavailableException(); } if (di.getConversionParam() != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java b/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java index aef30bfb0c2..e53b54482b8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java @@ -1,7 +1,6 @@ package edu.harvard.iq.dataverse.api; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; -import static edu.harvard.iq.dataverse.api.AbstractApiBean.error; import edu.harvard.iq.dataverse.externaltools.ExternalTool; import edu.harvard.iq.dataverse.externaltools.ExternalToolServiceBean; import java.util.logging.Logger; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 9dc0c3be524..af0f6be6d32 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -14,6 +14,7 @@ import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.TermsOfUseAndAccessValidator; import edu.harvard.iq.dataverse.UserNotificationServiceBean; +import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper; @@ -31,6 +32,8 @@ import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; import edu.harvard.iq.dataverse.export.ExportException; import edu.harvard.iq.dataverse.export.ExportService; +import edu.harvard.iq.dataverse.externaltools.ExternalTool; +import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler; import edu.harvard.iq.dataverse.ingest.IngestRequest; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.ingest.IngestUtil; @@ -40,6 +43,7 @@ import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import java.io.InputStream; import java.io.StringReader; @@ -388,7 +392,7 @@ public Response updateFileMetadata(@FormDataParam("jsonData") String jsonData, } try { - DatasetVersion editVersion = df.getOwner().getEditVersion(); + DatasetVersion editVersion = df.getOwner().getOrCreateEditVersion(); //We get the new fileMetadata from the new version //This is because after generating the draft with getEditVersion, @@ -451,7 +455,8 @@ public Response updateFileMetadata(@FormDataParam("jsonData") String jsonData, @GET @Path("{id}/metadata") public Response getFileMetadata(@PathParam("id") String fileIdOrPersistentId, 
@PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response, Boolean getDraft) throws WrappedResponse, Exception { - DataverseRequest req; + //ToDo - versionId is not used - can't get metadata for earlier versions + DataverseRequest req; try { req = createDataverseRequest(findUserOrDie()); } catch (Exception e) { @@ -639,4 +644,40 @@ private void exportDatasetMetadata(SettingsServiceBean settingsServiceBean, Data } } + // This method provides a callback for an external tool to retrieve it's + // parameters/api URLs. If the request is authenticated, e.g. by it being + // signed, the api URLs will be signed. If a guest request is made, the URLs + // will be plain/unsigned. + // This supports the cases where a tool is accessing a restricted resource (e.g. + // preview of a draft file), or public case. + @GET + @Path("{id}/metadata/{fmid}/toolparams/{tid}") + public Response getExternalToolFMParams(@PathParam("tid") long externalToolId, + @PathParam("id") String fileId, @PathParam("fmid") long fmid, @QueryParam(value = "locale") String locale) { + try { + ExternalTool externalTool = externalToolService.findById(externalToolId); + if(externalTool == null) { + return error(BAD_REQUEST, "External tool not found."); + } + if (!ExternalTool.Scope.FILE.equals(externalTool.getScope())) { + return error(BAD_REQUEST, "External tool does not have file scope."); + } + ApiToken apiToken = null; + User u = findUserOrDie(); + if (u instanceof AuthenticatedUser) { + apiToken = authSvc.findApiTokenByUser((AuthenticatedUser) u); + } + FileMetadata target = fileSvc.findFileMetadata(fmid); + if (target == null) { + return error(BAD_REQUEST, "FileMetadata not found."); + } + + ExternalToolHandler eth = null; + + eth = new ExternalToolHandler(externalTool, target.getDataFile(), apiToken, target, locale); + return ok(eth.createPostBody(eth.getParams(JsonUtil.getJsonObject(externalTool.getToolParameters())))); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java index d17e76c499a..b75cb687c62 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java @@ -5,12 +5,15 @@ import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.impl.CreateHarvestingClientCommand; import edu.harvard.iq.dataverse.engine.command.impl.GetHarvestingClientCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateHarvestingClientCommand; import edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean; import edu.harvard.iq.dataverse.harvest.client.HarvestingClientServiceBean; +import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.json.JsonParseException; import javax.json.JsonObjectBuilder; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; @@ -20,10 +23,10 @@ import java.util.List; import java.util.logging.Logger; import javax.ejb.EJB; -import javax.ejb.Stateless; import javax.json.Json; import javax.json.JsonArrayBuilder; import 
javax.json.JsonObject; +import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; @@ -32,13 +35,10 @@ import javax.ws.rs.QueryParam; import javax.ws.rs.core.Response; -@Stateless @Path("harvest/clients") public class HarvestingClients extends AbstractApiBean { - @EJB - DataverseServiceBean dataverseService; @EJB HarvesterServiceBean harvesterService; @EJB @@ -111,6 +111,10 @@ public Response harvestingClient(@PathParam("nickName") String nickName, @QueryP return error(Response.Status.NOT_FOUND, "Harvesting client " + nickName + " not found."); } + // See the comment in the harvestingClients() (plural) above for the explanation + // of why we are looking up the client twice (tl;dr: to utilize the + // authorization logic in the command) + HarvestingClient retrievedHarvestingClient = null; try { @@ -118,7 +122,7 @@ public Response harvestingClient(@PathParam("nickName") String nickName, @QueryP // exception, that already has a proper HTTP response in it. retrievedHarvestingClient = execCommand(new GetHarvestingClientCommand(createDataverseRequest(findUserOrDie()), harvestingClient)); - logger.info("retrieved Harvesting Client " + retrievedHarvestingClient.getName() + " with the GetHarvestingClient command."); + logger.fine("retrieved Harvesting Client " + retrievedHarvestingClient.getName() + " with the GetHarvestingClient command."); } catch (WrappedResponse wr) { return wr.getResponse(); } catch (Exception ex) { @@ -143,29 +147,76 @@ public Response harvestingClient(@PathParam("nickName") String nickName, @QueryP @POST @Path("{nickName}") public Response createHarvestingClient(String jsonBody, @PathParam("nickName") String nickName, @QueryParam("key") String apiKey) throws IOException, JsonParseException { - + // Per the discussion during the QA of PR #9174, we decided to make + // the create/edit APIs superuser-only (the delete API was already so) + try { + User u = findUserOrDie(); + if ((!(u instanceof AuthenticatedUser) || !u.isSuperuser())) { + throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Only superusers can create harvesting clients.")); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + try ( StringReader rdr = new StringReader(jsonBody) ) { JsonObject json = Json.createReader(rdr).readObject(); + // Check that the client with this name doesn't exist yet: + // (we could simply let the command fail, but that does not result + // in a pretty report to the end user) + + HarvestingClient lookedUpClient = null; + try { + lookedUpClient = harvestingClientService.findByNickname(nickName); + } catch (Exception ex) { + logger.warning("Exception caught looking up harvesting client " + nickName + ": " + ex.getMessage()); + // let's hope that this was a fluke of some kind; we'll proceed + // with the attempt to create a new client and report an error + // if that fails too. + } + + if (lookedUpClient != null) { + return error(Response.Status.BAD_REQUEST, "Harvesting client " + nickName + " already exists"); + } + HarvestingClient harvestingClient = new HarvestingClient(); - // TODO: check that it doesn't exist yet... - harvestingClient.setName(nickName); + String dataverseAlias = jsonParser().parseHarvestingClient(json, harvestingClient); - Dataverse ownerDataverse = dataverseService.findByAlias(dataverseAlias); + if (dataverseAlias == null) { + return error(Response.Status.BAD_REQUEST, "dataverseAlias must be supplied"); + } + + // Check if the dataverseAlias supplied is valid, i.e. 
corresponds + // to an existing dataverse (collection): + Dataverse ownerDataverse = dataverseSvc.findByAlias(dataverseAlias); if (ownerDataverse == null) { return error(Response.Status.BAD_REQUEST, "No such dataverse: " + dataverseAlias); } + // The nickname supplied as part of the Rest path takes precedence: + harvestingClient.setName(nickName); + + // Populate the description field, if none is supplied: + if (harvestingClient.getArchiveDescription() == null) { + harvestingClient.setArchiveDescription(BundleUtil.getStringFromBundle("harvestclients.viewEditDialog.archiveDescription.default.generic")); + } + + if (StringUtil.isEmpty(harvestingClient.getArchiveUrl()) + || StringUtil.isEmpty(harvestingClient.getHarvestingUrl()) + || StringUtil.isEmpty(harvestingClient.getMetadataPrefix())) { + return error(Response.Status.BAD_REQUEST, "Required fields harvestUrl, archiveUrl and metadataFormat must be supplied"); + } + harvestingClient.setDataverse(ownerDataverse); if (ownerDataverse.getHarvestingClientConfigs() == null) { ownerDataverse.setHarvestingClientConfigs(new ArrayList<>()); } ownerDataverse.getHarvestingClientConfigs().add(harvestingClient); - + DataverseRequest req = createDataverseRequest(findUserOrDie()); - HarvestingClient managedHarvestingClient = execCommand( new CreateHarvestingClientCommand(req, harvestingClient)); - return created( "/harvest/clients/" + nickName, harvestingConfigAsJson(managedHarvestingClient)); + harvestingClient = execCommand(new CreateHarvestingClientCommand(req, harvestingClient)); + return created( "/harvest/clients/" + nickName, harvestingConfigAsJson(harvestingClient)); } catch (JsonParseException ex) { return error( Response.Status.BAD_REQUEST, "Error parsing harvesting client: " + ex.getMessage() ); @@ -180,6 +231,15 @@ public Response createHarvestingClient(String jsonBody, @PathParam("nickName") S @PUT @Path("{nickName}") public Response modifyHarvestingClient(String jsonBody, @PathParam("nickName") String nickName, @QueryParam("key") String apiKey) throws IOException, JsonParseException { + try { + User u = findUserOrDie(); + if ((!(u instanceof AuthenticatedUser) || !u.isSuperuser())) { + throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Only superusers can modify harvesting clients.")); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + HarvestingClient harvestingClient = null; try { harvestingClient = harvestingClientService.findByNickname(nickName); @@ -198,15 +258,39 @@ public Response modifyHarvestingClient(String jsonBody, @PathParam("nickName") S DataverseRequest req = createDataverseRequest(findUserOrDie()); JsonObject json = Json.createReader(rdr).readObject(); - String newDataverseAlias = jsonParser().parseHarvestingClient(json, harvestingClient); + HarvestingClient newHarvestingClient = new HarvestingClient(); + String newDataverseAlias = jsonParser().parseHarvestingClient(json, newHarvestingClient); if (newDataverseAlias != null && !newDataverseAlias.equals("") && !newDataverseAlias.equals(ownerDataverseAlias)) { return error(Response.Status.BAD_REQUEST, "Bad \"dataverseAlias\" supplied. 
Harvesting client "+nickName+" belongs to the dataverse "+ownerDataverseAlias); } - HarvestingClient managedHarvestingClient = execCommand( new UpdateHarvestingClientCommand(req, harvestingClient)); - return created( "/datasets/" + nickName, harvestingConfigAsJson(managedHarvestingClient)); + + // Go through the supported editable fields and update the client accordingly: + + if (newHarvestingClient.getHarvestingUrl() != null) { + harvestingClient.setHarvestingUrl(newHarvestingClient.getHarvestingUrl()); + } + if (newHarvestingClient.getHarvestingSet() != null) { + harvestingClient.setHarvestingSet(newHarvestingClient.getHarvestingSet()); + } + if (newHarvestingClient.getMetadataPrefix() != null) { + harvestingClient.setMetadataPrefix(newHarvestingClient.getMetadataPrefix()); + } + if (newHarvestingClient.getArchiveUrl() != null) { + harvestingClient.setArchiveUrl(newHarvestingClient.getArchiveUrl()); + } + if (newHarvestingClient.getArchiveDescription() != null) { + harvestingClient.setArchiveDescription(newHarvestingClient.getArchiveDescription()); + } + if (newHarvestingClient.getHarvestStyle() != null) { + harvestingClient.setHarvestStyle(newHarvestingClient.getHarvestStyle()); + } + // TODO: Make schedule configurable via this API too. + + harvestingClient = execCommand( new UpdateHarvestingClientCommand(req, harvestingClient)); + return ok( "/harvest/clients/" + nickName, harvestingConfigAsJson(harvestingClient)); } catch (JsonParseException ex) { return error( Response.Status.BAD_REQUEST, "Error parsing harvesting client: " + ex.getMessage() ); @@ -218,9 +302,58 @@ public Response modifyHarvestingClient(String jsonBody, @PathParam("nickName") S } - // TODO: - // add a @DELETE method - // (there is already a DeleteHarvestingClient command) + @DELETE + @Path("{nickName}") + public Response deleteHarvestingClient(@PathParam("nickName") String nickName) throws IOException { + // Deleting a client can take a while (if there's a large amnount of + // harvested content associated with it). So instead of calling the command + // directly, we will be calling an async. service bean method. 
+ + + try { + User u = findUserOrDie(); + if ((!(u instanceof AuthenticatedUser) || !u.isSuperuser())) { + throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Only superusers can delete harvesting clients.")); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + + HarvestingClient harvestingClient = null; + + try { + harvestingClient = harvestingClientService.findByNickname(nickName); + } catch (Exception ex) { + logger.warning("Exception caught looking up harvesting client " + nickName + ": " + ex.getMessage()); + return error( Response.Status.BAD_REQUEST, "Internal error: failed to look up harvesting client " + nickName); + } + + if (harvestingClient == null) { + return error(Response.Status.NOT_FOUND, "Harvesting client " + nickName + " not found."); + } + + // Check if the client is in a state where it can be safely deleted: + + if (harvestingClient.isDeleteInProgress()) { + return error( Response.Status.BAD_REQUEST, "Harvesting client " + nickName + " is already being deleted (in progress)"); + } + + if (harvestingClient.isHarvestingNow()) { + return error( Response.Status.BAD_REQUEST, "It is not safe to delete client " + nickName + " while a harvesting job is in progress"); + } + + // Finally, delete it (asynchronously): + + try { + harvestingClientService.deleteClient(harvestingClient.getId()); + } catch (Exception ex) { + return error( Response.Status.BAD_REQUEST, "Internal error: failed to delete harvesting client " + nickName); + } + + + return ok("Harvesting Client " + nickName + ": delete in progress"); + } + // Methods for managing harvesting runs (jobs): @@ -240,13 +373,13 @@ public Response startHarvestingJob(@PathParam("nickName") String clientNickname, } if (authenticatedUser == null || !authenticatedUser.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Only the Dataverse Admin user can run harvesting jobs"); + return error(Response.Status.FORBIDDEN, "Only admin users can run harvesting jobs"); } HarvestingClient harvestingClient = harvestingClientService.findByNickname(clientNickname); if (harvestingClient == null) { - return error(Response.Status.NOT_FOUND, "No such dataverse: "+clientNickname); + return error(Response.Status.NOT_FOUND, "No such client: "+clientNickname); } DataverseRequest dataverseRequest = createDataverseRequest(authenticatedUser); @@ -258,35 +391,8 @@ public Response startHarvestingJob(@PathParam("nickName") String clientNickname, return this.accepted(); } - // This GET shows the status of the harvesting run in progress for this - // client, if present: - // @GET - // @Path("{nickName}/run") - // TODO: - - // This DELETE kills the harvesting run in progress for this client, - // if present: - // @DELETE - // @Path("{nickName}/run") - // TODO: - - - - - /* Auxiliary, helper methods: */ - /* - @Deprecated - public static JsonArrayBuilder harvestingConfigsAsJsonArray(List harvestingDataverses) { - JsonArrayBuilder hdArr = Json.createArrayBuilder(); - - for (Dataverse hd : harvestingDataverses) { - hdArr.add(harvestingConfigAsJson(hd.getHarvestingClientConfig())); - } - return hdArr; - }*/ - public static JsonObjectBuilder harvestingConfigAsJson(HarvestingClient harvestingConfig) { if (harvestingConfig == null) { return null; @@ -296,6 +402,7 @@ public static JsonObjectBuilder harvestingConfigAsJson(HarvestingClient harvesti return jsonObjectBuilder().add("nickName", harvestingConfig.getName()). add("dataverseAlias", harvestingConfig.getDataverse().getAlias()). add("type", harvestingConfig.getHarvestType()). 
+ add("style", harvestingConfig.getHarvestStyle()). add("harvestUrl", harvestingConfig.getHarvestingUrl()). add("archiveUrl", harvestingConfig.getArchiveUrl()). add("archiveDescription",harvestingConfig.getArchiveDescription()). diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Licenses.java b/src/main/java/edu/harvard/iq/dataverse/api/Licenses.java index 58e1f8cc2c5..1fdf7818cfb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Licenses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Licenses.java @@ -146,6 +146,37 @@ public Response setActiveState(@PathParam("id") long id, @PathParam("activeState } } + @PUT + @Path("/{id}/:sortOrder/{sortOrder}") + public Response setSortOrder(@PathParam("id") long id, @PathParam("sortOrder") long sortOrder) { + User authenticatedUser; + try { + authenticatedUser = findAuthenticatedUserOrDie(); + if (!authenticatedUser.isSuperuser()) { + return error(Status.FORBIDDEN, "must be superuser"); + } + } catch (WrappedResponse e) { + return error(Status.UNAUTHORIZED, "api key required"); + } + try { + if (licenseSvc.setSortOrder(id, sortOrder) == 0) { + return error(Response.Status.NOT_FOUND, "License with ID " + id + " not found"); + } + License license = licenseSvc.getById(id); + actionLogSvc + .log(new ActionLogRecord(ActionLogRecord.ActionType.Admin, "sortOrderLicenseChanged") + .setInfo("License " + license.getName() + "(" + license.getUri() + ") as id: " + id + + "has now sort order " + sortOrder + ".") + .setUserIdentifier(authenticatedUser.getIdentifier())); + return ok("License ID " + id + " sort order set to " + sortOrder); + } catch (WrappedResponse e) { + if (e.getCause() instanceof IllegalArgumentException) { + return badRequest(e.getCause().getMessage()); + } + return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); + } + } + @DELETE @Path("/{id}") public Response deleteLicenseById(@PathParam("id") long id) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Search.java b/src/main/java/edu/harvard/iq/dataverse/api/Search.java index 71cb59ff62a..cef509b1ec5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Search.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Search.java @@ -72,6 +72,8 @@ public Response search( @QueryParam("show_my_data") boolean showMyData, @QueryParam("query_entities") boolean queryEntities, @QueryParam("metadata_fields") List metadataFields, + @QueryParam("geo_point") String geoPointRequested, + @QueryParam("geo_radius") String geoRadiusRequested, @Context HttpServletResponse response ) { @@ -87,6 +89,8 @@ public Response search( // sanity checking on user-supplied arguments SortBy sortBy; int numResultsPerPage; + String geoPoint; + String geoRadius; List dataverseSubtrees = new ArrayList<>(); try { @@ -119,6 +123,17 @@ public Response search( throw new IOException("Filter is empty, which should never happen, as this allows unfettered searching of our index"); } + geoPoint = getGeoPoint(geoPointRequested); + geoRadius = getGeoRadius(geoRadiusRequested); + + if (geoPoint != null && geoRadius == null) { + return error(Response.Status.BAD_REQUEST, "If you supply geo_point you must also supply geo_radius."); + } + + if (geoRadius != null && geoPoint == null) { + return error(Response.Status.BAD_REQUEST, "If you supply geo_radius you must also supply geo_point."); + } + } catch (Exception ex) { return error(Response.Status.BAD_REQUEST, ex.getLocalizedMessage()); } @@ -137,7 +152,9 @@ public Response search( paginationStart, dataRelatedToMe, numResultsPerPage, - true //SEK get query 
entities always for search API additional Dataset Information 6300 12/6/2019 + true, //SEK get query entities always for search API additional Dataset Information 6300 12/6/2019 + geoPoint, + geoRadius ); } catch (SearchException ex) { Throwable cause = ex; @@ -340,4 +357,12 @@ private Dataverse getSubtree(String alias) throws Exception { } } + private String getGeoPoint(String geoPointRequested) { + return SearchUtil.getGeoPoint(geoPointRequested); + } + + private String getGeoRadius(String geoRadiusRequested) { + return SearchUtil.getGeoRadius(geoRadiusRequested); + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Users.java b/src/main/java/edu/harvard/iq/dataverse/api/Users.java index d3b938af960..7568c7caff6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Users.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Users.java @@ -21,6 +21,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.logging.Level; import java.util.logging.Logger; import javax.ejb.Stateless; import javax.json.JsonArray; @@ -200,12 +201,17 @@ public Response getAuthenticatedUserByToken() { String tokenFromRequestAPI = getRequestApiKey(); AuthenticatedUser authenticatedUser = findUserByApiToken(tokenFromRequestAPI); + // This allows use of the :me API call from an active login session. Not sure + // this is a good idea if (authenticatedUser == null) { - return error(Response.Status.BAD_REQUEST, "User with token " + tokenFromRequestAPI + " not found."); - } else { - return ok(json(authenticatedUser)); + try { + authenticatedUser = findAuthenticatedUserOrDie(); + } catch (WrappedResponse ex) { + Logger.getLogger(Users.class.getName()).log(Level.SEVERE, null, ex); + return error(Response.Status.BAD_REQUEST, "User with token " + tokenFromRequestAPI + " not found."); + } } - + return ok(json(authenticatedUser)); } @POST diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java index b6d75276ae1..6543d771ebe 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java @@ -110,7 +110,7 @@ public DepositReceipt createNew(String collectionUri, Deposit deposit, AuthCrede throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + user.getDisplayInfo().getTitle() + " is not authorized to create a dataset in this dataverse."); } - DatasetVersion newDatasetVersion = dataset.getEditVersion(); + DatasetVersion newDatasetVersion = dataset.getOrCreateEditVersion(); String foreignFormat = SwordUtil.DCTERMS; try { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java index dc178a9a740..8fb55a8eaf6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java @@ -137,7 +137,7 @@ public DepositReceipt replaceMetadata(String uri, Deposit deposit, AuthCredentia if (!permissionService.isUserAllowedOn(user, updateDatasetCommand, dataset)) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + user.getDisplayInfo().getTitle() + " is not authorized to modify dataverse " + dvThatOwnsDataset.getAlias()); } - DatasetVersion datasetVersion = 
dataset.getEditVersion(); + DatasetVersion datasetVersion = dataset.getOrCreateEditVersion(); // erase all metadata before creating populating dataset version List emptyDatasetFields = new ArrayList<>(); datasetVersion.setDatasetFields(emptyDatasetFields); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java index 928ffd4a129..5491024c73c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java @@ -250,7 +250,7 @@ DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials au // Make sure that the upload type is not rsync - handled above for dual mode // ------------------------------------- - if (dataset.getEditVersion().isHasPackageFile()) { + if (dataset.getOrCreateEditVersion().isHasPackageFile()) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")); } @@ -276,7 +276,7 @@ DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials au } String uploadedZipFilename = deposit.getFilename(); - DatasetVersion editVersion = dataset.getEditVersion(); + DatasetVersion editVersion = dataset.getOrCreateEditVersion(); if (deposit.getInputStream() == null) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Deposit input stream was null."); diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java index b242cd2936f..d92ed78681b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java @@ -647,6 +647,8 @@ public AuthenticatedUser createAuthenticatedUser(UserRecordIdentifier userRecord actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Auth, "createUser") .setInfo(authenticatedUser.getIdentifier())); + + authenticatedUser.initialize(); return authenticatedUser; } diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java index 225352dec43..c5be41a014a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java @@ -1,9 +1,11 @@ package edu.harvard.iq.dataverse.authorization.providers.oauth2; import edu.harvard.iq.dataverse.DataverseSession; +import edu.harvard.iq.dataverse.authorization.AuthenticationProvider; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.ClockUtil; import edu.harvard.iq.dataverse.util.StringUtil; import java.io.BufferedReader; @@ -27,6 +29,8 @@ import static edu.harvard.iq.dataverse.util.StringUtil.toOption; import edu.harvard.iq.dataverse.util.SystemConfig; +import java.text.MessageFormat; +import java.util.ArrayList; import org.omnifaces.util.Faces; /** @@ -45,6 +49,8 @@ public class OAuth2LoginBackingBean 
implements Serializable { private String responseBody; Optional redirectPage = Optional.empty(); private OAuth2Exception error; + private boolean disabled = false; + private boolean signUpDisabled = false; /** * TODO: Only used in exchangeCodeForToken(). Make local var in method. */ @@ -96,13 +102,28 @@ public void exchangeCodeForToken() throws IOException { AbstractOAuth2AuthenticationProvider idp = oIdp.get(); oauthUser = idp.getUserRecord(code.get(), systemConfig.getOAuth2CallbackUrl()); + // Throw an error if this authentication method is disabled: + // (it's not clear if it's possible at all, for somebody to get here with + // the provider really disabled; but, shouldn't hurt either). + if (isProviderDisabled(idp.getId())) { + disabled = true; + throw new OAuth2Exception(-1, "", MessageFormat.format(BundleUtil.getStringFromBundle("oauth2.callback.error.providerDisabled"), idp.getId())); + } + UserRecordIdentifier idtf = oauthUser.getUserRecordIdentifier(); AuthenticatedUser dvUser = authenticationSvc.lookupUser(idtf); if (dvUser == null) { - // need to create the user - newAccountPage.setNewUser(oauthUser); - Faces.redirect("/oauth2/firstLogin.xhtml"); + // Need to create a new user - unless signups are disabled + // for this authentication method; in which case, throw + // an error: + if (systemConfig.isSignupDisabledForRemoteAuthProvider(idp.getId())) { + signUpDisabled = true; + throw new OAuth2Exception(-1, "", MessageFormat.format(BundleUtil.getStringFromBundle("oauth2.callback.error.signupDisabledForProvider"), idp.getId())); + } else { + newAccountPage.setNewUser(oauthUser); + Faces.redirect("/oauth2/firstLogin.xhtml"); + } } else { // login the user and redirect to HOME of intended page (if any). @@ -271,4 +292,32 @@ public List getProviders() { public boolean isOAuth2ProvidersDefined() { return !authenticationSvc.getOAuth2Providers().isEmpty(); } + + public boolean isDisabled() { + return disabled; + } + + public boolean isSignUpDisabled() { + return signUpDisabled; + } + + private boolean isProviderDisabled(String providerId) { + // Compare this provider id against the list of *enabled* auth providers + // returned by the Authentication Service: + List idps = new ArrayList<>(authenticationSvc.getAuthenticationProviders()); + + // for the tests to work: + if (idps.isEmpty()) { + return false; + } + + for (AuthenticationProvider idp : idps) { + if (idp != null) { + if (providerId.equals(idp.getId())) { + return false; + } + } + } + return true; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java index 2cd28d9aac9..9fdfce2f1a7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java @@ -148,7 +148,7 @@ void prePersist() { } @PostLoad - void postLoad() { + public void initialize() { mutedNotificationsSet = Type.tokenizeToSet(mutedNotifications); mutedEmailsSet = Type.tokenizeToSet(mutedEmails); } diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java index 6b82a665c17..3ae8ce9b883 100644 --- a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java @@ -190,7 +190,7 @@ public void beforeJob() throws Exception { // if mode = REPLACE, remove all filemetadata from the dataset version and start fresh if (mode.equalsIgnoreCase(ImportMode.REPLACE.name())) { try { - DatasetVersion workingVersion = dataset.getEditVersion(); + DatasetVersion workingVersion = dataset.getOrCreateEditVersion(); List fileMetadataList = workingVersion.getFileMetadatas(); jobLogger.log(Level.INFO, "Removing any existing file metadata since mode = REPLACE"); for (FileMetadata fmd : fileMetadataList) { diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index 7683aab7dfa..f1785a42098 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -25,12 +25,8 @@ import java.nio.channels.FileChannel; import java.nio.file.Files; import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.List; +import java.util.*; import java.util.logging.Logger; -import java.util.Base64; -import java.util.HashMap; -import java.util.Map; import javax.imageio.ImageIO; import org.apache.commons.io.IOUtils; import static edu.harvard.iq.dataverse.dataaccess.DataAccess.getStorageIO; @@ -43,6 +39,7 @@ import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.EnumUtils; public class DatasetUtil { @@ -459,7 +456,7 @@ public static List getDatasetSummaryFields(DatasetVersion datasetV } public static boolean isRsyncAppropriateStorageDriver(Dataset dataset){ - // ToDo - rsync was written before multiple store support and currently is hardcoded to use the DataAccess.S3 store. + // ToDo - rsync was written before multiple store support and currently is hardcoded to use the DataAccess.S3 store. // When those restrictions are lifted/rsync can be configured per store, this test should check that setting // instead of testing for the 's3" store, //This method is used by both the dataset and edit files page so one change here @@ -551,7 +548,7 @@ public static License getLicense(DatasetVersion dsv) { public static String getLicenseName(DatasetVersion dsv) { License license = DatasetUtil.getLicense(dsv); - return license != null ? license.getName() + return license != null ? getLocalizedLicenseDetails(license,"NAME") : BundleUtil.getStringFromBundle("license.custom"); } @@ -577,7 +574,30 @@ public static String getLicenseIcon(DatasetVersion dsv) { public static String getLicenseDescription(DatasetVersion dsv) { License license = DatasetUtil.getLicense(dsv); - return license != null ? license.getShortDescription() : BundleUtil.getStringFromBundle("license.custom.description"); + return license != null ? getLocalizedLicenseDetails(license,"DESCRIPTION") : BundleUtil.getStringFromBundle("license.custom.description"); + } + + public enum LicenseOption { + NAME, DESCRIPTION + }; + + public static String getLocalizedLicenseDetails(License license,String keyPart) { + String licenseName = license.getName(); + String localizedLicenseValue = "" ; + try { + if (EnumUtils.isValidEnum(LicenseOption.class, keyPart ) ){ + String key = "license." + licenseName.toLowerCase().replace(" ", "_") + "." 
+ keyPart.toLowerCase(); + localizedLicenseValue = BundleUtil.getStringFromPropertyFile(key, "License"); + } + } + catch (Exception e) { + localizedLicenseValue = licenseName; + } + + if (localizedLicenseValue == null) { + localizedLicenseValue = licenseName ; + } + return localizedLicenseValue; } public static String getLocaleExternalStatus(String status) { diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index 8e7922fd83b..febbb249a91 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -1200,7 +1200,7 @@ private boolean step_030_createNewFilesViaIngest(){ } // Load the working version of the Dataset - workingVersion = dataset.getEditVersion(); + workingVersion = dataset.getOrCreateEditVersion(); clone = workingVersion.cloneDatasetVersion(); try { CreateDataFileResult result = FileUtil.createDataFiles(workingVersion, @@ -1805,7 +1805,7 @@ private void setNewlyAddedFiles(List datafiles){ newlyAddedFileMetadatas = new ArrayList<>(); // Loop of uglinesss...but expect 1 to 4 files in final file list - List latestFileMetadatas = dataset.getEditVersion().getFileMetadatas(); + List latestFileMetadatas = dataset.getOrCreateEditVersion().getFileMetadatas(); for (DataFile newlyAddedFile : finalFileList){ diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java index 534e07feaae..1efaf14c755 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java @@ -81,7 +81,7 @@ protected void additionalParameterTests(CommandContext ctxt) throws CommandExcep @Override protected DatasetVersion getVersionToPersist( Dataset theDataset ) { - return theDataset.getEditVersion(); + return theDataset.getOrCreateEditVersion(); } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java index 772b6205b02..66ba00bcf55 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java @@ -56,7 +56,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { DatasetVersion updateVersion = getDataset().getLatestVersionForCopy(); // Copy metadata from draft version to latest published version - updateVersion.setDatasetFields(getDataset().getEditVersion().initDatasetFields()); + updateVersion.setDatasetFields(getDataset().getOrCreateEditVersion().initDatasetFields()); validateOrDie(updateVersion, isValidateLenient()); @@ -68,14 +68,14 @@ public Dataset execute(CommandContext ctxt) throws CommandException { TermsOfUseAndAccess oldTerms = updateVersion.getTermsOfUseAndAccess(); - TermsOfUseAndAccess newTerms = getDataset().getEditVersion().getTermsOfUseAndAccess(); + TermsOfUseAndAccess newTerms = getDataset().getOrCreateEditVersion().getTermsOfUseAndAccess(); newTerms.setDatasetVersion(updateVersion); updateVersion.setTermsOfUseAndAccess(newTerms); //Put old terms on version that 
will be deleted.... - getDataset().getEditVersion().setTermsOfUseAndAccess(oldTerms); + getDataset().getOrCreateEditVersion().setTermsOfUseAndAccess(oldTerms); //Also set the fileaccessrequest boolean on the dataset to match the new terms getDataset().setFileAccessRequest(updateVersion.getTermsOfUseAndAccess().isFileAccessRequest()); - List newComments = getDataset().getEditVersion().getWorkflowComments(); + List newComments = getDataset().getOrCreateEditVersion().getWorkflowComments(); if (newComments!=null && newComments.size() >0) { for(WorkflowComment wfc: newComments) { wfc.setDatasetVersion(updateVersion); @@ -91,7 +91,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // Look for file metadata changes and update published metadata if needed List pubFmds = updateVersion.getFileMetadatas(); int pubFileCount = pubFmds.size(); - int newFileCount = tempDataset.getEditVersion().getFileMetadatas().size(); + int newFileCount = tempDataset.getOrCreateEditVersion().getFileMetadatas().size(); /* The policy for this command is that it should only be used when the change is a 'minor update' with no file changes. * Nominally we could call .isMinorUpdate() for that but we're making the same checks as we go through the update here. */ @@ -131,7 +131,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { ctxt.em().remove(mergedFmd); // including removing metadata from the list on the datafile draftFmd.getDataFile().getFileMetadatas().remove(draftFmd); - tempDataset.getEditVersion().getFileMetadatas().remove(draftFmd); + tempDataset.getOrCreateEditVersion().getFileMetadatas().remove(draftFmd); // And any references in the list held by categories for (DataFileCategory cat : tempDataset.getCategories()) { cat.getFileMetadatas().remove(draftFmd); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftDatasetVersionCommand.java index 88b5a75ea22..7e32b19e576 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftDatasetVersionCommand.java @@ -24,7 +24,7 @@ public GetDraftDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffect @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { - return ds.getEditVersion(); + return ds.getOrCreateEditVersion(); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PersistProvFreeFormCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PersistProvFreeFormCommand.java index aa06967675f..a258c36d6ea 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PersistProvFreeFormCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PersistProvFreeFormCommand.java @@ -36,7 +36,7 @@ public DataFile execute(CommandContext ctxt) throws CommandException { } else { Dataset dataset = dataFile.getOwner(); - DatasetVersion workingVersion = dataset.getEditVersion(); + DatasetVersion workingVersion = dataset.getOrCreateEditVersion(); if (workingVersion.isDraft()) { if (dataset.isReleased()){ diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RestrictFileCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RestrictFileCommand.java index 16fa40cd8a7..38cbeaf3d66 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RestrictFileCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RestrictFileCommand.java @@ -63,7 +63,7 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { } else { Dataset dataset = file.getOwner(); - DatasetVersion workingVersion = dataset.getEditVersion(); + DatasetVersion workingVersion = dataset.getOrCreateEditVersion(); // We need the FileMetadata for the file in the draft dataset version and the // file we have may still reference the fmd from the prior released version FileMetadata draftFmd = file.getFileMetadata(); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java index 169f6d790d3..ba0348f57d6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java @@ -37,11 +37,11 @@ public Dataset execute(CommandContext ctxt) throws CommandException { throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.reject.datasetNotInReview"), this); } - dataset.getEditVersion().setLastUpdateTime(getTimestamp()); + dataset.getOrCreateEditVersion().setLastUpdateTime(getTimestamp()); dataset.setModificationTime(getTimestamp()); ctxt.engine().submit( new RemoveLockCommand(getRequest(), getDataset(), DatasetLock.Reason.InReview) ); - WorkflowComment workflowComment = new WorkflowComment(dataset.getEditVersion(), WorkflowComment.Type.RETURN_TO_AUTHOR, comment, (AuthenticatedUser) this.getUser()); + WorkflowComment workflowComment = new WorkflowComment(dataset.getOrCreateEditVersion(), WorkflowComment.Type.RETURN_TO_AUTHOR, comment, (AuthenticatedUser) this.getUser()); ctxt.datasets().addWorkflowComment(workflowComment); updateDatasetUser(ctxt); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCurationStatusCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCurationStatusCommand.java index c3a62a35bb3..72f0ef335fb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCurationStatusCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCurationStatusCommand.java @@ -77,7 +77,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { public Dataset save(CommandContext ctxt) throws CommandException { - getDataset().getEditVersion().setLastUpdateTime(getTimestamp()); + getDataset().getOrCreateEditVersion().setLastUpdateTime(getTimestamp()); getDataset().setModificationTime(getTimestamp()); Dataset savedDataset = ctxt.em().merge(getDataset()); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommand.java index e38f5bae8e0..130030798ab 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommand.java @@ -51,7 +51,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { private Dataset save(CommandContext ctxt) throws CommandException { - getDataset().getEditVersion().setLastUpdateTime(getTimestamp()); + getDataset().getOrCreateEditVersion().setLastUpdateTime(getTimestamp()); 
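+        // (getOrCreateEditVersion() is the renamed getEditVersion(): it returns the
+        //  existing draft version, creating one first if the dataset has none yet.)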
getDataset().setModificationTime(getTimestamp()); Dataset savedDataset = ctxt.em().merge(getDataset()); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java index 227c54c598f..33f64f23076 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java @@ -64,7 +64,7 @@ public UpdateDatasetVersionCommand(Dataset theDataset, DataverseRequest aRequest this.filesToDelete = new ArrayList<>(); this.clone = null; this.fmVarMet = null; - for (FileMetadata fmd : theDataset.getEditVersion().getFileMetadatas()) { + for (FileMetadata fmd : theDataset.getOrCreateEditVersion().getFileMetadatas()) { if (fmd.getDataFile().equals(fileToDelete)) { filesToDelete.add(fmd); break; @@ -114,10 +114,10 @@ public Dataset execute(CommandContext ctxt) throws CommandException { logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", getDataset().getId()); } - getDataset().getEditVersion(fmVarMet).setDatasetFields(getDataset().getEditVersion(fmVarMet).initDatasetFields()); - validateOrDie(getDataset().getEditVersion(fmVarMet), isValidateLenient()); + getDataset().getOrCreateEditVersion(fmVarMet).setDatasetFields(getDataset().getOrCreateEditVersion(fmVarMet).initDatasetFields()); + validateOrDie(getDataset().getOrCreateEditVersion(fmVarMet), isValidateLenient()); - final DatasetVersion editVersion = getDataset().getEditVersion(fmVarMet); + final DatasetVersion editVersion = getDataset().getOrCreateEditVersion(fmVarMet); DatasetFieldUtil.tidyUpFields(editVersion.getDatasetFields(), true); @@ -204,10 +204,10 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // If the datasetversion doesn't match, we have the fmd from a published version // and we need to remove the one for the newly created draft instead, so we find // it here - logger.fine("Edit ver: " + theDataset.getEditVersion().getId()); + logger.fine("Edit ver: " + theDataset.getOrCreateEditVersion().getId()); logger.fine("fmd ver: " + fmd.getDatasetVersion().getId()); - if (!theDataset.getEditVersion().equals(fmd.getDatasetVersion())) { - fmd = FileMetadataUtil.getFmdForFileInEditVersion(fmd, theDataset.getEditVersion()); + if (!theDataset.getOrCreateEditVersion().equals(fmd.getDatasetVersion())) { + fmd = FileMetadataUtil.getFmdForFileInEditVersion(fmd, theDataset.getOrCreateEditVersion()); } } fmd = ctxt.em().merge(fmd); @@ -229,21 +229,21 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // In either case, to fully remove the fmd, we have to remove any other possible // references // From the datasetversion - FileMetadataUtil.removeFileMetadataFromList(theDataset.getEditVersion().getFileMetadatas(), fmd); + FileMetadataUtil.removeFileMetadataFromList(theDataset.getOrCreateEditVersion().getFileMetadatas(), fmd); // and from the list associated with each category for (DataFileCategory cat : theDataset.getCategories()) { FileMetadataUtil.removeFileMetadataFromList(cat.getFileMetadatas(), fmd); } } - for(FileMetadata fmd: theDataset.getEditVersion().getFileMetadatas()) { + for(FileMetadata fmd: theDataset.getOrCreateEditVersion().getFileMetadatas()) { logger.fine("FMD: " + fmd.getId() + " for file: " + fmd.getDataFile().getId() + "is in final draft version"); } if (recalculateUNF) { - 
ctxt.ingest().recalculateDatasetVersionUNF(theDataset.getEditVersion()); + ctxt.ingest().recalculateDatasetVersionUNF(theDataset.getOrCreateEditVersion()); } - theDataset.getEditVersion().setLastUpdateTime(getTimestamp()); + theDataset.getOrCreateEditVersion().setLastUpdateTime(getTimestamp()); theDataset.setModificationTime(getTimestamp()); savedDataset = ctxt.em().merge(theDataset); diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java index 7f94b1bbbbf..1789b7a90c3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java @@ -20,7 +20,6 @@ import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.OneToMany; -import javax.persistence.Transient; /** * A specification or definition for how an external tool is intended to @@ -30,8 +29,6 @@ @Entity public class ExternalTool implements Serializable { - private static final Logger logger = Logger.getLogger(ExternalToolServiceBean.class.getCanonicalName()); - public static final String DISPLAY_NAME = "displayName"; public static final String DESCRIPTION = "description"; public static final String LEGACY_SINGLE_TYPE = "type"; @@ -41,6 +38,7 @@ public class ExternalTool implements Serializable { public static final String TOOL_PARAMETERS = "toolParameters"; public static final String CONTENT_TYPE = "contentType"; public static final String TOOL_NAME = "toolName"; + public static final String ALLOWED_API_CALLS = "allowedApiCalls"; @Id @GeneratedValue(strategy = GenerationType.IDENTITY) @@ -97,6 +95,14 @@ public class ExternalTool implements Serializable { @Column(nullable = true, columnDefinition = "TEXT") private String contentType; + /** + * Set of API calls the tool would like to be able to use (e,.g. for retrieving + * data through the Dataverse REST API). 
Used to build signedUrls for POST + * headers, as in DP Creator + */ + @Column(nullable = true, columnDefinition = "TEXT") + private String allowedApiCalls; + /** * This default constructor is only here to prevent this error at * deployment: @@ -112,6 +118,10 @@ public ExternalTool() { } public ExternalTool(String displayName, String toolName, String description, List externalToolTypes, Scope scope, String toolUrl, String toolParameters, String contentType) { + this(displayName, toolName, description, externalToolTypes, scope, toolUrl, toolParameters, contentType, null); + } + + public ExternalTool(String displayName, String toolName, String description, List externalToolTypes, Scope scope, String toolUrl, String toolParameters, String contentType, String allowedApiCalls) { this.displayName = displayName; this.toolName = toolName; this.description = description; @@ -120,6 +130,7 @@ public ExternalTool(String displayName, String toolName, String description, Lis this.toolUrl = toolUrl; this.toolParameters = toolParameters; this.contentType = contentType; + this.allowedApiCalls = allowedApiCalls; } public enum Type { @@ -273,6 +284,9 @@ public JsonObjectBuilder toJson() { if (getContentType() != null) { jab.add(CONTENT_TYPE, getContentType()); } + if (getAllowedApiCalls()!= null) { + jab.add(ALLOWED_API_CALLS,getAllowedApiCalls()); + } return jab; } @@ -298,5 +312,19 @@ public String getDisplayNameLang() { return displayName; } + /** + * @return the allowedApiCalls + */ + public String getAllowedApiCalls() { + return allowedApiCalls; + } + + /** + * @param allowedApiCalls the allowedApiCalls to set + */ + public void setAllowedApiCalls(String allowedApiCalls) { + this.allowedApiCalls = allowedApiCalls; + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java index 33d8c2d0d54..88a51017b75 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java @@ -4,16 +4,35 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.authorization.users.ApiToken; +import edu.harvard.iq.dataverse.settings.JvmSettings; +import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.URLTokenUtil; -import java.io.StringReader; -import java.util.ArrayList; -import java.util.List; +import edu.harvard.iq.dataverse.util.UrlSignerUtil; +import edu.harvard.iq.dataverse.util.json.JsonUtil; + +import java.io.IOException; +import java.net.HttpURLConnection; +import java.net.URI; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.util.Base64; +import java.util.Map.Entry; +import java.util.logging.Level; +import java.util.logging.Logger; import javax.json.Json; import javax.json.JsonArray; +import javax.json.JsonArrayBuilder; +import javax.json.JsonNumber; import javax.json.JsonObject; -import javax.json.JsonReader; +import javax.json.JsonObjectBuilder; +import javax.json.JsonString; +import javax.json.JsonValue; +import javax.ws.rs.HttpMethod; + +import org.apache.commons.codec.binary.StringUtils; /** * Handles an operation on a specific file. 
Requires a file id in order to be @@ -23,15 +42,26 @@ public class ExternalToolHandler extends URLTokenUtil { private final ExternalTool externalTool; + + private String requestMethod; + + public static final String HTTP_METHOD="httpMethod"; + public static final String TIMEOUT="timeOut"; + public static final String SIGNED_URL="signedUrl"; + public static final String NAME="name"; + public static final String URL_TEMPLATE="urlTemplate"; + + /** * File level tool * * @param externalTool The database entity. - * @param dataFile Required. - * @param apiToken The apiToken can be null because "explore" tools can be - * used anonymously. + * @param dataFile Required. + * @param apiToken The apiToken can be null because "explore" tools can be + * used anonymously. */ - public ExternalToolHandler(ExternalTool externalTool, DataFile dataFile, ApiToken apiToken, FileMetadata fileMetadata, String localeCode) { + public ExternalToolHandler(ExternalTool externalTool, DataFile dataFile, ApiToken apiToken, + FileMetadata fileMetadata, String localeCode) { super(dataFile, apiToken, fileMetadata, localeCode); this.externalTool = externalTool; } @@ -40,52 +70,168 @@ public ExternalToolHandler(ExternalTool externalTool, DataFile dataFile, ApiToke * Dataset level tool * * @param externalTool The database entity. - * @param dataset Required. - * @param apiToken The apiToken can be null because "explore" tools can be - * used anonymously. + * @param dataset Required. + * @param apiToken The apiToken can be null because "explore" tools can be + * used anonymously. */ public ExternalToolHandler(ExternalTool externalTool, Dataset dataset, ApiToken apiToken, String localeCode) { super(dataset, apiToken, localeCode); this.externalTool = externalTool; } - // TODO: rename to handleRequest() to someday handle sending headers as well as query parameters. - public String getQueryParametersForUrl() { - return getQueryParametersForUrl(false); + public String handleRequest() { + return handleRequest(false); } - - // TODO: rename to handleRequest() to someday handle sending headers as well as query parameters. - public String getQueryParametersForUrl(boolean preview) { - String toolParameters = externalTool.getToolParameters(); - JsonReader jsonReader = Json.createReader(new StringReader(toolParameters)); - JsonObject obj = jsonReader.readObject(); - JsonArray queryParams = obj.getJsonArray("queryParameters"); - if (queryParams == null || queryParams.isEmpty()) { - return ""; - } - List params = new ArrayList<>(); - queryParams.getValuesAs(JsonObject.class).forEach((queryParam) -> { - queryParam.keySet().forEach((key) -> { - String value = queryParam.getString(key); - String param = getQueryParam(key, value); - if (param != null && !param.isEmpty()) { - params.add(param); + + public String handleRequest(boolean preview) { + JsonObject toolParameters = JsonUtil.getJsonObject(externalTool.getToolParameters()); + JsonString method = toolParameters.getJsonString(HTTP_METHOD); + requestMethod = method != null ? method.getString() : HttpMethod.GET; + JsonObject params = getParams(toolParameters); + logger.fine("Found params: " + JsonUtil.prettyPrint(params)); + if (requestMethod.equals(HttpMethod.GET)) { + String paramsString = ""; + if (externalTool.getAllowedApiCalls() == null) { + // Legacy, using apiKey + logger.fine("Legacy Case"); + + for (Entry entry : params.entrySet()) { + paramsString = paramsString + (paramsString.isEmpty() ? "?" 
: "&") + entry.getKey() + "="; + JsonValue val = entry.getValue(); + if (val.getValueType().equals(JsonValue.ValueType.NUMBER)) { + paramsString += ((JsonNumber) val).intValue(); + } else { + paramsString += ((JsonString) val).getString(); + } } - }); - }); - if (!preview) { - return "?" + String.join("&", params); + } else { + //Send a signed callback to get params and signedURLs + String callback = null; + switch (externalTool.getScope()) { + case DATASET: + callback=SystemConfig.getDataverseSiteUrlStatic() + "/api/v1/datasets/" + + dataset.getId() + "/versions/:latest/toolparams/" + externalTool.getId(); + case FILE: + callback= SystemConfig.getDataverseSiteUrlStatic() + "/api/v1/files/" + + dataFile.getId() + "/metadata/" + fileMetadata.getId() + "/toolparams/" + + externalTool.getId(); + } + if (apiToken != null) { + callback = UrlSignerUtil.signUrl(callback, 5, apiToken.getAuthenticatedUser().getUserIdentifier(), HttpMethod.GET, + JvmSettings.API_SIGNING_SECRET.lookupOptional().orElse("") + apiToken.getTokenString()); + } + paramsString= "?callback=" + Base64.getEncoder().encodeToString(StringUtils.getBytesUtf8(callback)); + if (getLocaleCode() != null) { + paramsString += "&locale=" + getLocaleCode(); + } + } + if (preview) { + paramsString += "&preview=true"; + } + logger.fine("GET return is: " + paramsString); + return paramsString; + } else { - return "?" + String.join("&", params) + "&preview=true"; + // ToDo - if the allowedApiCalls() are defined, could/should we send them to + // tools using GET as well? + + if (requestMethod.equals(HttpMethod.POST)) { + String body = JsonUtil.prettyPrint(createPostBody(params).build()); + try { + logger.info("POST Body: " + body); + return postFormData(body); + } catch (IOException | InterruptedException ex) { + Logger.getLogger(ExternalToolHandler.class.getName()).log(Level.SEVERE, null, ex); + } + } } + return null; + } + + public JsonObject getParams(JsonObject toolParameters) { + //ToDo - why an array of object each with a single key/value pair instead of one object? 
+ JsonArray queryParams = toolParameters.getJsonArray("queryParameters"); + + // ToDo return json and print later + JsonObjectBuilder paramsBuilder = Json.createObjectBuilder(); + if (!(queryParams == null) && !queryParams.isEmpty()) { + queryParams.getValuesAs(JsonObject.class).forEach((queryParam) -> { + queryParam.keySet().forEach((key) -> { + String value = queryParam.getString(key); + JsonValue param = getParam(value); + if (param != null) { + paramsBuilder.add(key, param); + } + }); + }); + } + return paramsBuilder.build(); + } + + public JsonObjectBuilder createPostBody(JsonObject params) { + JsonObjectBuilder bodyBuilder = Json.createObjectBuilder(); + bodyBuilder.add("queryParameters", params); + String apiCallStr = externalTool.getAllowedApiCalls(); + if (apiCallStr != null && !apiCallStr.isBlank()) { + JsonArray apiArray = JsonUtil.getJsonArray(externalTool.getAllowedApiCalls()); + JsonArrayBuilder apisBuilder = Json.createArrayBuilder(); + apiArray.getValuesAs(JsonObject.class).forEach(((apiObj) -> { + logger.fine(JsonUtil.prettyPrint(apiObj)); + String name = apiObj.getJsonString(NAME).getString(); + String httpmethod = apiObj.getJsonString(HTTP_METHOD).getString(); + int timeout = apiObj.getInt(TIMEOUT); + String urlTemplate = apiObj.getJsonString(URL_TEMPLATE).getString(); + logger.fine("URL Template: " + urlTemplate); + urlTemplate = SystemConfig.getDataverseSiteUrlStatic() + urlTemplate; + String apiPath = replaceTokensWithValues(urlTemplate); + logger.fine("URL WithTokens: " + apiPath); + String url = apiPath; + // Sign if apiToken exists, otherwise send unsigned URL (i.e. for guest users) + ApiToken apiToken = getApiToken(); + if (apiToken != null) { + url = UrlSignerUtil.signUrl(apiPath, timeout, apiToken.getAuthenticatedUser().getUserIdentifier(), + httpmethod, JvmSettings.API_SIGNING_SECRET.lookupOptional().orElse("") + + getApiToken().getTokenString()); + } + logger.fine("Signed URL: " + url); + apisBuilder.add(Json.createObjectBuilder().add(NAME, name).add(HTTP_METHOD, httpmethod) + .add(SIGNED_URL, url).add(TIMEOUT, timeout)); + })); + bodyBuilder.add("signedUrls", apisBuilder); + } + return bodyBuilder; + } + + private String postFormData(String allowedApis) throws IOException, InterruptedException { + String url = null; + HttpClient client = HttpClient.newHttpClient(); + HttpRequest request = HttpRequest.newBuilder().POST(HttpRequest.BodyPublishers.ofString(allowedApis)) + .uri(URI.create(externalTool.getToolUrl())).header("Content-Type", "application/json").build(); + HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); + boolean redirect = false; + int status = response.statusCode(); + if (status != HttpURLConnection.HTTP_OK) { + if (status == HttpURLConnection.HTTP_MOVED_TEMP || status == HttpURLConnection.HTTP_MOVED_PERM + || status == HttpURLConnection.HTTP_SEE_OTHER) { + redirect = true; + } + } + if (redirect == true) { + String newUrl = response.headers().firstValue("location").get(); +// toolContext = "http://" + response.uri().getAuthority(); + + url = newUrl; + } + return url; } public String getToolUrlWithQueryParams() { - return externalTool.getToolUrl() + getQueryParametersForUrl(); + String params = ExternalToolHandler.this.handleRequest(); + return externalTool.getToolUrl() + params; } - + public String getToolUrlForPreviewMode() { - return externalTool.getToolUrl() + getQueryParametersForUrl(true); + return externalTool.getToolUrl() + handleRequest(true); } public ExternalTool getExternalTool() { @@ -97,9 +243,9 @@ 
public void setApiToken(ApiToken apiToken) { } /** - * @return Returns Javascript that opens the explore tool in a new browser - * tab if the browser allows it.If not, it shows an alert that popups must - * be enabled in the browser. + * @return Returns Javascript that opens the explore tool in a new browser tab + * if the browser allows it.If not, it shows an alert that popups must + * be enabled in the browser. */ public String getExploreScript() { String toolUrl = this.getToolUrlWithQueryParams(); diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java index d49d66c26f7..a65ad2427ba 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java @@ -6,6 +6,7 @@ import edu.harvard.iq.dataverse.externaltools.ExternalTool.Type; import edu.harvard.iq.dataverse.util.URLTokenUtil; import edu.harvard.iq.dataverse.util.URLTokenUtil.ReservedWord; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.externaltools.ExternalTool.Scope; import java.io.StringReader; @@ -151,8 +152,7 @@ public static ExternalTool parseAddExternalToolManifest(String manifest) { if (manifest == null || manifest.isEmpty()) { throw new IllegalArgumentException("External tool manifest was null or empty!"); } - JsonReader jsonReader = Json.createReader(new StringReader(manifest)); - JsonObject jsonObject = jsonReader.readObject(); + JsonObject jsonObject = JsonUtil.getJsonObject(manifest); //Note: ExternalToolServiceBeanTest tests are dependent on the order of these retrievals String displayName = getRequiredTopLevelField(jsonObject, DISPLAY_NAME); String toolName = getOptionalTopLevelField(jsonObject, TOOL_NAME); @@ -169,6 +169,8 @@ public static ExternalTool parseAddExternalToolManifest(String manifest) { String toolUrl = getRequiredTopLevelField(jsonObject, TOOL_URL); JsonObject toolParametersObj = jsonObject.getJsonObject(TOOL_PARAMETERS); JsonArray queryParams = toolParametersObj.getJsonArray("queryParameters"); + JsonArray allowedApiCallsArray = jsonObject.getJsonArray(ALLOWED_API_CALLS); + boolean allRequiredReservedWordsFound = false; if (scope.equals(Scope.FILE)) { List requiredReservedWordCandidates = new ArrayList<>(); @@ -221,8 +223,12 @@ public static ExternalTool parseAddExternalToolManifest(String manifest) { } String toolParameters = toolParametersObj.toString(); + String allowedApiCalls = null; + if(allowedApiCallsArray !=null) { + allowedApiCalls = allowedApiCallsArray.toString(); + } - return new ExternalTool(displayName, toolName, description, externalToolTypes, scope, toolUrl, toolParameters, contentType); + return new ExternalTool(displayName, toolName, description, externalToolTypes, scope, toolUrl, toolParameters, contentType, allowedApiCalls); } private static String getRequiredTopLevelField(JsonObject jsonObject, String key) { diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java index 0dc94f835e9..50d06807a13 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java @@ -40,12 +40,13 @@ public void setId(Long id) { this.id = id; } - public enum RunResultType { SUCCESS, FAILURE, INPROGRESS }; + public enum 
RunResultType { SUCCESS, FAILURE, INPROGRESS, INTERRUPTED }; private static String RESULT_LABEL_SUCCESS = "SUCCESS"; private static String RESULT_LABEL_FAILURE = "FAILED"; private static String RESULT_LABEL_INPROGRESS = "IN PROGRESS"; private static String RESULT_DELETE_IN_PROGRESS = "DELETE IN PROGRESS"; + private static String RESULT_LABEL_INTERRUPTED = "INTERRUPTED"; @ManyToOne @JoinColumn(nullable = false) @@ -76,6 +77,8 @@ public String getResultLabel() { return RESULT_LABEL_FAILURE; } else if (isInProgress()) { return RESULT_LABEL_INPROGRESS; + } else if (isInterrupted()) { + return RESULT_LABEL_INTERRUPTED; } return null; } @@ -84,8 +87,8 @@ public String getDetailedResultLabel() { if (harvestingClient != null && harvestingClient.isDeleteInProgress()) { return RESULT_DELETE_IN_PROGRESS; } - if (isSuccess()) { - String resultLabel = RESULT_LABEL_SUCCESS; + if (isSuccess() || isInterrupted()) { + String resultLabel = getResultLabel(); resultLabel = resultLabel.concat("; "+harvestedDatasetCount+" harvested, "); resultLabel = resultLabel.concat(deletedDatasetCount+" deleted, "); @@ -128,6 +131,14 @@ public void setInProgress() { harvestResult = RunResultType.INPROGRESS; } + public boolean isInterrupted() { + return RunResultType.INTERRUPTED == harvestResult; + } + + public void setInterrupted() { + harvestResult = RunResultType.INTERRUPTED; + } + // Time of this harvest attempt: @Temporal(value = TemporalType.TIMESTAMP) private Date startTime; diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/FastGetRecord.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/FastGetRecord.java index 5b3e4df331d..c5e3a93e2df 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/FastGetRecord.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/FastGetRecord.java @@ -130,7 +130,7 @@ public void harvestRecord(String baseURL, String identifier, String metadataPref int responseCode = 0; con = (HttpURLConnection) url.openConnection(); - con.setRequestProperty("User-Agent", "DataverseHarvester/3.0"); + con.setRequestProperty("User-Agent", "Dataverse Harvesting Client v5"); con.setRequestProperty("Accept-Encoding", "compress, gzip, identify"); try { diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java index e7156dfe9aa..0e9ffb20653 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java @@ -48,6 +48,9 @@ import java.net.http.HttpClient; import java.net.http.HttpRequest; import java.net.http.HttpResponse; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.nio.file.Path; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; @@ -85,6 +88,7 @@ public class HarvesterServiceBean { public static final String HARVEST_RESULT_FAILED="failed"; public static final String DATAVERSE_PROPRIETARY_METADATA_FORMAT="dataverse_json"; public static final String DATAVERSE_PROPRIETARY_METADATA_API="/api/datasets/export?exporter="+DATAVERSE_PROPRIETARY_METADATA_FORMAT+"&persistentId="; + public static final String DATAVERSE_HARVEST_STOP_FILE="../logs/stopharvest_"; public HarvesterServiceBean() { @@ -130,7 +134,7 @@ public List getHarvestTimers() { } /** - * Run a harvest for an individual harvesting Dataverse + * Run a harvest for an individual harvesting client * @param dataverseRequest * 
@param harvestingClientId * @throws IOException @@ -141,12 +145,9 @@ public void doHarvest(DataverseRequest dataverseRequest, Long harvestingClientId if (harvestingClientConfig == null) { throw new IOException("No such harvesting client: id="+harvestingClientId); } - - Dataverse harvestingDataverse = harvestingClientConfig.getDataverse(); - - MutableBoolean harvestErrorOccurred = new MutableBoolean(false); + String logTimestamp = logFormatter.format(new Date()); - Logger hdLogger = Logger.getLogger("edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean." + harvestingDataverse.getAlias() + logTimestamp); + Logger hdLogger = Logger.getLogger("edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean." + harvestingClientConfig.getName() + logTimestamp); String logFileName = "../logs" + File.separator + "harvest_" + harvestingClientConfig.getName() + "_" + logTimestamp + ".log"; FileHandler fileHandler = new FileHandler(logFileName); hdLogger.setUseParentHandlers(false); @@ -155,21 +156,15 @@ public void doHarvest(DataverseRequest dataverseRequest, Long harvestingClientId PrintWriter importCleanupLog = new PrintWriter(new FileWriter( "../logs/harvest_cleanup_" + harvestingClientConfig.getName() + "_" + logTimestamp+".txt")); - List harvestedDatasetIds = null; - - List harvestedDatasetIdsThisBatch = new ArrayList(); - - List failedIdentifiers = new ArrayList(); - List deletedIdentifiers = new ArrayList(); + List harvestedDatasetIds = new ArrayList<>(); + List failedIdentifiers = new ArrayList<>(); + List deletedIdentifiers = new ArrayList<>(); Date harvestStartTime = new Date(); try { - boolean harvestingNow = harvestingClientConfig.isHarvestingNow(); - - if (harvestingNow) { - harvestErrorOccurred.setValue(true); - hdLogger.log(Level.SEVERE, "Cannot begin harvesting, Dataverse " + harvestingDataverse.getName() + " is currently being harvested."); + if (harvestingClientConfig.isHarvestingNow()) { + hdLogger.log(Level.SEVERE, "Cannot start harvest, client " + harvestingClientConfig.getName() + " is already harvesting."); } else { harvestingClientService.resetHarvestInProgress(harvestingClientId); @@ -177,7 +172,7 @@ public void doHarvest(DataverseRequest dataverseRequest, Long harvestingClientId if (harvestingClientConfig.isOai()) { - harvestedDatasetIds = harvestOAI(dataverseRequest, harvestingClientConfig, hdLogger, importCleanupLog, harvestErrorOccurred, failedIdentifiers, deletedIdentifiers, harvestedDatasetIdsThisBatch); + harvestOAI(dataverseRequest, harvestingClientConfig, hdLogger, importCleanupLog, failedIdentifiers, deletedIdentifiers, harvestedDatasetIds); } else { throw new IOException("Unsupported harvest type"); @@ -187,18 +182,17 @@ public void doHarvest(DataverseRequest dataverseRequest, Long harvestingClientId hdLogger.log(Level.INFO, "Datasets created/updated: " + harvestedDatasetIds.size() + ", datasets deleted: " + deletedIdentifiers.size() + ", datasets failed: " + failedIdentifiers.size()); } + } catch (StopHarvestException she) { + hdLogger.log(Level.INFO, "HARVEST INTERRUPTED BY EXTERNAL REQUEST"); + harvestingClientService.setPartiallyCompleted(harvestingClientId, new Date(), harvestedDatasetIds.size(), failedIdentifiers.size(), deletedIdentifiers.size()); } catch (Throwable e) { - harvestErrorOccurred.setValue(true); + // Any other exception should be treated as a complete failure String message = "Exception processing harvest, server= " + harvestingClientConfig.getHarvestingUrl() + ",format=" + harvestingClientConfig.getMetadataPrefix() + " " + 
e.getClass().getName() + " " + e.getMessage(); hdLogger.log(Level.SEVERE, message); logException(e, hdLogger); hdLogger.log(Level.INFO, "HARVEST NOT COMPLETED DUE TO UNEXPECTED ERROR."); - // TODO: - // even though this harvesting run failed, we may have had successfully - // processed some number of datasets, by the time the exception was thrown. - // We should record that number too. And the number of the datasets that - // had failed, that we may have counted. -- L.A. 4.4 - harvestingClientService.setHarvestFailure(harvestingClientId, new Date()); + + harvestingClientService.setHarvestFailure(harvestingClientId, new Date(), harvestedDatasetIds.size(), failedIdentifiers.size(), deletedIdentifiers.size()); } finally { harvestingClientService.resetHarvestInProgress(harvestingClientId); @@ -215,12 +209,11 @@ public void doHarvest(DataverseRequest dataverseRequest, Long harvestingClientId * @param harvestErrorOccurred have we encountered any errors during harvest? * @param failedIdentifiers Study Identifiers for failed "GetRecord" requests */ - private List harvestOAI(DataverseRequest dataverseRequest, HarvestingClient harvestingClient, Logger hdLogger, PrintWriter importCleanupLog, MutableBoolean harvestErrorOccurred, List failedIdentifiers, List deletedIdentifiers, List harvestedDatasetIdsThisBatch) - throws IOException, ParserConfigurationException, SAXException, TransformerException { + private void harvestOAI(DataverseRequest dataverseRequest, HarvestingClient harvestingClient, Logger hdLogger, PrintWriter importCleanupLog, List failedIdentifiers, List deletedIdentifiers, List harvestedDatasetIds) + throws IOException, ParserConfigurationException, SAXException, TransformerException, StopHarvestException { logBeginOaiHarvest(hdLogger, harvestingClient); - List harvestedDatasetIds = new ArrayList(); OaiHandler oaiHandler; HttpClient httpClient = null; @@ -243,6 +236,10 @@ private List harvestOAI(DataverseRequest dataverseRequest, HarvestingClien try { for (Iterator
idIter = oaiHandler.runListIdentifiers(); idIter.hasNext();) { + // Before each iteration, check if this harvesting job needs to be aborted: + if (checkIfStoppingJob(harvestingClient)) { + throw new StopHarvestException("Harvesting stopped by external request"); + } Header h = idIter.next(); String identifier = h.getIdentifier(); @@ -265,18 +262,11 @@ private List harvestOAI(DataverseRequest dataverseRequest, HarvestingClien if (datasetId != null) { harvestedDatasetIds.add(datasetId); - - if ( harvestedDatasetIdsThisBatch == null ) { - harvestedDatasetIdsThisBatch = new ArrayList(); - } - harvestedDatasetIdsThisBatch.add(datasetId); - } if (getRecordErrorOccurred.booleanValue() == true) { failedIdentifiers.add(identifier); - harvestErrorOccurred.setValue(true); - //temporary: + //can be uncommented for testing failure handling: //throw new IOException("Exception occured, stopping harvest"); } } @@ -286,8 +276,6 @@ private List harvestOAI(DataverseRequest dataverseRequest, HarvestingClien logCompletedOaiHarvest(hdLogger, harvestingClient); - return harvestedDatasetIds; - } private Long processRecord(DataverseRequest dataverseRequest, Logger hdLogger, PrintWriter importCleanupLog, OaiHandler oaiHandler, String identifier, MutableBoolean recordErrorOccurred, List deletedIdentifiers, Date dateStamp, HttpClient httpClient) { @@ -303,7 +291,7 @@ private Long processRecord(DataverseRequest dataverseRequest, Logger hdLogger, P // Make direct call to obtain the proprietary Dataverse metadata // in JSON from the remote Dataverse server: String metadataApiUrl = oaiHandler.getProprietaryDataverseMetadataURL(identifier); - logger.info("calling "+metadataApiUrl); + logger.fine("calling "+metadataApiUrl); tempFile = retrieveProprietaryDataverseMetadata(httpClient, metadataApiUrl); } else { @@ -372,7 +360,7 @@ File retrieveProprietaryDataverseMetadata (HttpClient client, String remoteApiUr HttpRequest request = HttpRequest.newBuilder() .uri(URI.create(remoteApiUrl)) .GET() - .header("User-Agent", "DataverseHarvester/6.0") + .header("User-Agent", "Dataverse Harvesting Client v5") .build(); HttpResponse response; @@ -410,6 +398,26 @@ private void deleteHarvestedDatasetIfExists(String persistentIdentifier, Dataver } hdLogger.info("No dataset found for " + persistentIdentifier + ", skipping delete. "); } + + private boolean checkIfStoppingJob(HarvestingClient harvestingClient) { + Long pid = ProcessHandle.current().pid(); + String stopFileName = DATAVERSE_HARVEST_STOP_FILE + harvestingClient.getName() + "." + pid; + Path stopFilePath = Paths.get(stopFileName); + + if (Files.exists(stopFilePath)) { + // Now that we know that the file is there, let's (try to) delete it, + // so that the harvest can be re-run. + try { + Files.delete(stopFilePath); + } catch (IOException ioex) { + // No need to treat this as a big deal (could be a permission, etc.)
+ logger.warning("Failed to delete the flag file "+stopFileName + "; check permissions and delete manually."); + } + return true; + } + + return false; + } private void logBeginOaiHarvest(Logger hdLogger, HarvestingClient harvestingClient) { hdLogger.log(Level.INFO, "BEGIN HARVEST, oaiUrl=" diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java index 32365e17852..aeb010fad6d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java @@ -188,7 +188,9 @@ public String getHarvestingUrl() { } public void setHarvestingUrl(String harvestingUrl) { - this.harvestingUrl = harvestingUrl.trim(); + if (harvestingUrl != null) { + this.harvestingUrl = harvestingUrl.trim(); + } } private String archiveUrl; diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java index 0af73550190..13cc44ce919 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java @@ -167,28 +167,20 @@ public void deleteClient(Long clientId) { @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) public void setHarvestSuccess(Long hcId, Date currentTime, int harvestedCount, int failedCount, int deletedCount) { - HarvestingClient harvestingClient = em.find(HarvestingClient.class, hcId); - if (harvestingClient == null) { - return; - } - em.refresh(harvestingClient); - - ClientHarvestRun currentRun = harvestingClient.getLastRun(); - - if (currentRun != null && currentRun.isInProgress()) { - // TODO: what if there's no current run in progress? should we just - // give up quietly, or should we make a noise of some kind? -- L.A. 
4.4 - - currentRun.setSuccess(); - currentRun.setFinishTime(currentTime); - currentRun.setHarvestedDatasetCount(new Long(harvestedCount)); - currentRun.setFailedDatasetCount(new Long(failedCount)); - currentRun.setDeletedDatasetCount(new Long(deletedCount)); - } + recordHarvestJobStatus(hcId, currentTime, harvestedCount, failedCount, deletedCount, ClientHarvestRun.RunResultType.SUCCESS); } @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) - public void setHarvestFailure(Long hcId, Date currentTime) { + public void setHarvestFailure(Long hcId, Date currentTime, int harvestedCount, int failedCount, int deletedCount) { + recordHarvestJobStatus(hcId, currentTime, harvestedCount, failedCount, deletedCount, ClientHarvestRun.RunResultType.FAILURE); + } + + @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) + public void setPartiallyCompleted(Long hcId, Date finishTime, int harvestedCount, int failedCount, int deletedCount) { + recordHarvestJobStatus(hcId, finishTime, harvestedCount, failedCount, deletedCount, ClientHarvestRun.RunResultType.INTERRUPTED); + } + + public void recordHarvestJobStatus(Long hcId, Date finishTime, int harvestedCount, int failedCount, int deletedCount, ClientHarvestRun.RunResultType result) { HarvestingClient harvestingClient = em.find(HarvestingClient.class, hcId); if (harvestingClient == null) { return; @@ -198,28 +190,40 @@ public void setHarvestFailure(Long hcId, Date currentTime) { ClientHarvestRun currentRun = harvestingClient.getLastRun(); if (currentRun != null && currentRun.isInProgress()) { - // TODO: what if there's no current run in progress? should we just - // give up quietly, or should we make a noise of some kind? -- L.A. 4.4 - currentRun.setFailed(); - currentRun.setFinishTime(currentTime); + currentRun.setResult(result); + currentRun.setFinishTime(finishTime); + currentRun.setHarvestedDatasetCount(Long.valueOf(harvestedCount)); + currentRun.setFailedDatasetCount(Long.valueOf(failedCount)); + currentRun.setDeletedDatasetCount(Long.valueOf(deletedCount)); } - } + } + + public Long getNumberOfHarvestedDatasetsByAllClients() { + try { + return (Long) em.createNativeQuery("SELECT count(d.id) FROM dataset d " + + " WHERE d.harvestingclient_id IS NOT NULL").getSingleResult(); + + } catch (Exception ex) { + logger.info("Warning: exception looking up the total number of harvested datasets: " + ex.getMessage()); + return 0L; + } + } public Long getNumberOfHarvestedDatasetByClients(List clients) { - String dvs = null; + String clientIds = null; for (HarvestingClient client: clients) { - if (dvs == null) { - dvs = client.getDataverse().getId().toString(); + if (clientIds == null) { + clientIds = client.getId().toString(); } else { - dvs = dvs.concat(","+client.getDataverse().getId().toString()); + clientIds = clientIds.concat(","+client.getId().toString()); } } try { - return (Long) em.createNativeQuery("SELECT count(d.id) FROM dataset d, " - + " dvobject o WHERE d.id = o.id AND o.owner_id in (" - + dvs + ")").getSingleResult(); + return (Long) em.createNativeQuery("SELECT count(d.id) FROM dataset d " + + " WHERE d.harvestingclient_id in (" + + clientIds + ")").getSingleResult(); } catch (Exception ex) { logger.info("Warning: exception trying to count harvested datasets by clients: " + ex.getMessage()); diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/StopHarvestException.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/StopHarvestException.java new file mode 100644 index 00000000000..dffa2dd0385 --- /dev/null +++ 
b/src/main/java/edu/harvard/iq/dataverse/harvest/client/StopHarvestException.java @@ -0,0 +1,17 @@ +package edu.harvard.iq.dataverse.harvest.client; + +/** + * + * @author landreev + */ + +public class StopHarvestException extends Exception { + public StopHarvestException(String message) { + super(message); + } + + public StopHarvestException(String message, Throwable cause) { + super(message, cause); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java index 5eacb1addb6..f778fd56644 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java @@ -96,9 +96,15 @@ public class OAIServlet extends HttpServlet { // be calling ListIdentifiers, and then making direct calls to the export // API of the remote Dataverse, to obtain the records in native json. This // is how we should have implemented this in the first place, really. + /* + SEK + per #3621 we are adding urls to the namespace and schema + These will not resolve presently. the change is so that the + xml produced by https://demo.dataverse.org/oai?verb=ListMetadataFormats will validate + */ private static final String DATAVERSE_EXTENDED_METADATA_FORMAT = "dataverse_json"; - private static final String DATAVERSE_EXTENDED_METADATA_NAMESPACE = "Custom Dataverse metadata in JSON format (Dataverse4 to Dataverse4 harvesting only)"; - private static final String DATAVERSE_EXTENDED_METADATA_SCHEMA = "JSON schema pending"; + private static final String DATAVERSE_EXTENDED_METADATA_NAMESPACE = "https://dataverse.org/schema/core"; + private static final String DATAVERSE_EXTENDED_METADATA_SCHEMA = "https://dataverse.org/schema/core.xsd"; private Context xoaiContext; private SetRepository setRepository; diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/xoai/DataverseXoaiItemRepository.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/xoai/DataverseXoaiItemRepository.java index faf3cf9ddc4..147d42648fa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/xoai/DataverseXoaiItemRepository.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/xoai/DataverseXoaiItemRepository.java @@ -49,7 +49,7 @@ public DataverseXoaiItemRepository (OAIRecordServiceBean recordService, DatasetS } @Override - public ItemIdentifier getItem(String identifier) throws IdDoesNotExistException { + public ItemIdentifier getItemIdentifier(String identifier) throws IdDoesNotExistException { // This method is called when ListMetadataFormats request specifies // the identifier, requesting the formats available for this specific record. 
// In our case, under the current implementation, we need to simply look diff --git a/src/main/java/edu/harvard/iq/dataverse/license/License.java b/src/main/java/edu/harvard/iq/dataverse/license/License.java index 96baacc6731..c6e2cdbc2e5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/license/License.java +++ b/src/main/java/edu/harvard/iq/dataverse/license/License.java @@ -23,9 +23,9 @@ */ @NamedQueries({ @NamedQuery( name="License.findAll", - query="SELECT l FROM License l ORDER BY (case when l.isDefault then 0 else 1 end), l.id asc"), + query="SELECT l FROM License l ORDER BY (case when l.isDefault then 0 else 1 end), l.sortOrder, l.id asc"), @NamedQuery( name="License.findAllActive", - query="SELECT l FROM License l WHERE l.active='true' ORDER BY (case when l.isDefault then 0 else 1 end), l.id asc"), + query="SELECT l FROM License l WHERE l.active='true' ORDER BY (case when l.isDefault then 0 else 1 end), l.sortOrder, l.id asc"), @NamedQuery( name="License.findById", query = "SELECT l FROM License l WHERE l.id=:id"), @NamedQuery( name="License.findDefault", @@ -42,6 +42,8 @@ query = "UPDATE License l SET l.isDefault='false'"), @NamedQuery( name="License.setActiveState", query = "UPDATE License l SET l.active=:state WHERE l.id=:id"), + @NamedQuery( name="License.setSortOrder", + query = "UPDATE License l SET l.sortOrder=:sortOrder WHERE l.id=:id"), }) @Entity @@ -73,6 +75,9 @@ public class License { @Column(nullable = false) private boolean isDefault; + + @Column(nullable = false, columnDefinition = "BIGINT NOT NULL DEFAULT 0") + private Long sortOrder; @OneToMany(mappedBy="license") private List termsOfUseAndAccess; @@ -80,7 +85,7 @@ public class License { public License() { } - public License(String name, String shortDescription, URI uri, URI iconUrl, boolean active) { + public License(String name, String shortDescription, URI uri, URI iconUrl, boolean active, Long sortOrder) { this.name = name; this.shortDescription = shortDescription; this.uri = uri.toASCIIString(); @@ -91,6 +96,7 @@ public License(String name, String shortDescription, URI uri, URI iconUrl, boole } this.active = active; isDefault = false; + this.sortOrder = sortOrder; } public Long getId() { @@ -172,17 +178,26 @@ public void setTermsOfUseAndAccess(List termsOfUseAndAccess this.termsOfUseAndAccess = termsOfUseAndAccess; } + public Long getSortOrder() { + return sortOrder; + } + + public void setSortOrder(Long sortOrder) { + this.sortOrder = sortOrder; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; License license = (License) o; - return active == license.active && id.equals(license.id) && name.equals(license.name) && shortDescription.equals(license.shortDescription) && uri.equals(license.uri) && Objects.equals(iconUrl, license.iconUrl); + return active == license.active && id.equals(license.id) && name.equals(license.name) && shortDescription.equals(license.shortDescription) && uri.equals(license.uri) && Objects.equals(iconUrl, license.iconUrl) + && Objects.equals(sortOrder, license.sortOrder); } @Override public int hashCode() { - return Objects.hash(id, name, shortDescription, uri, iconUrl, active); + return Objects.hash(id, name, shortDescription, uri, iconUrl, active, sortOrder); } @Override @@ -195,6 +210,7 @@ public String toString() { ", iconUrl=" + iconUrl + ", active=" + active + ", isDefault=" + isDefault + + ", sortOrder=" + sortOrder + '}'; } diff --git 
a/src/main/java/edu/harvard/iq/dataverse/license/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/license/LicenseServiceBean.java index c18e168685a..b554fecd437 100644 --- a/src/main/java/edu/harvard/iq/dataverse/license/LicenseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/license/LicenseServiceBean.java @@ -93,11 +93,23 @@ public int setActive(Long id, boolean state) throws WrappedResponse { new IllegalArgumentException("License already " + (state ? "active" : "inactive")), null); } } + + public int setSortOrder(Long id, Long sortOrder) throws WrappedResponse { + License candidate = getById(id); + if (candidate == null) + return 0; + + return em.createNamedQuery("License.setSortOrder").setParameter("id", id).setParameter("sortOrder", sortOrder) + .executeUpdate(); + } public License save(License license) throws WrappedResponse { if (license.getId() != null) { throw new WrappedResponse(new IllegalArgumentException("There shouldn't be an ID in the request body"), null); } + if (license.getSortOrder() == null) { + throw new WrappedResponse(new IllegalArgumentException("There should be a sort order value in the request body"), null); + } try { em.persist(license); em.flush(); diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 484e5768eb1..4661e9c1cd5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -6,6 +6,7 @@ import edu.harvard.iq.dataverse.DataFileTag; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetField; +import edu.harvard.iq.dataverse.DatasetFieldCompoundValue; import edu.harvard.iq.dataverse.DatasetFieldConstant; import edu.harvard.iq.dataverse.DatasetFieldServiceBean; import edu.harvard.iq.dataverse.DatasetFieldType; @@ -37,6 +38,7 @@ import java.io.IOException; import java.io.InputStream; import java.sql.Timestamp; +import java.text.NumberFormat; import java.text.SimpleDateFormat; import java.time.LocalDate; import java.util.ArrayList; @@ -947,6 +949,70 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set Float.parseFloat(westLon)) { + minWestLon=westLon; + } + if(maxEastLon==null || Float.parseFloat(maxEastLon) < Float.parseFloat(eastLon)) { + maxEastLon=eastLon; + } + if(minSouthLat==null || Float.parseFloat(minSouthLat) > Float.parseFloat(southLat)) { + minSouthLat=southLat; + } + if(maxNorthLat==null || Float.parseFloat(maxNorthLat) < Float.parseFloat(northLat)) { + maxNorthLat=northLat; + } + //W, E, N, S + solrInputDocument.addField(SearchFields.GEOLOCATION, "ENVELOPE(" + westLon + "," + eastLon + "," + northLat + "," + southLat + ")"); + } + } + //Only one bbox per dataset + //W, E, N, S + if ((minWestLon != null || maxEastLon != null) && (maxNorthLat != null || minSouthLat != null)) { + solrInputDocument.addField(SearchFields.BOUNDING_BOX, "ENVELOPE(" + minWestLon + "," + maxEastLon + "," + maxNorthLat + "," + minSouthLat + ")"); + } + + } } for(String metadataBlockName : metadataBlocksWithValue) { diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java index 2e75a81ed5f..f3d5f85121d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java @@ -268,4 +268,9 @@ more targeted results for just datasets. 
The format is YYYY (i.e. public static final String FULL_TEXT = "_text_"; public static final String EMBARGO_END_DATE = "embargoEndDate"; + // SpatialRecursivePrefixTreeFieldType: https://solr.apache.org/guide/8_11/spatial-search.html#rpt + public static final String GEOLOCATION = "geolocation"; + // BBoxField (bounding box): https://solr.apache.org/guide/8_11/spatial-search.html#bboxfield + public static final String BOUNDING_BOX = "boundingBox"; + } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index 9bb83c88add..2b40347828a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -355,7 +355,7 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused DataverseRequest dataverseRequest = new DataverseRequest(session.getUser(), httpServletRequest); List dataverses = new ArrayList<>(); dataverses.add(dataverse); - solrQueryResponse = searchService.search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinal, sortField, sortOrder.toString(), paginationStart, onlyDataRelatedToMe, numRows, false); + solrQueryResponse = searchService.search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinal, sortField, sortOrder.toString(), paginationStart, onlyDataRelatedToMe, numRows, false, null, null); if (solrQueryResponse.hasError()){ logger.info(solrQueryResponse.getError()); setSolrErrorEncountered(true); @@ -363,7 +363,7 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused // This 2nd search() is for populating the "type" ("dataverse", "dataset", "file") facets: -- L.A. // (why exactly do we need it, again?) 
// To get the counts we display in the types facets particulary for unselected types - SEK 08/25/2021 - solrQueryResponseAllTypes = searchService.search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinalAllTypes, sortField, sortOrder.toString(), paginationStart, onlyDataRelatedToMe, numRows, false); + solrQueryResponseAllTypes = searchService.search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinalAllTypes, sortField, sortOrder.toString(), paginationStart, onlyDataRelatedToMe, numRows, false, null, null); if (solrQueryResponse.hasError()){ logger.info(solrQueryResponse.getError()); setSolrErrorEncountered(true); diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index ca158198204..b87a334e938 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -100,7 +100,7 @@ public class SearchServiceBean { * @throws SearchException */ public SolrQueryResponse search(DataverseRequest dataverseRequest, List dataverses, String query, List filterQueries, String sortField, String sortOrder, int paginationStart, boolean onlyDatatRelatedToMe, int numResultsPerPage) throws SearchException { - return search(dataverseRequest, dataverses, query, filterQueries, sortField, sortOrder, paginationStart, onlyDatatRelatedToMe, numResultsPerPage, true); + return search(dataverseRequest, dataverses, query, filterQueries, sortField, sortOrder, paginationStart, onlyDatatRelatedToMe, numResultsPerPage, true, null, null); } /** @@ -121,10 +121,24 @@ public SolrQueryResponse search(DataverseRequest dataverseRequest, List dataverses, String query, List filterQueries, String sortField, String sortOrder, int paginationStart, boolean onlyDatatRelatedToMe, int numResultsPerPage, boolean retrieveEntities) throws SearchException { + public SolrQueryResponse search( + DataverseRequest dataverseRequest, + List dataverses, + String query, + List filterQueries, + String sortField, String sortOrder, + int paginationStart, + boolean onlyDatatRelatedToMe, + int numResultsPerPage, + boolean retrieveEntities, + String geoPoint, + String geoRadius + ) throws SearchException { if (paginationStart < 0) { throw new IllegalArgumentException("paginationStart must be 0 or greater"); @@ -204,8 +218,12 @@ public SolrQueryResponse search(DataverseRequest dataverseRequest, List queryStrings, boolean isAnd, bo return queryBuilder.toString().trim(); } - + + /** + * @return Null if supplied point is null or whitespace. + * @throws IllegalArgumentException If the lat/long is not separated by a + * comma. + * @throws NumberFormatException If the lat/long values are not numbers. + */ + public static String getGeoPoint(String userSuppliedGeoPoint) throws IllegalArgumentException, NumberFormatException { + if (userSuppliedGeoPoint == null || userSuppliedGeoPoint.isBlank()) { + return null; + } + String[] parts = userSuppliedGeoPoint.split(","); + // We'll supply our own errors but Solr gives a decent one: + // "Point must be in 'lat, lon' or 'x y' format: 42.3;-71.1" + if (parts.length != 2) { + String msg = "Must contain a single comma to separate latitude and longitude."; + throw new IllegalArgumentException(msg); + } + float latitude = Float.parseFloat(parts[0]); + float longitude = Float.parseFloat(parts[1]); + return latitude + "," + longitude; + } + + /** + * @return Null if supplied radius is null or whitespace. 
+ * @throws NumberFormatException If the radius is not a positive number. + */ + public static String getGeoRadius(String userSuppliedGeoRadius) throws NumberFormatException { + if (userSuppliedGeoRadius == null || userSuppliedGeoRadius.isBlank()) { + return null; + } + float radius = 0; + try { + radius = Float.parseFloat(userSuppliedGeoRadius); + } catch (NumberFormatException ex) { + String msg = "Non-number radius supplied."; + throw new NumberFormatException(msg); + } + if (radius <= 0) { + String msg = "The supplied radius must be greater than zero."; + throw new NumberFormatException(msg); + } + return userSuppliedGeoRadius; + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java index a495842e40d..587e054dc4a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java @@ -266,7 +266,9 @@ private SolrQueryResponse findHits(SavedSearch savedSearch) throws SearchExcepti paginationStart, dataRelatedToMe, numResultsPerPage, - false // do not retrieve entities + false, // do not retrieve entities + null, + null ); return solrQueryResponse; } diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index 223e4b86da9..e409607346b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -42,6 +42,10 @@ public enum JvmSettings { VERSION(PREFIX, "version"), BUILD(PREFIX, "build"), + // API SETTINGS + SCOPE_API(PREFIX, "api"), + API_SIGNING_SECRET(SCOPE_API, "signing-secret"), + ; private static final String SCOPE_SEPARATOR = "."; diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index 50e29d2a333..102772bdcf3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -563,7 +563,11 @@ Whether Harvesting (OAI) service is enabled /* * Allow a custom JavaScript to control values of specific fields. */ - ControlledVocabularyCustomJavaScript + ControlledVocabularyCustomJavaScript, + /** + * A compound setting for disabling signup for remote Auth providers: + */ + AllowRemoteAuthSignUp ; @Override @@ -668,7 +672,39 @@ public Long getValueForCompoundKeyAsLong(Key key, String param){ } } + + /** + * Same, but with Booleans + * (returns null if not set; up to the calling method to decide what that should + * default to in each specific case) + * Example: + * :AllowRemoteAuthSignUp {"default":"true","google":"false"} + */ + public Boolean getValueForCompoundKeyAsBoolean(Key key, String param) { + + String val = this.getValueForKey(key); + + if (val == null) { + return null; + } + + try (StringReader rdr = new StringReader(val)) { + JsonObject settings = Json.createReader(rdr).readObject(); + if (settings.containsKey(param)) { + return Boolean.parseBoolean(settings.getString(param)); + } else if (settings.containsKey("default")) { + return Boolean.parseBoolean(settings.getString("default")); + } else { + return null; + } + + } catch (Exception e) { + logger.log(Level.WARNING, "Incorrect setting. 
Could not convert \"{0}\" from setting {1} to boolean: {2}", new Object[]{val, key.toString(), e.getMessage()}); + return null; + } + + } /** * Return the value stored, or the default value, in case no setting by that * name exists. The main difference between this method and the other {@code get()}s diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 339de904f9e..257bc166ea0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -108,6 +108,8 @@ import java.util.Arrays; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; +import ucar.nc2.NetcdfFile; +import ucar.nc2.NetcdfFiles; /** * a 4.0 implementation of the DVN FileUtil; @@ -467,6 +469,11 @@ public static String determineFileType(File f, String fileName) throws IOExcepti fileType = "application/fits"; } } + + // step 3: Check if NetCDF or HDF5 + if (fileType == null) { + fileType = checkNetcdfOrHdf5(f); + } // step 3: check the mime type of this file with Jhove if (fileType == null){ @@ -669,6 +676,43 @@ private static boolean isGraphMLFile(File file) { return isGraphML; } + public static String checkNetcdfOrHdf5(File file) { + try ( NetcdfFile netcdfFile = NetcdfFiles.open(file.getAbsolutePath())) { + if (netcdfFile == null) { + // Can't open as a NetCDF or HDF5 file. + return null; + } + String type = netcdfFile.getFileTypeId(); + if (type == null) { + return null; + } + switch (type) { + case "NetCDF": + return "application/netcdf"; + case "NetCDF-4": + return "application/netcdf"; + case "HDF5": + return "application/x-hdf5"; + default: + break; + } + } catch (IOException ex) { + /** + * When an HDF4 file is passed, it won't be detected. 
Instead, we've + * seen exceptions like this: + * + * ucar.nc2.internal.iosp.hdf4.H4header makeDimension WARNING: + * **dimension length=0 for TagVGroup= *refno=124 tag= VG (1965) + * Vgroup length=28 class= Dim0.0 name= ixx using data 123 + * + * java.lang.IllegalArgumentException: Dimension length =0 must be > + * 0 + */ + return null; + } + return null; + } + // from MD5Checksum.java public static String calculateChecksum(String datafile, ChecksumType checksumType) { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java index d64a1f7cce1..72980c3451a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java @@ -34,8 +34,12 @@ public static String getSubjectTextBasedOnNotification(UserNotification userNoti List rootDvNameAsList = Arrays.asList(BrandingUtil.getInstallationBrandName()); String datasetDisplayName = ""; - if (objectOfNotification != null && (objectOfNotification instanceof Dataset) ) { - datasetDisplayName = ((Dataset)objectOfNotification).getDisplayName(); + if (objectOfNotification != null) { + if (objectOfNotification instanceof Dataset) { + datasetDisplayName = ((Dataset) objectOfNotification).getDisplayName(); + } else if (objectOfNotification instanceof DatasetVersion) { + datasetDisplayName = ((DatasetVersion) objectOfNotification).getDataset().getDisplayName(); + } } switch (userNotification.getType()) { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index fc7fd7beb06..f7743cd313a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1131,4 +1131,15 @@ public Map getCurationLabels() { } return labelMap; } + + public boolean isSignupDisabledForRemoteAuthProvider(String providerId) { + Boolean ret = settingsService.getValueForCompoundKeyAsBoolean(SettingsServiceBean.Key.AllowRemoteAuthSignUp, providerId); + + // we default to false - i.e., "not disabled" if the setting is not present: + if (ret == null) { + return false; + } + + return !ret; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java index b3d5f9d6b74..4acf2d544e8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java @@ -5,6 +5,9 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; +import javax.json.Json; +import javax.json.JsonValue; + import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.FileMetadata; @@ -95,12 +98,17 @@ public ApiToken getApiToken() { public String getLocaleCode() { return localeCode; } - - public String getQueryParam(String key, String value) { + + public JsonValue getParam(String value) { String tokenValue = null; tokenValue = getTokenValue(value); - if (tokenValue != null) { - return key + '=' + tokenValue; + if (tokenValue != null && !tokenValue.isBlank()) { + try{ + int x =Integer.parseInt(tokenValue); + return Json.createValue(x); + } catch (NumberFormatException nfe){ + return Json.createValue(tokenValue); + } } else { return null; } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java index 
b11334520e6..29c4e8a6fb9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse.util; +import java.net.MalformedURLException; import java.net.URL; import java.nio.charset.Charset; import java.util.List; @@ -19,6 +20,10 @@ public class UrlSignerUtil { private static final Logger logger = Logger.getLogger(UrlSignerUtil.class.getName()); + public static final String SIGNED_URL_TOKEN="token"; + public static final String SIGNED_URL_METHOD="method"; + public static final String SIGNED_URL_USER="user"; + public static final String SIGNED_URL_UNTIL="until"; /** * * @param baseUrl - the URL to sign - cannot contain query params @@ -34,7 +39,7 @@ public class UrlSignerUtil { * @return - the signed URL */ public static String signUrl(String baseUrl, Integer timeout, String user, String method, String key) { - StringBuilder signedUrl = new StringBuilder(baseUrl); + StringBuilder signedUrlBuilder = new StringBuilder(baseUrl); boolean firstParam = true; if (baseUrl.contains("?")) { @@ -44,33 +49,33 @@ public static String signUrl(String baseUrl, Integer timeout, String user, Strin LocalDateTime validTime = LocalDateTime.now(); validTime = validTime.plusMinutes(timeout); validTime.toString(); - signedUrl.append(firstParam ? "?" : "&").append("until=").append(validTime); + signedUrlBuilder.append(firstParam ? "?" : "&").append(SIGNED_URL_UNTIL + "=").append(validTime); firstParam = false; } if (user != null) { - signedUrl.append(firstParam ? "?" : "&").append("user=").append(user); + signedUrlBuilder.append(firstParam ? "?" : "&").append(SIGNED_URL_USER + "=").append(user); firstParam = false; } if (method != null) { - signedUrl.append(firstParam ? "?" : "&").append("method=").append(method); + signedUrlBuilder.append(firstParam ? "?" : "&").append(SIGNED_URL_METHOD + "=").append(method); firstParam=false; } - signedUrl.append(firstParam ? "?" : "&").append("token="); - logger.fine("String to sign: " + signedUrl.toString() + ""); - signedUrl.append(DigestUtils.sha512Hex(signedUrl.toString() + key)); - logger.fine("Generated Signed URL: " + signedUrl.toString()); + signedUrlBuilder.append(firstParam ? "?" : "&").append(SIGNED_URL_TOKEN + "="); + logger.fine("String to sign: " + signedUrlBuilder.toString() + ""); + String signedUrl = signedUrlBuilder.toString(); + signedUrl= signedUrl + (DigestUtils.sha512Hex(signedUrl + key)); if (logger.isLoggable(Level.FINE)) { logger.fine( - "URL signature is " + (isValidUrl(signedUrl.toString(), user, method, key) ? "valid" : "invalid")); + "URL signature is " + (isValidUrl(signedUrl, user, method, key) ? "valid" : "invalid")); } - return signedUrl.toString(); + return signedUrl; } /** * This method will only return true if the URL and parameters except the * "token" are unchanged from the original/match the values sent to this method, * and the "token" parameter matches what this method recalculates using the - * shared key THe method also assures that the "until" timestamp is after the + * shared key. The method also assures that the "until" timestamp is after the * current time. 
* * @param signedUrl - the signed URL as received from Dataverse @@ -97,19 +102,19 @@ public static boolean isValidUrl(String signedUrl, String user, String method, S String allowedMethod = null; String allowedUser = null; for (NameValuePair nvp : params) { - if (nvp.getName().equals("token")) { + if (nvp.getName().equals(SIGNED_URL_TOKEN)) { hash = nvp.getValue(); logger.fine("Hash: " + hash); } - if (nvp.getName().equals("until")) { + if (nvp.getName().equals(SIGNED_URL_UNTIL)) { dateString = nvp.getValue(); logger.fine("Until: " + dateString); } - if (nvp.getName().equals("method")) { + if (nvp.getName().equals(SIGNED_URL_METHOD)) { allowedMethod = nvp.getValue(); logger.fine("Method: " + allowedMethod); } - if (nvp.getName().equals("user")) { + if (nvp.getName().equals(SIGNED_URL_USER)) { allowedUser = nvp.getValue(); logger.fine("User: " + allowedUser); } @@ -148,4 +153,18 @@ public static boolean isValidUrl(String signedUrl, String user, String method, S return valid; } + public static boolean hasToken(String urlString) { + try { + URL url = new URL(urlString); + List params = URLEncodedUtils.parse(url.getQuery(), Charset.forName("UTF-8")); + for (NameValuePair nvp : params) { + if (nvp.getName().equals(SIGNED_URL_TOKEN)) { + return true; + } + } + } catch (MalformedURLException mue) { + logger.fine("Bad url string: " + urlString); + } + return false; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 4ecdc73ae6e..905479c4e0d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -902,10 +902,10 @@ public String parseHarvestingClient(JsonObject obj, HarvestingClient harvestingC String dataverseAlias = obj.getString("dataverseAlias",null); harvestingClient.setName(obj.getString("nickName",null)); - harvestingClient.setHarvestType(obj.getString("type",null)); + harvestingClient.setHarvestStyle(obj.getString("style", "default")); harvestingClient.setHarvestingUrl(obj.getString("harvestUrl",null)); harvestingClient.setArchiveUrl(obj.getString("archiveUrl",null)); - harvestingClient.setArchiveDescription(obj.getString("archiveDescription")); + harvestingClient.setArchiveDescription(obj.getString("archiveDescription", null)); harvestingClient.setMetadataPrefix(obj.getString("metadataFormat",null)); harvestingClient.setHarvestingSet(obj.getString("set",null)); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index e088122419d..dc547f2e52c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -550,6 +550,17 @@ public static JsonObjectBuilder json(DatasetFieldType fld) { fieldsBld.add("type", fld.getFieldType().toString()); fieldsBld.add("watermark", fld.getWatermark()); fieldsBld.add("description", fld.getDescription()); + fieldsBld.add("multiple", fld.isAllowMultiples()); + fieldsBld.add("isControlledVocabulary", fld.isControlledVocabulary()); + if (fld.isControlledVocabulary()) { + // If the field has a controlled vocabulary, + // add all values to the resulting JSON + JsonArrayBuilder jab = Json.createArrayBuilder(); + for (ControlledVocabularyValue cvv : fld.getControlledVocabularyValues()) { + jab.add(cvv.getStrValue()); + } + fieldsBld.add("controlledVocabularyValues", jab); + } if 
(!fld.getChildDatasetFieldTypes().isEmpty()) { JsonObjectBuilder subFieldsBld = jsonObjectBuilder(); for (DatasetFieldType subFld : fld.getChildDatasetFieldTypes()) { @@ -833,7 +844,8 @@ public static JsonObjectBuilder json(License license) { .add("uri", license.getUri().toString()) .add("iconUrl", license.getIconUrl() == null ? null : license.getIconUrl().toString()) .add("active", license.isActive()) - .add("isDefault", license.isDefault()); + .add("isDefault", license.isDefault()) + .add("sortOrder", license.getSortOrder()); } public static Collector stringsToJsonArray() { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java index f4a3c635f8b..ef506990f69 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java @@ -57,10 +57,16 @@ public static String prettyPrint(javax.json.JsonObject jsonObject) { } return stringWriter.toString(); } - + public static javax.json.JsonObject getJsonObject(String serializedJson) { try (StringReader rdr = new StringReader(serializedJson)) { return Json.createReader(rdr).readObject(); } } + + public static JsonArray getJsonArray(String serializedJson) { + try (StringReader rdr = new StringReader(serializedJson)) { + return Json.createReader(rdr).readArray(); + } + } } diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index d9a72450b25..f7b46c308f5 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -473,6 +473,8 @@ oauth2.convertAccount.failedDeactivated=Your existing account cannot be converte # oauth2/callback.xhtml oauth2.callback.page.title=OAuth Callback oauth2.callback.message=Authentication Error - Dataverse could not authenticate your login with the provider that you selected. Please make sure you authorize your account to connect with Dataverse. For more details about the information being requested, see the User Guide. +oauth2.callback.error.providerDisabled=This authentication method ({0}) is currently disabled. Please log in using one of the supported methods. +oauth2.callback.error.signupDisabledForProvider=Sorry, signup for new accounts using {0} authentication is currently disabled. # deactivated user accounts deactivated.error=Sorry, your account has been deactivated. @@ -518,7 +520,7 @@ harvestclients.btn.add=Add Client harvestclients.tab.header.name=Nickname harvestclients.tab.header.url=URL harvestclients.tab.header.lastrun=Last Run -harvestclients.tab.header.lastresults=Last Results +harvestclients.tab.header.lastresults=Last Result harvestclients.tab.header.action=Actions harvestclients.tab.header.action.btn.run=Run Harvesting harvestclients.tab.header.action.btn.edit=Edit diff --git a/src/main/java/propertyFiles/License.properties b/src/main/java/propertyFiles/License.properties new file mode 100644 index 00000000000..2347fed9db6 --- /dev/null +++ b/src/main/java/propertyFiles/License.properties @@ -0,0 +1,4 @@ +license.cc0_1.0.description=Creative Commons CC0 1.0 Universal Public Domain Dedication. +license.cc_by_4.0.description=Creative Commons Attribution 4.0 International License. 
diff --git a/src/main/java/propertyFiles/License.properties b/src/main/java/propertyFiles/License.properties
new file mode 100644
index 00000000000..2347fed9db6
--- /dev/null
+++ b/src/main/java/propertyFiles/License.properties
@@ -0,0 +1,4 @@
+license.cc0_1.0.description=Creative Commons CC0 1.0 Universal Public Domain Dedication.
+license.cc_by_4.0.description=Creative Commons Attribution 4.0 International License.
+license.cc0_1.0.name=CC0 1.0
+license.cc_by_4.0.name=CC-BY 4.0
diff --git a/src/main/java/propertyFiles/citation.properties b/src/main/java/propertyFiles/citation.properties
index 668542c92be..f35ede79b50 100644
--- a/src/main/java/propertyFiles/citation.properties
+++ b/src/main/java/propertyFiles/citation.properties
@@ -251,6 +251,7 @@ controlledvocabulary.subject.social_sciences=Social Sciences
 controlledvocabulary.subject.other=Other
 controlledvocabulary.publicationIDType.ark=ark
 controlledvocabulary.publicationIDType.arxiv=arXiv
+controlledvocabulary.publicationIDType.cstr=cstr
 controlledvocabulary.publicationIDType.bibcode=bibcode
 controlledvocabulary.publicationIDType.doi=doi
 controlledvocabulary.publicationIDType.ean13=ean13
@@ -345,7 +346,7 @@ controlledvocabulary.language.galician=Galician
 controlledvocabulary.language.georgian=Georgian
 controlledvocabulary.language.german=German
 controlledvocabulary.language.greek_(modern)=Greek (modern)
-controlledvocabulary.language.guarani=Guaraní
+controlledvocabulary.language.guarani=GuaranÃ­
 controlledvocabulary.language.gujarati=Gujarati
 controlledvocabulary.language.haitian,_haitian_creole=Haitian, Haitian Creole
 controlledvocabulary.language.hausa=Hausa
@@ -405,7 +406,7 @@ controlledvocabulary.language.navajo,_navaho=Navajo, Navaho
 controlledvocabulary.language.northern_ndebele=Northern Ndebele
 controlledvocabulary.language.nepali=Nepali
 controlledvocabulary.language.ndonga=Ndonga
-controlledvocabulary.language.norwegian_bokmal=Norwegian Bokmål
+controlledvocabulary.language.norwegian_bokmal=Norwegian BokmÃ¥l
 controlledvocabulary.language.norwegian_nynorsk=Norwegian Nynorsk
 controlledvocabulary.language.norwegian=Norwegian
 controlledvocabulary.language.nuosu=Nuosu
@@ -467,7 +468,7 @@ controlledvocabulary.language.urdu=Urdu
 controlledvocabulary.language.uzbek=Uzbek
 controlledvocabulary.language.venda=Venda
 controlledvocabulary.language.vietnamese=Vietnamese
-controlledvocabulary.language.volapuk=Volapük
+controlledvocabulary.language.volapuk=VolapÃ¼k
 controlledvocabulary.language.walloon=Walloon
 controlledvocabulary.language.welsh=Welsh
 controlledvocabulary.language.wolof=Wolof
@@ -477,4 +478,4 @@ controlledvocabulary.language.yiddish=Yiddish
 controlledvocabulary.language.yoruba=Yoruba
 controlledvocabulary.language.zhuang,_chuang=Zhuang, Chuang
 controlledvocabulary.language.zulu=Zulu
-controlledvocabulary.language.not_applicable=Not applicable
\ No newline at end of file
+controlledvocabulary.language.not_applicable=Not applicable
diff --git a/src/main/resources/db/migration/V5.13.0.1__8671-sorting_licenses.sql b/src/main/resources/db/migration/V5.13.0.1__8671-sorting_licenses.sql
new file mode 100644
index 00000000000..6fe3f1142c2
--- /dev/null
+++ b/src/main/resources/db/migration/V5.13.0.1__8671-sorting_licenses.sql
@@ -0,0 +1,5 @@
+ALTER TABLE license
+ADD COLUMN IF NOT EXISTS sortorder BIGINT NOT NULL DEFAULT 0;
+
+CREATE INDEX IF NOT EXISTS license_sortorder_id
+ON license (sortorder, id);
\ No newline at end of file
diff --git a/src/main/resources/db/migration/V5.13.0.2__7715-signed-urls-for-tools.sql b/src/main/resources/db/migration/V5.13.0.2__7715-signed-urls-for-tools.sql
new file mode 100644
index 00000000000..5e13de057dd
--- /dev/null
+++ b/src/main/resources/db/migration/V5.13.0.2__7715-signed-urls-for-tools.sql
@@ -0,0 +1 @@
+ALTER TABLE externaltool ADD COLUMN IF NOT EXISTS allowedapicalls TEXT;
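To make the effect of the V5.13.0.1 migration above concrete (an editorial sketch, not part of the patch): licenses can now be listed in the admin-defined order by sorting on the new sortorder column, which the new index is built to support. The JDBC URL and credentials are placeholders, and the presence of a name column on the license table is an assumption.

// Editorial sketch only, not part of this patch.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class LicenseSortOrderSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder connection details; point these at a real Dataverse database.
        try (Connection conn = DriverManager.getConnection(
                    "jdbc:postgresql://localhost:5432/dvndb", "dvnapp", "secret");
             Statement stmt = conn.createStatement();
             // Same ordering the new license_sortorder_id index covers.
             ResultSet rs = stmt.executeQuery(
                    "SELECT id, name, sortorder FROM license ORDER BY sortorder, id")) {
            while (rs.next()) {
                System.out.printf("%d  %s  (sortorder=%d)%n",
                        rs.getLong("id"), rs.getString("name"), rs.getLong("sortorder"));
            }
        }
    }
}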
diff --git a/src/main/resources/db/migration/V5.9.0.1__7440-configurable-license-list.sql b/src/main/resources/db/migration/V5.9.0.1__7440-configurable-license-list.sql
index cb76b2270a4..a8f7f41e2ef 100644
--- a/src/main/resources/db/migration/V5.9.0.1__7440-configurable-license-list.sql
+++ b/src/main/resources/db/migration/V5.9.0.1__7440-configurable-license-list.sql
@@ -2,7 +2,6 @@ ALTER TABLE termsofuseandaccess
 ADD COLUMN IF NOT EXISTS license_id BIGINT;
 DO $$
 BEGIN
-  BEGIN
     ALTER TABLE termsofuseandaccess ADD CONSTRAINT fk_termsofuseandcesss_license_id foreign key (license_id) REFERENCES license(id);
 EXCEPTION
diff --git a/src/main/webapp/WEB-INF/glassfish-web.xml b/src/main/webapp/WEB-INF/glassfish-web.xml
index ecd3ba15c40..e56d7013abf 100644
--- a/src/main/webapp/WEB-INF/glassfish-web.xml
+++ b/src/main/webapp/WEB-INF/glassfish-web.xml
@@ -8,9 +8,15 @@
     Keep a copy of the generated servlet class' java code.
+
+
-
+
+
diff --git a/src/main/webapp/dashboard.xhtml b/src/main/webapp/dashboard.xhtml
index c5b6a507a92..5a72b52937b 100644
--- a/src/main/webapp/dashboard.xhtml
+++ b/src/main/webapp/dashboard.xhtml
@@ -42,7 +42,7 @@
                                 #{dashboardPage.numberOfHarvestedDatasets}

-
+

diff --git a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml
index 1cbf297bf89..8b5c86b9c1c 100644
--- a/src/main/webapp/dataset-license-terms.xhtml
+++ b/src/main/webapp/dataset-license-terms.xhtml
@@ -46,7 +46,7 @@

+                                    var="license" itemLabel="#{DatasetUtil:getLocalizedLicenseDetails(license, 'NAME')}" itemValue="#{license}"/>
@@ -55,8 +55,8 @@

-
-                                #{termsOfUseAndAccess.license.name}
+
+                                #{DatasetUtil:getLocalizedLicenseDetails(termsOfUseAndAccess.license,'NAME')}

diff --git a/src/main/webapp/datasetLicenseInfoFragment.xhtml b/src/main/webapp/datasetLicenseInfoFragment.xhtml
index 554a3d95abf..257f6b3b12f 100644
--- a/src/main/webapp/datasetLicenseInfoFragment.xhtml
+++ b/src/main/webapp/datasetLicenseInfoFragment.xhtml
@@ -30,12 +30,12 @@ xmlns:jsf="http://xmlns.jcp.org/jsf">
+                         jsf:rendered="#{!empty DatasetUtil:getLocalizedLicenseDetails(DatasetPage.workingVersion.termsOfUseAndAccess.license,'DESCRIPTION')} }">
-
+
@@ -121,4 +121,4 @@ xmlns:jsf="http://xmlns.jcp.org/jsf">
-
\ No newline at end of file
+
diff --git a/src/main/webapp/oauth2/callback.xhtml b/src/main/webapp/oauth2/callback.xhtml
index f0d66b2fa74..8d09f06da2d 100644
--- a/src/main/webapp/oauth2/callback.xhtml
+++ b/src/main/webapp/oauth2/callback.xhtml
@@ -21,14 +21,18 @@