diff --git a/conf/docker-aio/0prep_deps.sh b/conf/docker-aio/0prep_deps.sh index 3255a8d35a6..71398f03a42 100755 --- a/conf/docker-aio/0prep_deps.sh +++ b/conf/docker-aio/0prep_deps.sh @@ -4,10 +4,10 @@ if [ ! -d dv/deps ]; then fi wdir=`pwd` -if [ ! -e dv/deps/payara-5.2020.2.zip ]; then +if [ ! -e dv/deps/payara-5.2020.6.zip ]; then echo "payara dependency prep" # no more fiddly patching :) - wget https://github.com/payara/Payara/releases/download/payara-server-5.2020.2/payara-5.2020.2.zip -O dv/deps/payara-5.2020.2.zip + wget https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2020.6/payara-5.2020.6.zip -O dv/deps/payara-5.2020.6.zip fi if [ ! -e dv/deps/solr-7.7.2dv.tgz ]; then diff --git a/conf/docker-aio/c8.dockerfile b/conf/docker-aio/c8.dockerfile index e5994dbcf95..15c27469988 100644 --- a/conf/docker-aio/c8.dockerfile +++ b/conf/docker-aio/c8.dockerfile @@ -18,7 +18,7 @@ COPY disableipv6.conf /etc/sysctl.d/ RUN rm /etc/httpd/conf/* COPY httpd.conf /etc/httpd/conf RUN cd /opt ; tar zxf /tmp/dv/deps/solr-7.7.2dv.tgz -RUN cd /opt ; unzip /tmp/dv/deps/payara-5.2020.2.zip ; ln -s /opt/payara5 /opt/glassfish4 +RUN cd /opt ; unzip /tmp/dv/deps/payara-5.2020.6.zip ; ln -s /opt/payara5 /opt/glassfish4 # this copy of domain.xml is the result of running `asadmin set server.monitoring-service.module-monitoring-levels.jvm=LOW` on a default glassfish installation (aka enable the glassfish REST monitor endpoint for the jvm) COPY domain-restmonitor.xml /opt/payara5/glassfish/domains/domain1/config/domain.xml diff --git a/conf/docker-aio/run-test-suite.sh b/conf/docker-aio/run-test-suite.sh index 9cbc90a152a..2bfa2f25b27 100755 --- a/conf/docker-aio/run-test-suite.sh +++ b/conf/docker-aio/run-test-suite.sh @@ -8,4 +8,4 @@ fi # Please note the "dataverse.test.baseurl" is set to run for the "all-in-one" Docker environment. # TODO: Rather than hard-coding the list of "IT" classes here, add a profile to pom.xml.
-mvn test -Dtest=DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT,DuplicateFilesIT,DownloadFilesIT -Ddataverse.test.baseurl=$dvurl +mvn test -Dtest=DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT,DuplicateFilesIT,DownloadFilesIT,LinkIT -Ddataverse.test.baseurl=$dvurl diff --git a/conf/docker-aio/testscripts/install b/conf/docker-aio/testscripts/install index 5471b536265..86aeea89f0b 100755 --- a/conf/docker-aio/testscripts/install +++ b/conf/docker-aio/testscripts/install @@ -15,7 +15,6 @@ export SMTP_SERVER=localhost export MEM_HEAP_SIZE=2048 export GLASSFISH_DOMAIN=domain1 cd scripts/installer -cp pgdriver/postgresql-42.2.12.jar $GLASSFISH_ROOT/glassfish/lib #cp ../../conf/jhove/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf cp /opt/dv/testdata/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf cp /opt/dv/testdata/jhoveConfig.xsd $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhoveConfig.xsd diff --git a/doc/release-notes/5345-ejb-timers.md b/doc/release-notes/5345-ejb-timers.md new file mode 100644 index 00000000000..cf6c9ae5979 --- /dev/null +++ b/doc/release-notes/5345-ejb-timers.md @@ -0,0 +1,4 @@ +Reset the EJB timer database back to default: +``` +/asadmin set configs.config.server-config.ejb-container.ejb-timer-service.timer-datasource=jdbc/__TimerPool +``` \ No newline at end of file diff --git a/doc/release-notes/7417-payara-5.2020.6.md b/doc/release-notes/7417-payara-5.2020.6.md new file mode 100644 index 00000000000..8377e772b9f --- /dev/null +++ b/doc/release-notes/7417-payara-5.2020.6.md @@ -0,0 +1,12 @@ +## Update to Payara Platform 5.2020.6 + +In contrast to good old Glassfish 4, the Payara application server +is a maintained product, receiving not only feature updates but, more +importantly, security and dependency updates. You should update the +application server platform on a regular basis, as it is a key component +of a running Dataverse deployment. + + + +Instructions on how to update can be found in the +[Payara documentation](https://docs.payara.fish/community/docs/5.2020.6/documentation/user-guides/upgrade-payara.html) diff --git a/doc/sphinx-guides/source/_static/util/clear_timer.sh b/doc/sphinx-guides/source/_static/util/clear_timer.sh index d51455ba8ed..3fcd9e8a387 100755 --- a/doc/sphinx-guides/source/_static/util/clear_timer.sh +++ b/doc/sphinx-guides/source/_static/util/clear_timer.sh @@ -1,9 +1,8 @@ #!/bin/sh -# EBJ timers sometimes cause problems; utility to clear generated directories and database rows +# EJB timers sometimes cause problems; utility to clear generated directories -# assumes this script is being run as root, and that the postgres user had passwordless -# access to the database (local sockets, or appropriate environmental variables). +# assumes this script is being run as root # will restart Payara if it's stopped; comment out the `start-domain` command at the end # if you'd like to avoid that.
@@ -14,19 +13,11 @@ PAYARA_DIR=/usr/local/payara5 # directory within Payara (defaults) DV_DIR=${PAYARA_DIR}/glassfish/domains/domain1 -# name of dataverse database -DV_DB=dvndb - -# OS user for the database -DB_USER=postgres - # stop the domain (generates a warning if app server is stopped) ${PAYARA_DIR}/bin/asadmin stop-domain rm -rf ${PAYARA_DIR}/${DV_DIR}/generated/ rm -rf ${PAYARA_DIR}/${DV_DIR}/osgi-cache/felix -sudo -u ${DB_USER} psql ${DV_DB} -c 'delete from "EJB__TIMER__TBL"'; - # restart the domain (also generates a warning if app server is stopped) ${PAYARA_DIR}/bin/asadmin start-domain diff --git a/doc/sphinx-guides/source/admin/timers.rst b/doc/sphinx-guides/source/admin/timers.rst index 0408c74ccd0..d94f22da2a5 100644 --- a/doc/sphinx-guides/source/admin/timers.rst +++ b/doc/sphinx-guides/source/admin/timers.rst @@ -22,9 +22,7 @@ The following JVM option instructs the application to act as the dedicated timer ``-Ddataverse.timerServer=true`` -**IMPORTANT:** Note that this option is automatically set by the Dataverse installer script. That means that when **configuring a multi-server cluster**, it will be the responsibility of the installer to remove the option from the :fixedwidthplain:`domain.xml` of every node except the one intended to be the timer server. We also recommend that the following entry in the :fixedwidthplain:`domain.xml`: ```` is changed back to ```` on all the non-timer server nodes. Similarly, this option is automatically set by the installer script. Changing it back to the default setting on a server that doesn't need to run the timer will prevent a potential race condition, where multiple servers try to get a lock on the timer database. - -**Note** that for the timer to work, the version of the PostgreSQL JDBC driver your instance is using must match the version of your PostgreSQL database. See the :ref:`timer-not-working` section of Troubleshooting in the Admin Guide. +**IMPORTANT:** Note that this option is automatically set by the Dataverse installer script. That means that when **configuring a multi-server cluster**, it will be the responsibility of the installer to remove the option from the :fixedwidthplain:`domain.xml` of every node except the one intended to be the timer server. Harvesting Timers ----------------- @@ -44,7 +42,7 @@ This timer runs a daily job that tries to export all the local, published datase This daily job will also update all the harvestable OAI sets configured on your server, adding new and/or newly published datasets or marking deaccessioned datasets as "deleted" in the corresponding sets as needed. -This job is automatically scheduled to run at 2AM local time every night. If really necessary, it is possible (for an advanced user) to change that time by directly editing the EJB timer application table in the database. +This job is automatically scheduled to run at 2AM local time every night. .. 
_saved-search-timer: diff --git a/doc/sphinx-guides/source/admin/troubleshooting.rst b/doc/sphinx-guides/source/admin/troubleshooting.rst index ec24de245b6..d115554f7bc 100644 --- a/doc/sphinx-guides/source/admin/troubleshooting.rst +++ b/doc/sphinx-guides/source/admin/troubleshooting.rst @@ -98,14 +98,12 @@ We don't know what's causing this issue, but here's a known workaround: - Stop Payara; -- Remove the ``generated`` and ``osgi-cache`` directories; - -- Delete all the rows from the ``EJB__TIMER__TBL`` table in the database; +- Remove the ``generated`` and ``osgi-cache`` directories; - Start Payara The shell script below performs the steps above. -Note that it may or may not work on your system, so it is provided as an example only, downloadable :download:`here `. Aside from the configuration values that need to be changed to reflect your environment (the Payara directory, name of the database, etc.) the script relies on the database being configured in a certain way for access. (See the comments in the script for more information) +Note that it may or may not work on your system, so it is provided as an example only, downloadable :download:`here `. The configuration values might need to be changed to reflect your environment (the Payara directory). See the comments in the script for more information. .. literalinclude:: ../_static/util/clear_timer.sh @@ -114,7 +112,7 @@ Note that it may or may not work on your system, so it is provided as an example Timer Not Working ----------------- -Dataverse relies on EJB timers to perform scheduled tasks: harvesting from remote servers, updating the local OAI sets and running metadata exports. (See :doc:`timers` for details.) If these scheduled jobs are not running on your server, this may be the result of the incompatibility between the version of PostgreSQL database you are using, and PostgreSQL JDBC driver in use by your instance of Payara. The symptoms: +Dataverse relies on EJB timers to perform scheduled tasks: harvesting from remote servers, updating the local OAI sets and running metadata exports. (See :doc:`timers` for details.) If these scheduled jobs are not running on your server, you might experience the following symptoms: If you are seeing the following in your server.log... followed by an Exception stack trace with these lines in it: :fixedwidthplain:`Exception Description: Could not deserialize object from byte array` ... -... it most likely means that it is the JDBC driver incompatibility that's preventing the timer from working correctly. -Make sure you install the correct version of the driver. For example, if you are running the version 9.3 of PostgreSQL, make sure you have the driver postgresql-9.3-1104.jdbc4.jar in your :fixedwidthplain:`/glassfish/lib` directory. Go `here `_ -to download the correct version of the driver. If you have an older driver in glassfish/lib, make sure to remove it, replace it with the new version and restart Payara. (You may need to remove the entire contents of :fixedwidthplain:`/glassfish/domains/domain1/generated` before you start Payara). +... you should reach out by opening an issue. In the good old days of Dataverse 4 running on Glassfish 4, this +was a hint that an unsupported JDBC driver was in use. In Dataverse 5 this would be a new regression, and its cause would need to be +investigated.
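The database-related cleanup step can be dropped from this guide because the patch switches Dataverse's EJB timers to non-persistent ones (see the SavedSearchServiceBean and DataverseTimerServiceBean hunks further down), so no timer rows exist in the database anymore. A minimal sketch of the two patterns the patch adopts, using only the standard javax.ejb API; the class and method names here are illustrative, not taken from the Dataverse codebase:

```java
import java.io.Serializable;
import java.util.Date;
import javax.annotation.Resource;
import javax.ejb.Schedule;
import javax.ejb.Singleton;
import javax.ejb.Startup;
import javax.ejb.Timeout;
import javax.ejb.Timer;
import javax.ejb.TimerConfig;
import javax.ejb.TimerService;

@Singleton
@Startup
public class ExampleTimerBean {

    @Resource
    TimerService timerService;

    // Declarative timer: persistent = false keeps the schedule in memory only,
    // so no row is ever written to the EJB timer table.
    @Schedule(dayOfWeek = "0", hour = "0", minute = "30", persistent = false)
    public void weeklyJob() {
        // scheduled work goes here
    }

    // Programmatic equivalent: TimerConfig(info, false) marks the
    // interval timer as non-persistent.
    public void createNonPersistentTimer(Date firstRun, long intervalMillis, Serializable info) {
        timerService.createIntervalTimer(firstRun, intervalMillis, new TimerConfig(info, false));
    }

    @Timeout
    public void handleTimeout(Timer timer) {
        // fires when the programmatic interval timer expires
    }
}
```

Non-persistent timers are simply recreated on deployment instead of being deserialized from the database, which is exactly what the `Could not deserialize object from byte array` error above was about.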
Constraint Violations Issues diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 0c355b35254..6d0176c094d 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1176,7 +1176,7 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -F file=@data.tsv -F jsonData={"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false"} https://demo.dataverse.org/api/datasets/:persistentId/add?persistentId=doi:10.5072/FK2/J8SJZB + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -F file=@data.tsv -F 'jsonData={"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false"}' "https://demo.dataverse.org/api/datasets/:persistentId/add?persistentId=doi:10.5072/FK2/J8SJZB" You should expect a 201 ("CREATED") response and JSON indicating the database id that has been assigned to your newly uploaded file. diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst index 41ffecffa35..9abfd385c28 100755 --- a/doc/sphinx-guides/source/developers/dev-environment.rst +++ b/doc/sphinx-guides/source/developers/dev-environment.rst @@ -85,9 +85,9 @@ To install Payara, run the following commands: ``cd /usr/local`` -``sudo curl -O -L https://github.com/payara/Payara/releases/download/payara-server-5.2020.2/payara-5.2020.2.zip`` +``sudo curl -O -L https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2020.6/payara-5.2020.6.zip`` -``sudo unzip payara-5.2020.2.zip`` +``sudo unzip payara-5.2020.6.zip`` ``sudo chown -R $USER /usr/local/payara5`` diff --git a/doc/sphinx-guides/source/developers/tips.rst b/doc/sphinx-guides/source/developers/tips.rst index 48b6a874eb3..47265c97eff 100755 --- a/doc/sphinx-guides/source/developers/tips.rst +++ b/doc/sphinx-guides/source/developers/tips.rst @@ -176,7 +176,6 @@ Switching from Glassfish to Payara If you already have a working dev environment with Glassfish and want to switch to Payara, you must do the following: - Copy the "domain1" directory from Glassfish to Payara. -- Copy the PostgreSQL driver into place like this: ``cp scripts/installer/pgdriver/postgresql-42.*.jar /usr/local/payara5/glassfish/lib`` ---- diff --git a/doc/sphinx-guides/source/installation/installation-main.rst b/doc/sphinx-guides/source/installation/installation-main.rst index adc191d066b..6209b5f2e1b 100755 --- a/doc/sphinx-guides/source/installation/installation-main.rst +++ b/doc/sphinx-guides/source/installation/installation-main.rst @@ -208,8 +208,7 @@ Fresh Reinstall Early on when you're installing Dataverse, you may think, "I just want to blow away what I've installed and start over." That's fine. You don't have to uninstall the various components like Payara, PostgreSQL and Solr, but you should be conscious of how to clear out their data. 
For Payara, a common helpful process is to: - Stop Payara; -- Remove the ``generated`` and ``osgi-cache`` directories; -- Delete all the rows from the ``EJB__TIMER__TBL`` table in the database; +- Remove the ``generated`` and ``osgi-cache`` directories; - Start Payara Drop database diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst index 05c2c8cba92..d166694b22e 100644 --- a/doc/sphinx-guides/source/installation/prerequisites.rst +++ b/doc/sphinx-guides/source/installation/prerequisites.rst @@ -47,7 +47,7 @@ If you don't want to be prompted, here is an example of the non-interactive invo Payara ------ -Payara 5.2020.2 is recommended. Newer versions might work fine. +Payara 5.2020.6 is recommended. Newer versions might work fine; regular updates are recommended. Installing Payara ================= @@ -58,8 +58,8 @@ Installing Payara - Download and install Payara (installed in ``/usr/local/payara5`` in the example commands below):: - # wget https://github.com/payara/Payara/releases/download/payara-server-5.2020.2/payara-5.2020.2.zip - # unzip payara-5.2020.2.zip + # wget https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2020.6/payara-5.2020.6.zip + # unzip payara-5.2020.6.zip # mv payara5 /usr/local If you intend to install and run Payara under a service account (and we hope you do), chown -R the Payara hierarchy to root to protect it but give the service account access to the below directories: diff --git a/downloads/download.sh b/downloads/download.sh index ef2bd712069..59bbcef9dc8 100755 --- a/downloads/download.sh +++ b/downloads/download.sh @@ -1,5 +1,5 @@ #!/bin/sh -curl -L -O https://github.com/payara/Payara/releases/download/payara-server-5.2020.2/payara-5.2020.2.zip +curl -L -O https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2020.6/payara-5.2020.6.zip curl -L -O https://archive.apache.org/dist/lucene/solr/7.7.2/solr-7.7.2.tgz curl -L -O https://search.maven.org/remotecontent?filepath=org/jboss/weld/weld-osgi-bundle/2.2.10.Final/weld-osgi-bundle-2.2.10.Final-glassfish4.jar curl -s -L http://sourceforge.net/projects/schemaspy/files/schemaspy/SchemaSpy%205.0.0/schemaSpy_5.0.0.jar/download > schemaSpy_5.0.0.jar diff --git a/pom.xml b/pom.xml index ca968ff365d..6d7378a7ac2 100644 --- a/pom.xml +++ b/pom.xml @@ -26,7 +26,8 @@ false 8.0.0 - <payara.version>5.2020.2</payara.version> + <payara.version>5.2020.6</payara.version> + <postgresql.version>42.2.18</postgresql.version> 1.11.762 1.2 4.5.5 @@ -263,6 +264,11 @@ <artifactId>jbcrypt</artifactId> <version>0.3m</version> </dependency> + <dependency> + <groupId>org.postgresql</groupId> + <artifactId>postgresql</artifactId> + <version>${postgresql.version}</version> + </dependency> <dependency> <groupId>org.flywaydb</groupId> <artifactId>flyway-core</artifactId> diff --git a/scripts/database/homebrew/devinstall b/scripts/database/homebrew/devinstall index 89284b50979..28f682685ff 100755 --- a/scripts/database/homebrew/devinstall +++ b/scripts/database/homebrew/devinstall @@ -15,6 +15,5 @@ export FILES_DIR=$HOME/dataverse/files export MEM_HEAP_SIZE=2048 export GLASSFISH_DOMAIN=domain1 export GLASSFISH_ROOT=/Applications/NetBeans/glassfish4 -cp pgdriver/postgresql-9.1-902.jdbc4.jar $GLASSFISH_ROOT/glassfish/lib cp ../../conf/jhove/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf ./glassfish-setup.sh diff --git a/scripts/deploy/phoenix.dataverse.org/install b/scripts/deploy/phoenix.dataverse.org/install index f3df88a3fd2..8ee7fb8215f 100755 --- a/scripts/deploy/phoenix.dataverse.org/install +++ b/scripts/deploy/phoenix.dataverse.org/install @@ -15,6 +15,5 @@ export SMTP_SERVER=localhost export MEM_HEAP_SIZE=2048 export GLASSFISH_DOMAIN=domain1 cd scripts/installer -cp pgdriver/postgresql-8.4-703.jdbc4.jar
$GLASSFISH_ROOT/glassfish/lib cp ../../conf/jhove/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf ./glassfish-setup.sh diff --git a/scripts/installer/Makefile b/scripts/installer/Makefile index 5155edfde1f..4ef35a01471 100644 --- a/scripts/installer/Makefile +++ b/scripts/installer/Makefile @@ -1,7 +1,6 @@ INSTALLER_ZIP_DIR=dvinstall DISTRIBUTION_WAR_FILE=${INSTALLER_ZIP_DIR}/dataverse.war GLASSFISH_SETUP_SCRIPT=${INSTALLER_ZIP_DIR}/as-setup.sh -POSTGRES_DRIVERS=${INSTALLER_ZIP_DIR}/pgdriver API_SCRIPTS=${INSTALLER_ZIP_DIR}/setup-datasetfields.sh ${INSTALLER_ZIP_DIR}/setup-users.sh ${INSTALLER_ZIP_DIR}/setup-builtin-roles.sh ${INSTALLER_ZIP_DIR}/setup-dvs.sh ${INSTALLER_ZIP_DIR}/data ${INSTALLER_ZIP_DIR}/setup-identity-providers.sh ${INSTALLER_ZIP_DIR}/setup-all.sh ${INSTALLER_ZIP_DIR}/post-install-api-block.sh JHOVE_CONFIG=${INSTALLER_ZIP_DIR}/jhove.conf JHOVE_SCHEMA=${INSTALLER_ZIP_DIR}/jhoveConfig.xsd @@ -15,9 +14,9 @@ installer: dvinstall.zip clean: /bin/rm -rf ${INSTALLER_ZIP_DIR} dvinstall.zip -dvinstall.zip: ${GLASSFISH_SETUP_SCRIPT} ${POSTGRES_DRIVERS} ${JSF_PATCH_DIR} ${DISTRIBUTION_WAR_FILE} ${API_SCRIPTS} ${JHOVE_CONFIG} ${JHOVE_SCHEMA} ${SOLR_SCHEMA} ${SOLR_CONFIG} ${PYTHON_FILES} ${INSTALL_SCRIPT} +dvinstall.zip: ${GLASSFISH_SETUP_SCRIPT} ${JSF_PATCH_DIR} ${DISTRIBUTION_WAR_FILE} ${API_SCRIPTS} ${JHOVE_CONFIG} ${JHOVE_SCHEMA} ${SOLR_SCHEMA} ${SOLR_CONFIG} ${PYTHON_FILES} ${INSTALL_SCRIPT} @echo making installer... - zip -r dvinstall.zip ${GLASSFISH_SETUP_SCRIPT} ${POSTGRES_DRIVERS} ${JSF_PATCH_DIR} ${DISTRIBUTION_WAR_FILE} ${API_SCRIPTS} ${JHOVE_CONFIG} ${JHOVE_SCHEMA} ${SOLR_SCHEMA} ${SOLR_CONFIG} ${PYTHON_FILES} ${INSTALL_SCRIPT} + zip -r dvinstall.zip ${GLASSFISH_SETUP_SCRIPT} ${JSF_PATCH_DIR} ${DISTRIBUTION_WAR_FILE} ${API_SCRIPTS} ${JHOVE_CONFIG} ${JHOVE_SCHEMA} ${SOLR_SCHEMA} ${SOLR_CONFIG} ${PYTHON_FILES} ${INSTALL_SCRIPT} @echo @echo "Done!" 
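With the driver now declared in pom.xml (the org.postgresql dependency shown above) and bundled into the WAR, nothing needs to be copied into glassfish/lib anymore, which is why the installer and Makefile drop the pgdriver handling. A hypothetical sanity check, not part of this patch, that confirms the driver resolves from the application classpath after `mvn package`; only the standard `org.postgresql.Driver` class name is assumed:

```java
public class DriverCheck {
    public static void main(String[] args) {
        try {
            // Resolves the driver class and reports which jar it was loaded from.
            Class<?> driver = Class.forName("org.postgresql.Driver");
            System.out.println("Found " + driver.getName() + " in "
                    + driver.getProtectionDomain().getCodeSource().getLocation());
        } catch (ClassNotFoundException e) {
            System.err.println("PostgreSQL driver is not on the classpath: " + e);
        }
    }
}
```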
@@ -45,11 +44,6 @@ ${GLASSFISH_SETUP_SCRIPT}: as-setup.sh ${INSTALLER_ZIP_DIR} @echo copying glassfish setup /bin/cp as-setup.sh ${INSTALLER_ZIP_DIR} -${POSTGRES_DRIVERS}: pgdriver/postgresql-42.2.12.jar - mkdir -p ${POSTGRES_DRIVERS} - @echo copying postgres driver - /bin/cp pgdriver/postgresql-42.2.12.jar ${POSTGRES_DRIVERS} - ${API_SCRIPTS}: ../api/setup-datasetfields.sh ../api/setup-users.sh ../api/setup-dvs.sh ../api/setup-identity-providers.sh ../api/setup-all.sh ../api/post-install-api-block.sh ../api/setup-builtin-roles.sh ../api/data ${INSTALLER_ZIP_DIR} @echo copying api scripts /bin/cp -R ../api/setup-datasetfields.sh ../api/setup-users.sh ../api/setup-dvs.sh ../api/setup-identity-providers.sh ../api/setup-all.sh ../api/post-install-api-block.sh ../api/setup-builtin-roles.sh ../api/data ${INSTALLER_ZIP_DIR} diff --git a/scripts/installer/README.txt b/scripts/installer/README.txt index ec11881c720..35dbaadc3bf 100644 --- a/scripts/installer/README.txt +++ b/scripts/installer/README.txt @@ -20,7 +20,6 @@ from scripts/installer (this directory): install glassfish-setup.sh -pgdriver (the entire directory with all its contents) from scripts/api: diff --git a/scripts/installer/as-setup.sh b/scripts/installer/as-setup.sh index f7f546b31df..1096a12e391 100755 --- a/scripts/installer/as-setup.sh +++ b/scripts/installer/as-setup.sh @@ -156,11 +156,6 @@ function final_setup(){ ./asadmin $ASADMIN_OPTS create-system-properties "dataverse.db.port=${DB_PORT}" ./asadmin $ASADMIN_OPTS create-system-properties "dataverse.db.name=${DB_NAME}" - ### - # Set up the data source for the timers - - ./asadmin $ASADMIN_OPTS set configs.config.server-config.ejb-container.ejb-timer-service.timer-datasource=jdbc/VDCNetDS - ./asadmin $ASADMIN_OPTS create-jvm-options "\-Djavax.xml.parsers.SAXParserFactory=com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl" ### diff --git a/scripts/installer/install b/scripts/installer/install index ffe98d48720..b30ad7982bc 100755 --- a/scripts/installer/install +++ b/scripts/installer/install @@ -840,19 +840,6 @@ sub setup_appserver { # with the MacOS glassfish build...). Verify, and if still the case, # add a check. - print "\nInstalling the PostgresQL driver for Payara5... "; -# system( "/bin/cp", "pgdriver/" . $postgres_jdbc, $glassfish_dir . "/glassfish/lib" ); - my $pgdriver_success = copy("pgdriver/" . $postgres_jdbc, $glassfish_dir . "/glassfish/lib" ); - - unless ($pgdriver_success) - { - print "\n*********************\n"; - print "ERROR! Failed to copy the postgres driver into " . $glassfish_dir . "/glassfish/lib - check the directory permissions!\n"; - exit 1; - } - - print "done!\n"; - print "\n*********************\n"; print "PLEASE NOTE, SOME OF THE ASADMIN COMMANDS ARE GOING TO FAIL,\n"; print "FOR EXAMPLE, IF A CONFIGURATION SETTING THAT WE ARE TRYING\n"; diff --git a/scripts/installer/install.py b/scripts/installer/install.py index 40f91cea640..ce74ccffb69 100644 --- a/scripts/installer/install.py +++ b/scripts/installer/install.py @@ -111,12 +111,6 @@ # expected dataverse defaults apiUrl = "http://localhost:8080/api" -# there's now a single driver that works for all supported versions: -# jodbc.postgresql.org recommends 4.2 for Java 8. -# updated drivers may be obtained from -# https://jdbc.postgresql.org/download.html -pgJdbcDriver = "postgresql-42.2.12.jar" - # 0. A few preliminary checks: # 0a. OS flavor: @@ -441,15 +435,6 @@ print("Setting App. 
Server heap size (Xmx) to "+str(gfHeap)+" Megabytes") config.set('glassfish','GLASSFISH_HEAP', str(gfHeap)) -# 4b1. PostgresQL driver: -pg_driver_jarpath = "pgdriver/"+pgJdbcDriver - -try: - copy2(pg_driver_jarpath, gfJarPath) - print("Copied "+pgJdbcDriver+" into "+gfJarPath) -except: - print("Couldn't copy "+pgJdbcDriver+" into "+gfJarPath+". Check its permissions?") - # 4c. create payara admin credentials file userHomeDir = pwd.getpwuid(os.getuid())[5] diff --git a/scripts/installer/pgdriver/postgresql-42.2.12.jar b/scripts/installer/pgdriver/postgresql-42.2.12.jar deleted file mode 100644 index 1f393bb78f7..00000000000 Binary files a/scripts/installer/pgdriver/postgresql-42.2.12.jar and /dev/null differ diff --git a/scripts/vagrant/setup.sh b/scripts/vagrant/setup.sh index 6b74a9b3f9c..8cc2ad466eb 100644 --- a/scripts/vagrant/setup.sh +++ b/scripts/vagrant/setup.sh @@ -43,7 +43,7 @@ SOLR_USER=solr echo "Ensuring Unix user '$SOLR_USER' exists" useradd $SOLR_USER || : DOWNLOAD_DIR='/dataverse/downloads' -PAYARA_ZIP="$DOWNLOAD_DIR/payara-5.2020.2.zip" +PAYARA_ZIP="$DOWNLOAD_DIR/payara-5.2020.6.zip" SOLR_TGZ="$DOWNLOAD_DIR/solr-7.7.2.tgz" if [ ! -f $PAYARA_ZIP ] || [ ! -f $SOLR_TGZ ]; then echo "Couldn't find $PAYARA_ZIP or $SOLR_TGZ! Running download script...." diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java index 18eec164a2a..da70529e76d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java @@ -50,6 +50,9 @@ public DatasetLinkingDataverse execute(CommandContext ctxt) throws CommandExcept if (linkedDataset.getOwner().getOwners().contains(linkingDataverse)) { throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.link.not.to.parent.dataverse"), this); } + if (ctxt.dsLinking().alreadyLinked(linkingDataverse, linkedDataset)) { + throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.link.not.already.linked"), this); + } DatasetLinkingDataverse datasetLinkingDataverse = new DatasetLinkingDataverse(); datasetLinkingDataverse.setDataset(linkedDataset); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java index 1120c8c3773..1c63a1a3c4f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java @@ -17,8 +17,10 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; +import edu.harvard.iq.dataverse.util.BundleUtil; import java.io.IOException; import java.sql.Timestamp; +import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.concurrent.Future; @@ -53,6 +55,9 @@ public DataverseLinkingDataverse execute(CommandContext ctxt) throws CommandExce if (linkedDataverse.getOwners().contains(linkingDataverse)) { throw new IllegalCommandException("Can't link a dataverse to its parents", this); } + if (ctxt.dvLinking().alreadyLinked(linkingDataverse, linkedDataverse)) { + throw new 
IllegalCommandException(BundleUtil.getStringFromBundle("dataverse.linked.error.alreadyLinked", Arrays.asList(linkedDataverse.getName(), linkingDataverse.getName())), this); + } DataverseLinkingDataverse dataverseLinkingDataverse = new DataverseLinkingDataverse(); dataverseLinkingDataverse.setDataverse(linkedDataverse); diff --git a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java index 3dc7642655d..3af9a655b35 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java @@ -129,7 +129,7 @@ public SavedSearch save(SavedSearch savedSearch) { } - @Schedule(dayOfWeek="0", hour="0",minute="30") + @Schedule(dayOfWeek="0", hour="0", minute="30", persistent = false) public void makeLinksForAllSavedSearchesTimer() { if (systemConfig.isTimerServer()) { logger.info("Linking saved searches"); diff --git a/src/main/java/edu/harvard/iq/dataverse/timer/DataverseTimerServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/timer/DataverseTimerServiceBean.java index f4a30139a97..b132bff9429 100644 --- a/src/main/java/edu/harvard/iq/dataverse/timer/DataverseTimerServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/timer/DataverseTimerServiceBean.java @@ -7,10 +7,7 @@ import edu.harvard.iq.dataverse.DatasetServiceBean; import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; -import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser; -import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; @@ -33,13 +30,11 @@ import javax.ejb.EJB; import javax.ejb.Singleton; import javax.ejb.Startup; -import javax.ejb.Stateless; import javax.ejb.Timeout; import javax.ejb.Timer; +import javax.ejb.TimerConfig; import javax.ejb.TransactionAttribute; import javax.ejb.TransactionAttributeType; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; import javax.servlet.http.HttpServletRequest; @@ -56,16 +51,14 @@ @Singleton @Startup public class DataverseTimerServiceBean implements Serializable { + + private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.timer.DataverseTimerServiceBean"); + @Resource javax.ejb.TimerService timerService; - @PersistenceContext(unitName = "VDCNet-ejbPU") - private EntityManager em; - private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.timer.DataverseTimerServiceBean"); @EJB HarvesterServiceBean harvesterService; @EJB - DataverseServiceBean dataverseService; - @EJB HarvestingClientServiceBean harvestingClientService; @EJB AuthenticationServiceBean authSvc; @@ -105,7 +98,7 @@ public void createTimer(Date initialExpiration, long intervalDuration, Serializa } catch (UnknownHostException ex) { Logger.getLogger(DataverseTimerServiceBean.class.getName()).log(Level.SEVERE, null, ex); } - timerService.createTimer(initialExpiration, intervalDuration, info); + timerService.createIntervalTimer(initialExpiration, intervalDuration, new TimerConfig(info, false)); } /** diff --git 
a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 335714764a5..73f2d2cab18 100755 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -769,6 +769,7 @@ dataverse.saved.search.failure=The saved search was not able to be linked. dataverse.linked.success= {0} has been successfully linked to {1}. dataverse.linked.success.wait= {0} has been successfully linked to {1}. Please wait for its contents to appear. dataverse.linked.internalerror={0} has been successfully linked to {1} but contents will not appear until an internal error has been fixed. +dataverse.linked.error.alreadyLinked={0} has already been linked to {1}. dataverse.page.pre=Previous dataverse.page.next=Next dataverse.byCategory=Dataverses by Category @@ -1314,6 +1315,7 @@ dataset.link.not.to.owner=Can't link a dataset to its dataverse dataset.link.not.to.parent.dataverse=Can't link a dataset to its parent dataverses dataset.link.not.published=Can't link a dataset that has not been published dataset.link.not.available=Can't link a dataset that has not been published or is not harvested +dataset.link.not.already.linked=Can't link a dataset that has already been linked to this dataverse dataset.email.datasetContactTitle=Contact Dataset Owner dataset.email.hiddenMessage= dataset.email.messageSubject=Test Message Subject diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 3ef206a84e3..2fd0ea2dd91 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -828,15 +828,16 @@ [dataset.xhtml hunk: the XHTML markup was stripped during extraction and cannot be reconstructed; only the EL expression #{bundle['dataset.share.datasetShare.tip']} from the reworked share-dialog block survives]
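The alreadyLinked checks that LinkDatasetCommand and LinkDataverseCommand call above (ctxt.dsLinking().alreadyLinked(...) and ctxt.dvLinking().alreadyLinked(...)) are not shown in this diff. A plausible sketch of the dataset-side lookup in plain JPA; the JPQL string and field names are assumptions, while the entity and method names come from the patch:

```java
import javax.ejb.Stateless;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;

import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.Dataverse;

@Stateless
public class DatasetLinkingServiceBeanSketch {

    @PersistenceContext
    private EntityManager em;

    public boolean alreadyLinked(Dataverse linkingDataverse, Dataset linkedDataset) {
        // Count matching DatasetLinkingDataverse rows instead of fetching entities;
        // the guard only needs a yes/no answer.
        Long count = em.createQuery(
                "SELECT COUNT(dld) FROM DatasetLinkingDataverse dld"
                + " WHERE dld.linkingDataverse.id = :dvId AND dld.dataset.id = :dsId",
                Long.class)
                .setParameter("dvId", linkingDataverse.getId())
                .setParameter("dsId", linkedDataset.getId())
                .getSingleResult();
        return count > 0;
    }
}
```

Doing the duplicate check in the command, before the new DatasetLinkingDataverse is constructed, turns what would otherwise surface as a database constraint violation into the friendly bundle message added above.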
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 85585510823..55f53ad335b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -383,40 +383,9 @@ public void testMoveDataverse() { } } - - @Test - public void testCreateDeleteDataverseLink() { - Response createUser = UtilIT.createRandomUser(); - - createUser.prettyPrint(); - String username = UtilIT.getUsernameFromResponse(createUser); - String apiToken = UtilIT.getApiTokenFromResponse(createUser); - - Response superuserResponse = UtilIT.makeSuperUser(username); - - Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); - createDataverseResponse.prettyPrint(); - String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - Integer dataverseId = UtilIT.getDataverseIdFromResponse(createDataverseResponse); - - Response createDataverseResponse2 = UtilIT.createRandomDataverse(apiToken); - createDataverseResponse2.prettyPrint(); - String dataverseAlias2 = UtilIT.getAliasFromResponse(createDataverseResponse2); - - Response createLinkingDataverseResponse = UtilIT.createDataverseLink(dataverseAlias, dataverseAlias2, apiToken); - createLinkingDataverseResponse.prettyPrint(); - - createLinkingDataverseResponse.then().assertThat() - .body("data.message", equalTo("Dataverse " + dataverseAlias + " linked successfully to " + dataverseAlias2)) - .statusCode(200); - - Response deleteLinkingDataverseResponse = UtilIT.deleteDataverseLink(dataverseAlias, dataverseAlias2, apiToken); - deleteLinkingDataverseResponse.prettyPrint(); - deleteLinkingDataverseResponse.then().assertThat() - .body("data.message", equalTo("Link from Dataverse " + dataverseAlias + " to linked Dataverse " + dataverseAlias2 + " deleted")) - .statusCode(200); - } - + + // testCreateDeleteDataverseLink was here but is now in LinkIT + @Test public void testUpdateDefaultContributorRole() { Response createUser = UtilIT.createRandomUser(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/LinkIT.java b/src/test/java/edu/harvard/iq/dataverse/api/LinkIT.java new file mode 100644 index 00000000000..9ac2d2cb7e5 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/api/LinkIT.java @@ -0,0 +1,205 @@ +package edu.harvard.iq.dataverse.api; + +import com.jayway.restassured.RestAssured; +import com.jayway.restassured.path.json.JsonPath; +import com.jayway.restassured.response.Response; +import edu.harvard.iq.dataverse.util.BundleUtil; +import java.util.logging.Logger; +import static javax.ws.rs.core.Response.Status.CREATED; +import static javax.ws.rs.core.Response.Status.FORBIDDEN; +import static javax.ws.rs.core.Response.Status.OK; +import static org.hamcrest.CoreMatchers.equalTo; +import org.junit.BeforeClass; +import org.junit.Test; + +public class LinkIT { + + private static final Logger logger = Logger.getLogger(LinkIT.class.getCanonicalName()); + + @BeforeClass + public static void setUpClass() { + RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); + } + + @Test + public void testLinkedDataset() { + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + createUser.then().assertThat() + .statusCode(OK.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createSuperUser = UtilIT.createRandomUser(); + createSuperUser.prettyPrint(); + 
createSuperUser.then().assertThat() + .statusCode(OK.getStatusCode()); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperUser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperUser); + Response makeSuperuser = UtilIT.makeSuperUser(superuserUsername); + makeSuperuser.prettyPrint(); + makeSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createDataverse1 = UtilIT.createRandomDataverse(apiToken); + createDataverse1.prettyPrint(); + createDataverse1.then().assertThat() + .statusCode(CREATED.getStatusCode()); + String dataverse1Alias = UtilIT.getAliasFromResponse(createDataverse1); + + Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverse1Alias, apiToken); + createDataset.prettyPrint(); + createDataset.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset); + String datasetPid = JsonPath.from(createDataset.asString()).getString("data.persistentId"); + + Response createDataverse2 = UtilIT.createRandomDataverse(apiToken); + createDataverse2.prettyPrint(); + createDataverse2.then().assertThat() + .statusCode(CREATED.getStatusCode()); + String dataverse2Alias = UtilIT.getAliasFromResponse(createDataverse2); + Integer dataverse2Id = UtilIT.getDataverseIdFromResponse(createDataverse2); + String dataverse2Name = JsonPath.from(createDataverse2.asString()).getString("data.name"); + + UtilIT.publishDataverseViaNativeApi(dataverse1Alias, apiToken).then().assertThat() + .statusCode(OK.getStatusCode()); + + // You can't link an unpublished dataset. + Response tryToLinkUnpublishedDataset = UtilIT.linkDataset(datasetPid, dataverse2Alias, superuserApiToken); + tryToLinkUnpublishedDataset.prettyPrint(); + tryToLinkUnpublishedDataset.then().assertThat() + .statusCode(FORBIDDEN.getStatusCode()) + .body("message", equalTo("Can't link a dataset that has not been published or is not harvested")); + + UtilIT.publishDatasetViaNativeApi(datasetPid, "major", apiToken).then().assertThat() + .statusCode(OK.getStatusCode()); + + UtilIT.publishDataverseViaNativeApi(dataverse2Alias, apiToken).then().assertThat() + .statusCode(OK.getStatusCode()); + + // A dataset cannot be linked to its parent dataverse. + Response tryToLinkToParentDataverse = UtilIT.linkDataset(datasetPid, dataverse1Alias, superuserApiToken); + tryToLinkToParentDataverse.prettyPrint(); + tryToLinkToParentDataverse.then().assertThat() + .statusCode(FORBIDDEN.getStatusCode()) + .body("message", equalTo("Can't link a dataset to its dataverse")); + + // Link dataset to non-parent dataverse (allowed). + Response linkDataset = UtilIT.linkDataset(datasetPid, dataverse2Alias, superuserApiToken); + linkDataset.prettyPrint(); + linkDataset.then().assertThat() + .statusCode(OK.getStatusCode()); + + // A dataset cannot be linked to the same dataverse again.
+ Response tryToLinkAgain = UtilIT.linkDataset(datasetPid, dataverse2Alias, superuserApiToken); + tryToLinkAgain.prettyPrint(); + tryToLinkAgain.then().assertThat() + .statusCode(FORBIDDEN.getStatusCode()) + .body("message", equalTo("Can't link a dataset that has already been linked to this dataverse")); + } + + @Test + public void testCreateDeleteDataverseLink() { + Response createUser = UtilIT.createRandomUser(); + + createUser.prettyPrint(); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response superuserResponse = UtilIT.makeSuperUser(username); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.prettyPrint(); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + Integer dataverseId = UtilIT.getDataverseIdFromResponse(createDataverseResponse); + + Response createDataverseResponse2 = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse2.prettyPrint(); + String dataverseAlias2 = UtilIT.getAliasFromResponse(createDataverseResponse2); + + Response createLinkingDataverseResponse = UtilIT.createDataverseLink(dataverseAlias, dataverseAlias2, apiToken); + createLinkingDataverseResponse.prettyPrint(); + createLinkingDataverseResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("Dataverse " + dataverseAlias + " linked successfully to " + dataverseAlias2)); + + Response tryLinkingAgain = UtilIT.createDataverseLink(dataverseAlias, dataverseAlias2, apiToken); + tryLinkingAgain.prettyPrint(); + tryLinkingAgain.then().assertThat() + .statusCode(FORBIDDEN.getStatusCode()) + .body("message", equalTo(dataverseAlias + " has already been linked to " + dataverseAlias2 + ".")); + + Response deleteLinkingDataverseResponse = UtilIT.deleteDataverseLink(dataverseAlias, dataverseAlias2, apiToken); + deleteLinkingDataverseResponse.prettyPrint(); + deleteLinkingDataverseResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("Link from Dataverse " + dataverseAlias + " to linked Dataverse " + dataverseAlias2 + " deleted")); + } + + @Test + public void testDeepLinks() { + Response createUser = UtilIT.createRandomUser(); + + createUser.prettyPrint(); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response superuserResponse = UtilIT.makeSuperUser(username); + + Response createLevel1a = UtilIT.createSubDataverse(UtilIT.getRandomDvAlias() + "-level1a", null, apiToken, ":root"); + createLevel1a.prettyPrint(); + String level1a = UtilIT.getAliasFromResponse(createLevel1a); + + Response createLevel1b = UtilIT.createSubDataverse(UtilIT.getRandomDvAlias() + "-level1b", null, apiToken, ":root"); + createLevel1b.prettyPrint(); + String level1b = UtilIT.getAliasFromResponse(createLevel1b); + + Response linkLevel1toLevel1 = UtilIT.createDataverseLink(level1a, level1b, apiToken); + linkLevel1toLevel1.prettyPrint(); + linkLevel1toLevel1.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("Dataverse " + level1a + " linked successfully to " + level1b)); + + Response searchLevel1toLevel1 = UtilIT.search("*", apiToken, "&subtree=" + level1b); + searchLevel1toLevel1.prettyPrint(); + searchLevel1toLevel1.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.total_count", equalTo(1)) + .body("data.items[0].name", equalTo(level1a)); + + /** + * Remove 
this early return when you are ready to work on + * https://github.com/IQSS/dataverse/issues/7430 about strange linking + * behavior. + */ + if (true) { + return; + } + + Response createLevel2a = UtilIT.createSubDataverse(UtilIT.getRandomDvAlias() + "-level2a", null, apiToken, level1a); + createLevel2a.prettyPrint(); + String level2a = UtilIT.getAliasFromResponse(createLevel2a); + + Response createLevel2b = UtilIT.createSubDataverse(UtilIT.getRandomDvAlias() + "-level2b", null, apiToken, level1b); + createLevel2b.prettyPrint(); + String level2b = UtilIT.getAliasFromResponse(createLevel2b); + + Response linkLevel2toLevel2 = UtilIT.createDataverseLink(level2a, level2b, apiToken); + linkLevel2toLevel2.prettyPrint(); + linkLevel2toLevel2.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("Dataverse " + level2a + " linked successfully to " + level2b)); + + Response searchLevel2toLevel2 = UtilIT.search("*", apiToken, "&subtree=" + level2b); + searchLevel2toLevel2.prettyPrint(); + searchLevel2toLevel2.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.total_count", equalTo(1)) + .body("data.items[0].name", equalTo(level2a)); + + } + +}
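For manual verification outside the test suite, the duplicate-link guard can be exercised against the native API directly. A sketch using the same com.jayway.restassured client as the tests; the endpoint shape assumes the PUT /api/datasets/{id}/link/{alias} route that UtilIT.linkDataset wraps, and the base URI, token, id, and alias are placeholders:

```java
import com.jayway.restassured.RestAssured;
import com.jayway.restassured.response.Response;

public class LinkApiSketch {
    public static void main(String[] args) {
        RestAssured.baseURI = "http://localhost:8080"; // adjust to your installation
        String apiToken = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"; // must belong to a superuser
        long datasetId = 42; // database id of a published dataset
        String dataverseAlias = "targetDv"; // alias of a published, non-parent dataverse

        // First attempt: expected to succeed with HTTP 200.
        Response first = RestAssured.given()
                .header("X-Dataverse-key", apiToken)
                .put("/api/datasets/" + datasetId + "/link/" + dataverseAlias);
        System.out.println(first.getStatusCode() + " " + first.asString());

        // Identical second attempt: the new guard should answer 403 with
        // the dataset.link.not.already.linked message.
        Response second = RestAssured.given()
                .header("X-Dataverse-key", apiToken)
                .put("/api/datasets/" + datasetId + "/link/" + dataverseAlias);
        System.out.println(second.getStatusCode() + " " + second.asString());
    }
}
```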