diff --git a/doc/release-notes/7440-multilicense.md b/doc/release-notes/7440-multilicense.md new file mode 100644 index 00000000000..bf32cdc68e1 --- /dev/null +++ b/doc/release-notes/7440-multilicense.md @@ -0,0 +1,126 @@ +### Multiple License Support + +Users can now select from a set of configured licenses in addition to or instead of the current Creative Commons CC0 choice or provide custom terms of use (if configured) for their datasets. Administrators can configure their Dataverse instance via API to allow any desired license as a choice and can enable/disable the option to allow custom terms. Administrators can also mark licenses as 'inactive' to disallow future use while keeping that license for existing datasets. By default, only the CC0 license will be preinstalled. Examples in the Guides show how to add additional licenses and specific examples are given for several Creative Commons licenses. **Note: Datasets in existing installations will automatically be updated to conform to new requirements that custom terms cannot be used with a standard license and that custom terms cannot be empty. Administrators may wish to manually update datasets with these conditions if they do not like the automated migration choices. See the Notes for Dataverse Installation Administrators and Additional Release Steps sections for further information.** + +This release also makes the license selection and/or custom terms more prominent when publishing and viewing a dataset and when downloading files. + +## Major Use Cases and Infrastructure Enhancements + +- When creating/updating datasets, users can select from a set of standard licenses configured by the administrator or provide custom terms (if the installation is configured to allow them). + +## Notes for Dataverse Installation Administrators + +### Updating for multiple license support + +As part of installing/upgrading an existing installation, administrators may wish to add additional license choices and/or configure Dataverse to allow custom terms. Adding additional licenses is managed via API. Licenses are described via a JSON structure providing a name, URL, short description, and optional icon URL. Additionally licenses may be marked as active (selectable for new/updated datasets) or inactive (only allowed on existing datasets) and one license can be marked as the default. Custom Terms are allowed by default (backward compatible with the current option to select 'No' to using CC0) and can be disabled by setting `:AllowCustomTermsOfUse` to false. + +Further, administrators should review the following automated migration of existing licenses and terms into the new license framework and, if desired, should manually find and update any datasets for which the automated update is problematic. +To understand the migration process, it is useful to understand how the multiple license feature works in this release: + +'Custom Terms', aka a custom license, are defined through entries in the following fields of the dataset "Terms" tab: +- Terms of Use +- Confidentiality Declaration +- Special Permissions +- Restrictions +- Citation Requirements +- Depositor Requirements +- Conditions +- Disclaimer + +'Custom Terms' require, at a minimum, a non-blank entry in the "Terms of Use" field. Entries in other fields are optional. + +Since these fields are intended for terms/conditions that would potentially conflict with or modify the terms in a standard license, they are no longer shown when a standard license is selected. 
+ +In earlier Dataverse releases, it was possible to select the CC0 license and have entries in the fields above. It was also possible to say 'No' to using CC0 and leave all of these terms fields blank. + +The automated process will update existing datasets as follows: + +- 'CC0 Waiver' and no entries in the fields above -> CC0 License (no change) +- No CC0 Waiver and an entry in the Terms of Use field and possibly other fields listed above -> 'Custom Terms' with the same entries in these fields (no change) + +- CC0 Waiver and an entry in some of the fields listed -> 'Custom Terms' with the following text prepended in the "Terms of Use" field: "This dataset is made available under a Creative Commons CC0 license with the following additional/modified terms and conditions:" +- No CC0 Waiver and an entry in a field(s) other than the Terms of Use field -> 'Custom Terms' with the following "Terms of Use" added: "This dataset is made available with limited information on how it can be used. You may wish to communicate with the Contact(s) specified before use." +- No CC0 Waiver and no entry in any of the listed fields -> 'Custom Terms' with the following "Terms of Use" added: "This dataset is made available without information on how it can be used. You should communicate with the Contact(s) specified before use." + +Administrators who have datasets where CC0 has been selected along with additional terms, or datasets where the Terms of Use field is empty, may wish to modify those datasets prior to upgrading to avoid the automated changes above. The Additional Release Steps section below provides information on how to find and modify any such datasets. + +## New JVM Options and DB Settings + +- `:AllowCustomTermsOfUse` (default: true) allows users to provide Custom Terms instead of choosing one of the configured standard licenses. + +See the [Database Settings](https://guides.dataverse.org/en/5.10/installation/config.html) section of the Guides for more information. + +## Additional Release Steps + +In most Dataverse installations, one would expect the vast majority of Datasets to either use the CC0 Waiver or have non-empty Terms of Use. As noted above, these will be migrated without any issue. Administrators may, however, wish to find and manually update datasets that specified a CC0 license but also had terms (no longer allowed) or had no license and no terms of use (also no longer allowed) rather than accept the default migrations for these datasets listed above. + +### To find Datasets with a CC0 license and non-empty terms: + + select CONCAT('doi:', dvo.authority, '/', dvo.identifier) as datasetdoi, v.alias as dataverse_alias, case when versionstate='RELEASED' then concat(dv.versionnumber, '.', dv.minorversionnumber) else versionstate END as version, dv.id as datasetversion_id, t.id as termsofuseandaccess_id, t.termsofuse, t.confidentialitydeclaration, t.specialpermissions, t.restrictions, t.citationrequirements, t.depositorrequirements, t.conditions, t.disclaimer from dvobject dvo, termsofuseandaccess t, datasetversion dv, dataverse v where dv.dataset_id=dvo.id and dv.termsofuseandaccess_id=t.id and dvo.owner_id=v.id and t.license='CC0' and not (t.termsofuse is null and t.confidentialitydeclaration is null and t.specialpermissions is null and t.restrictions is null and t.citationrequirements is null and t.depositorrequirements is null and t.conditions is null and t.disclaimer is null); + +The datasetdoi column will let you find/view the affected dataset in the Dataverse web interface.
The version column will indicate which version(s) are relevant. The dataverse_alias will tell you which Dataverse collection the dataset is in (and may be useful if you want to adjust all datasets in a given collection). The termsofuseandaccess_id column indicates which specific entry in that table is associated with the dataset/version. The remaining columns show the values of any terms fields. + +There are two choices to migrate such datasets: + + - Set all terms fields to null: + + + update termsofuseandaccess set termsofuse=null, confidentialitydeclaration=null, specialpermissions=null, restrictions=null, citationrequirements=null, depositorrequirements=null, conditions=null, disclaimer=null where id=; + +or to change several at once: + + update termsofuseandaccess set termsofuse=null, confidentialitydeclaration=null, specialpermissions=null, restrictions=null, citationrequirements=null, depositorrequirements=null, conditions=null, disclaimer=null where id in (); + + - Alternatively, change the Dataset version(s) to not use the CC0 waiver and modify the Terms of Use (and/or other fields) as you wish to indicate that the CC0 waiver was previously selected: + + + update termsofuseandaccess set license='NONE', termsofuse=concat('New text. ', termsofuse) where id=; + +or + + update termsofuseandaccess set license='NONE', termsofuse=concat('New text. ', termsofuse) where id in (); + +### To find datasets without CC0 and having an empty Terms of Use field: + + select CONCAT('doi:', dvo.authority, '/', dvo.identifier) as datasetdoi, v.alias as dataverse_alias, case when versionstate='RELEASED' then concat(dv.versionnumber, '.', dv.minorversionnumber) else versionstate END as version, dv.id as datasetversion_id, t.id as termsofuseandaccess_id, t.termsofuse, t.confidentialitydeclaration, t.specialpermissions, t.restrictions, t.citationrequirements, t.depositorrequirements, t.conditions, t.disclaimer from dvobject dvo, termsofuseandaccess t, datasetversion dv, dataverse v where dv.dataset_id=dvo.id and dv.termsofuseandaccess_id=t.id and dvo.owner_id=v.id and t.license='NONE' and t.termsofuse is null; + +These datasets could be updated to use CC0: + + update termsofuseandaccess set license='CC0', confidentialitydeclaration=null, specialpermissions=null, restrictions=null, citationrequirements=null, depositorrequirements=null, conditions=null, disclaimer=null where id=; + +or Terms of Use could be added: + + update termsofuseandaccess set termsofuse='New text. ' where id=; + +In both cases, the same `where id in ()` ending could be used to change multiple datasets/versions at once. + +### Standardizing Custom Licenses: + +If many datasets use the same set of Custom Terms, it may make sense to create and register a standard license including those terms. Doing this would include: +- Creating and posting an external document that includes the custom terms, e.g. an HTML document with sections corresponding to the terms fields that are used. +- Defining a name, short description, URL (where it is posted), and optionally an icon URL for this license +- Using the Dataverse API to register the new license as one of the options available in your installation (see the example below) +- Using the API to make sure the license is active and deciding whether the license should also be the default +- Once the license is registered with Dataverse, making an SQL update to change datasets/versions using that license to reference it instead of having their own copy of those custom terms.
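+
+As an illustration of the registration step above, a standardized license can be described in a small JSON file and registered through the licenses API documented in the Guides. The name, URI, description, and file name below are placeholder values for illustration only; replace them with those of your posted terms document:
+
+    {
+      "name": "My Organization Data Use Terms 1.0",
+      "uri": "https://example.org/licenses/data-use-terms-1.0.html",
+      "shortDescription": "Standard data use terms for datasets deposited by My Organization.",
+      "active": true
+    }
+
+    curl -X POST -H 'Content-Type: application/json' -H X-Dataverse-key:$API_TOKEN --data-binary @my-organization-license.json $SERVER_URL/api/licenses
+
+The database id assigned to the new license can then be found by listing licenses via `/api/licenses` and used with the `:active` and `default` endpoints described in the API Guide.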
+ +The benefits of this approach are: +- usability: the license can be selected for new datasets without allowing custom terms and without users having to cut/paste terms or collection administrators having to configure templates with those terms +- efficiency: custom terms are stored per dataset, whereas a license is registered once and all uses refer to the same object and external URL +- security: with the license terms maintained external to Dataverse, users cannot edit specific terms and curators do not need to check for edits + +Once a standardized version of your Custom Terms is registered as a license, an SQL update like the following can be used to have datasets use it: + + UPDATE termsofuseandaccess + SET license_id = (SELECT license.id FROM license WHERE license.name = ''), termsofuse=null, confidentialitydeclaration=null, specialpermissions=null, restrictions=null, citationrequirements=null, depositorrequirements=null, conditions=null, disclaimer=null + WHERE termsofuseandaccess.termsofuse LIKE '%%'; + +## Backward Incompatibilities + +With the change to support multiple licenses, which can include cases where CC0 is not an option, and the decision to prohibit two previously possible cases (no license and no entry in the Terms of Use field, a standard license and entries in Terms of Use, Special Permissions and related fields), this release contains changes to the display, API payloads, and export metadata that are not backward compatible. These include: +- Use of "CC0 1.0", the short name specified by Creative Commons, for what Dataverse has called the "CC0 Waiver" by default - in the display, API payloads, and export formats including a license name (note that installation admins can alter the license name in the database to maintain the original "CC0 Waiver" text) +- Schema.org metadata in page headers and the Schema.org json-ld metadata export now reference the license via URL (which should avoid the current warning from Google about an invalid license object in the page metadata) +- Metadata exports and import methods (including Sword) use either the license name (e.g. in the JSON export) or URL (e.g. in the OAI_ORE export) rather than the previously hardcoded value of "CC0" or "CC0 Waiver" (if the CC0 license is available, its default name will be "CC0 1.0") +- API calls (e.g. for import, migrate) that specify both a license and custom terms will be considered an error, as will having no license and an empty/blank value for Terms of Use +- Rollback. In general, one should not deploy an earlier release over a database that has been modified by deployment of a later release. (Make a db backup before upgrading and use that copy if you go back to a prior version.) Due to the nature of the db changes in this release, attempts to deploy an earlier version of Dataverse will fail unless the database is also restored to its pre-release state. + +Also note that, since the CC0 Waiver is no longer a hardcoded option, text strings referencing it have been edited or removed from Bundle.properties. This means that the ability to provide translations of the CC0 license name/description has been removed. The initial release of multiple license functionality doesn't include an alternative mechanism to provide translations of license names/descriptions, so this is a regression in capability. (The instructions and help information about licenses and terms remain internationalizable; only the names/descriptions of the licenses themselves cannot yet be translated.)
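+
+As an illustration of the Schema.org change noted above, the page-header JSON-LD for a dataset released under the CC0 1.0 license is now expected to reference the license by its URL, roughly as in the following sketch (other fields omitted; the exact output of your installation may differ):
+
+    {
+      "@context": "http://schema.org",
+      "@type": "Dataset",
+      "license": "https://creativecommons.org/publicdomain/zero/1.0/"
+    }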
\ No newline at end of file diff --git a/doc/sphinx-guides/source/_static/api/add-license.json b/doc/sphinx-guides/source/_static/api/add-license.json new file mode 100644 index 00000000000..969d6d58dab --- /dev/null +++ b/doc/sphinx-guides/source/_static/api/add-license.json @@ -0,0 +1,7 @@ +{ + "name": "CC-BY-4.0", + "uri": "http://creativecommons.org/licenses/by/4.0", + "shortDescription": "Creative Commons Attribution 4.0 International License.", + "iconUrl": "https://i.creativecommons.org/l/by/4.0/88x31.png", + "active": true +} diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 8ec1b4a7ab3..10e2014c8cd 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1166,7 +1166,7 @@ The fully expanded example above (without environment variables) looks like this .. _assign-role-on-a-dataset-api: Assign a New Role on a Dataset -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Assigns a new role, based on the POSTed JSON: @@ -1194,7 +1194,7 @@ POSTed JSON example (the content of ``role.json`` file):: .. _revoke-role-on-a-dataset-api: Delete Role Assignment from a Dataset -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Delete the assignment whose id is ``$id``: @@ -1400,7 +1400,7 @@ In practice, you only need one the ``dataset_id`` or the ``persistentId``. The e print r.status_code Report the data (file) size of a Dataset -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Shows the combined size in bytes of all the files uploaded into the dataset ``id``. @@ -2511,7 +2511,7 @@ In order to obtain a new token use:: curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/users/token/recreate Delete a Token -~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~ In order to delete a token use:: @@ -2828,7 +2828,7 @@ Shows all Harvesting Sets defined in the installation:: GET http://$SERVER/api/harvest/server/oaisets/ -List A Specific Harvesting Set +List A Specific Harvesting Set ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Shows a Harvesting Set with a defined specname:: @@ -3153,7 +3153,7 @@ Deletes an authentication provider from the system. The command succeeds even if DELETE http://$SERVER/api/admin/authenticationProviders/$id/ List Global Roles -~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~ List all global roles in the system. :: @@ -3717,3 +3717,49 @@ Recursively applies the role assignments of the specified Dataverse collection, GET http://$SERVER/api/admin/dataverse/{dataverse alias}/addRoleAssignmentsToChildren Note: setting ``:InheritParentRoleAssignments`` will automatically trigger inheritance of the parent Dataverse collection's role assignments for a newly created Dataverse collection. Hence this API call is intended as a way to update existing child Dataverse collections or to update children after a change in role assignments has been made on a parent Dataverse collection. + +.. _license-management-api: + +Manage Available Standard License Terms +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +View the list of standard license terms that can be selected for a dataset: + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + curl $SERVER_URL/api/licenses + +View the details of the standard license with the database ID specified in ``$ID``: + +.. 
code-block:: bash + + export ID=1 + curl $SERVER_URL/api/licenses/$ID + + +Superusers can add a new license by posting a JSON file adapted from this example :download:`add-license.json <../_static/api/add-license.json>`. The ``name`` and ``uri`` of the new license must be unique: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + curl -X POST -H 'Content-Type: application/json' -H X-Dataverse-key:$API_TOKEN --data-binary @add-license.json $SERVER_URL/api/licenses + +Superusers can change whether the license specified by the database ``$ID`` is active (usable for new dataset versions) or inactive (only allowed on already-published versions): + +.. code-block:: bash + + export STATE=true + curl -X PUT -H 'Content-Type: application/json' -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/licenses/$ID/:active/$STATE + +Superusers can set the license specified by ``$ID`` as the default: + +.. code-block:: bash + + curl -X PUT -H 'Content-Type: application/json' -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/licenses/default/$ID + +Superusers can delete a license, provided it is not in use, by specifying its database ``$ID``: + +.. code-block:: bash + + curl -X DELETE -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/licenses/$ID diff --git a/doc/sphinx-guides/source/installation/advanced.rst b/doc/sphinx-guides/source/installation/advanced.rst index 19fea65f0ca..6e6d24b0526 100644 --- a/doc/sphinx-guides/source/installation/advanced.rst +++ b/doc/sphinx-guides/source/installation/advanced.rst @@ -36,6 +36,35 @@ You would repeat the steps above for all of your app servers. If users seem to b Please note that :ref:`network-ports` under the Configuration section has more information on fronting your app server with Apache. The :doc:`shibboleth` section talks about the use of ``ProxyPassMatch``. +Licensing +--------- + +Dataverse allows superusers to specify the list of allowed licenses, to define which license is the default, to decide whether users can instead define custom terms, and to mark obsolete licenses as 'inactive' to stop further use of them. +These tasks can be accomplished using the :ref:`native API <license-management-api>` and the :ref:`:AllowCustomTermsOfUse <:AllowCustomTermsOfUse>` setting. + +Standardizing Custom Licenses ++++++++++++++++++++++++++++++ + +In addition, if many datasets use the same set of Custom Terms, it may make sense to create and register a standard license including those terms. Doing this would include: + +- Creating and posting an external document that includes the custom terms, e.g. an HTML document with sections corresponding to the terms fields that are used. +- Defining a name, short description, URL (where it is posted), and optionally an icon URL for this license +- Using the Dataverse API to register the new license as one of the options available in your installation +- Using the API to make sure the license is active and deciding whether the license should also be the default +- Once the license is registered with Dataverse, making an SQL update to change datasets/versions using that license to reference it instead of having their own copy of those custom terms.
+ +The benefits of this approach are: +- usability: the license can be selected for new datasets without allowing custom terms and without users having to cut/paste terms or collection administrators having to configure templates with those terms +- efficiency: custom terms are stored per dataset, whereas a license is registered once and all uses refer to the same object and external URL +- security: with the license terms maintained external to Dataverse, users cannot edit specific terms and curators do not need to check for edits + +Once a standardized version of your Custom Terms is registered as a license, an SQL update like the following can be used to have datasets use it: + +:: + + UPDATE termsofuseandaccess + SET license_id = (SELECT license.id FROM license WHERE license.name = ''), termsofuse=null, confidentialitydeclaration=null, specialpermissions=null, restrictions=null, citationrequirements=null, depositorrequirements=null, conditions=null, disclaimer=null + WHERE termsofuseandaccess.termsofuse LIKE '%%'; + Optional Components ------------------- diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 446fadb7868..b8ec7507a88 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -724,8 +724,8 @@ When a user selects one of the available choices, the Dataverse user interfaces Allowing the Language Used for Dataset Metadata to be Specified +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -Since dataset metadata can only be entered in one language, and administrators may wish to limit which languages metadata can be entered in, Dataverse also offers a separate setting defining allowed metadata languages. -The presence of the :ref:`:MetadataLanguages` database setting identifies the available options (which can be different from those in the :Languages setting above, with fewer or more options). +Since dataset metadata can only be entered in one language, and administrators may wish to limit which languages metadata can be entered in, Dataverse also offers a separate setting defining allowed metadata languages. +The presence of the :ref:`:MetadataLanguages` database setting identifies the available options (which can be different from those in the :Languages setting above, with fewer or more options). Dataverse collection admins can select from these options to indicate which language should be used for new Datasets created with that specific collection. When creating or editing a dataset, users will be asked to enter the metadata in that language. The metadata language selected will also be shown when dataset metadata is viewed and will be included in metadata exports (as appropriate for each format) for published datasets: @@ -2278,7 +2278,7 @@ See :ref:`i18n` for a curl example and related settings. :MetadataLanguages ++++++++++++++++++ -Sets which languages can be used when entering dataset metadata. +Sets which languages can be used when entering dataset metadata. See :ref:`i18n` for further discussion, a curl example, and related settings. @@ -2427,7 +2427,7 @@ A boolean setting that, if true, will send an email and notification to users wh A JSON-structured setting that configures Dataverse to associate specific metadatablock fields with external vocabulary services and specific vocabularies/sub-vocabularies managed by that service. More information about this capability is available at :doc:`/admin/metadatacustomization`.
-Scripts that implement this association for specific service protocols are maintained at https://github.com/gdcc/dataverse-external-vocab-support. That repository also includes a json-schema for validating the structure required by this setting along with an example metadatablock and sample :CVocConf setting values associating entries in the example block with ORCID and SKOSMOS based services. +Scripts that implement this association for specific service protocols are maintained at https://github.com/gdcc/dataverse-external-vocab-support. That repository also includes a json-schema for validating the structure required by this setting along with an example metadatablock and sample :CVocConf setting values associating entries in the example block with ORCID and SKOSMOS based services. ``wget https://gdcc.github.io/dataverse-external-vocab-support/examples/config/cvoc-conf.json`` @@ -2437,23 +2437,30 @@ Scripts that implement this association for specific service protocols are maint :AllowedCurationLabels ++++++++++++++++++++++ - -A JSON Object containing lists of allowed labels (up to 32 characters, spaces allowed) that can be set, via API or UI by users with the permission to publish a dataset. The set of labels allowed -for datasets can be selected by a superuser - via the Dataverse collection page (Edit/General Info) or set via API call. -The labels in a set should correspond to the states in an organization's curation process and are intended to help users/curators track the progress of a dataset through a defined curation process. -A dataset may only have one label at a time and if a label is set, it will be removed at publication time. + +A JSON Object containing lists of allowed labels (up to 32 characters, spaces allowed) that can be set, via API or UI by users with the permission to publish a dataset. The set of labels allowed +for datasets can be selected by a superuser - via the Dataverse collection page (Edit/General Info) or set via API call. +The labels in a set should correspond to the states in an organization's curation process and are intended to help users/curators track the progress of a dataset through a defined curation process. +A dataset may only have one label at a time and if a label is set, it will be removed at publication time. This functionality is disabled when this setting is empty/not set. Each set of labels is identified by a curationLabelSet name and a JSON Array of the labels allowed in that set. ``curl -X PUT -d '{"Standard Process":["Author contacted", "Privacy Review", "Awaiting paper publication", "Final Approval"], "Alternate Process":["State 1","State 2","State 3"]}' http://localhost:8080/api/admin/settings/:AllowedCurationLabels`` +.. _:AllowCustomTermsOfUse: + +:AllowCustomTermsOfUse +++++++++++++++++++++++ + +By default, custom terms of data use and access can be specified after selecting "Custom Terms" from the License/DUA dropdown on the Terms tab. When ``:AllowCustomTermsOfUse`` is set to ``false``, the "Custom Terms" item is not made available to the depositor. + .. _:MaxEmbargoDurationInMonths: :MaxEmbargoDurationInMonths +++++++++++++++++++++++++++ -This setting controls whether embargoes are allowed in a Dataverse instance and can limit the maximum duration users are allowed to specify. A value of 0 months or non-existent -setting indicates embargoes are not supported. A value of -1 allows embargoes of any length.
Any other value indicates the maximum number of months (from the current date) a user +This setting controls whether embargoes are allowed in a Dataverse instance and can limit the maximum duration users are allowed to specify. A value of 0 months or non-existent +setting indicates embargoes are not supported. A value of -1 allows embargoes of any length. Any other value indicates the maximum number of months (from the current date) a user can enter for an embargo end date. This limit will be enforced in the popup dialog in which users enter the embargo date. For example, to set a two year maximum: ``curl -X PUT -d 24 http://localhost:8080/api/admin/settings/:MaxEmbargoDurationInMonths`` diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index fe47a885466..1ef69a96fba 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -299,29 +299,42 @@ After successfully replacing a file, a new dataset draft version will be created Terms ===== -Dataset terms can be viewed and edited from the Terms tab of the dataset page, or under the Edit dropdown button of a Dataset. There, you can set up how users can use your data once they have downloaded it (CC0 waiver or custom Terms of Use), how they can access your data if you have files that are restricted (terms of access), and enable a Guestbook for your dataset so that you can track who is using your data and for what purposes. These are explained in further detail below: +Dataset terms can be viewed and edited from the Terms tab of the dataset page, or under the Edit dropdown button of a Dataset. There, you can set up how users can use your data once they have downloaded it (via a standard license or, if allowed, custom terms), how they can access your data if you have files that are restricted (terms of access), and enable a Guestbook for your dataset so that you can track who is using your data and for what purposes. These are explained in further detail below: -CC0 Public Domain Dedication ----------------------------- +Choosing a License +------------------ -By default, all new datasets created through the Dataverse installation's web UI are given a `Creative Commons CC0 Public Domain Dedication `_. +Each Dataverse installation provides a set of license(s) data can be released under, and whether users can specify custom terms instead (see below). +One of the available licenses (often the `Creative Commons CC0 Public Domain Dedication `_) serves as the default if you do not make an explicit choice. +If you want to apply one of the other available licenses to your dataset, you can change it on the Terms tab of your Dataset page. -The `Creative Commons `_ organization defines a number of `licenses `_ that allow copyright holders to release their intellectual property more openly, with fewer legal restrictions than standard copyright enforces. Each Creative Commons license typically specifies simple terms for how the IP must be used, reused, shared, and attributed. In addition to these licenses, Creative Commons also provides public domain tools that make it easy to dedicate IP to the public domain. +License Selection and Professional Norms +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -In the context of a Dataverse installation, their `CC0 Public Domain Dedication `_ allows you to unambiguously waive all copyright control over your data in all jurisdictions worldwide. 
Data released with CC0 can be freely copied, modified, and distributed (even for commercial purposes) without violating copyright. In most parts of the world, factual data is exempt from copyright anyway, but applying CC0 removes all ambiguity and makes the legal copyright status of the data as clear as possible. The Dataverse Software applies CC0 to datasets by default because it facilitates reuse, extensibility, and long-term preservation of research data by assuring that the data can be safely handled by anyone without fear of potential copyright pitfalls. +When selecting a license, data depositors should recognize that their data will be available internationally and, over the long term, may be used in new forms of research (for example, in machine learning where millions of datasets might be used in training). +It is therefore useful to consider licenses that have been developed with awareness of international law and that place minimal restrictions on reuse. + +For example, the `Creative Commons `_ organization defines a number of `licenses `_ that allow copyright holders to release their intellectual property more openly, with fewer legal restrictions than standard copyright enforces. +(These licenses may or may not be available in the Dataverse instance you are using, but we expect them to be common in the community.) +Each Creative Commons license typically specifies simple terms for how the IP must be used, reused, shared, and attributed and includes language intended to address variations in the laws of different countries. -Though CC0 waives a dataset owner's legal copyright controls over the data, it does not exempt a Dataverse installation's users from following ethical and professional norms in scholarly communications. The `Dataverse Community Norms `_ * as well as scientific best practices assert that proper credit should be given via citation. Regardless of whether CC0 has been applied or not, Dataverse Community members are expected to cite the data they use, giving credit to the data's authors. This expectation applies to both the Dataverse Community members and the entire wider scholarly community. +In addition to these licenses, Creative Commons also provides the `CC0 1.0 Universal (CC0 1.0) Public Domain Dedication `_, which allows you to unambiguously waive all copyright control over your data in all jurisdictions worldwide. +Data released with CC0 can be freely copied, modified, and distributed (even for commercial purposes) without violating copyright. +In most parts of the world, factual data is exempt from copyright anyway, but applying CC0 removes all ambiguity and makes the legal copyright status of the data as clear as possible. -Additionally, users are still expected to respect access restrictions and other terms applied to CC0 files in a Dataverse installation. Additional restrictions, conditions, and terms can still be compatible with CC0, as CC0 only operates in the realm of copyright, which is rather limited when it comes to data. +When available, CC0 can be a good choice for datasets because it facilitates reuse, extensibility, and long-term preservation of research data by assuring that the data can be safely handled by anyone without fear of potential copyright pitfalls. -If a data owner feels that CC0 is not suitable for their data, they are able to enter custom Terms of Use, as detailed in the following section.
+Data depositors and data users should also understand that while licenses define legal use, they do not exempt a Dataverse installation's users from following ethical and professional norms in scholarly communications. +For example, though CC0 waives a dataset owner's legal copyright controls over the data, users, as scholarly researchers, are still expected to cite the data they use, giving credit to the data's authors following ethical and professional norms in scholarly communications. +This is true of other licenses as well - users should cite data as appropriate even if the specified license does not require it. +The `Dataverse Community Norms `_ * details additional areas where data users should follow societal norms and scientific best practices. \* **Legal Disclaimer:** these `Community Norms `_ are not a substitute for the CC0 waiver or custom terms and licenses applicable to each dataset. The Community Norms are not a binding contractual agreement, and that downloading datasets from a Dataverse installation does not create a legal obligation to follow these policies. Custom Terms of Use for Datasets -------------------------------- -If you are unable to use the CC0 Public Domain Dedication for your datasets, you may specify your own custom Terms of Use. To do so, select "No, do not apply CC0 - "Public Domain Dedication", and a Terms of Use text box will show up allowing you to enter your own custom terms of use for your dataset. To add more information about the Terms of Use, we have provided fields like Special Permissions, Restrictions, Citation Requirements, etc. +If the Dataverse you are using allows it, you may specify your own Custom Dataset Terms. To do so, select Custom Dataset Terms for your license, and a panel will appear allowing you to enter custom Terms of Use. You can also enter information in additional fields including Special Permissions, Restrictions, and Citation Requirements to further clarify how your Dataset may be accessed and used. Here is an `example of a Data Usage Agreement `_ for datasets that have de-identified human subject data. 
diff --git a/scripts/api/data/licenses/licenseCC-BY-4.0.json b/scripts/api/data/licenses/licenseCC-BY-4.0.json new file mode 100644 index 00000000000..5596e65e947 --- /dev/null +++ b/scripts/api/data/licenses/licenseCC-BY-4.0.json @@ -0,0 +1,7 @@ +{ + "name": "CC BY 4.0", + "uri": "http://creativecommons.org/licenses/by/4.0", + "shortDescription": "Creative Commons Attribution 4.0 International License.", + "iconUrl": "https://licensebuttons.net/l/by/4.0/88x31.png", + "active": true +} diff --git a/scripts/api/data/licenses/licenseCC-BY-NC-4.0.json b/scripts/api/data/licenses/licenseCC-BY-NC-4.0.json new file mode 100644 index 00000000000..8154c9ec5df --- /dev/null +++ b/scripts/api/data/licenses/licenseCC-BY-NC-4.0.json @@ -0,0 +1,7 @@ +{ + "name": "CC BY-NC 4.0", + "uri": "http://creativecommons.org/licenses/by-nc/4.0", + "shortDescription": "Creative Commons Attribution-NonCommercial 4.0 International License.", + "iconUrl": "https://licensebuttons.net/l/by-nc/4.0/88x31.png", + "active": true +} diff --git a/scripts/api/data/licenses/licenseCC-BY-NC-ND-4.0.json b/scripts/api/data/licenses/licenseCC-BY-NC-ND-4.0.json new file mode 100644 index 00000000000..203350edb66 --- /dev/null +++ b/scripts/api/data/licenses/licenseCC-BY-NC-ND-4.0.json @@ -0,0 +1,7 @@ +{ + "name": "CC BY-NC-ND 4.0", + "uri": "http://creativecommons.org/licenses/by-nc-nd/4.0", + "shortDescription": "Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.", + "iconUrl": "https://licensebuttons.net/l/by-nc-nd/4.0/88x31.png", + "active": true +} diff --git a/scripts/api/data/licenses/licenseCC-BY-NC-SA-4.0.json b/scripts/api/data/licenses/licenseCC-BY-NC-SA-4.0.json new file mode 100644 index 00000000000..e9726fb6374 --- /dev/null +++ b/scripts/api/data/licenses/licenseCC-BY-NC-SA-4.0.json @@ -0,0 +1,7 @@ +{ + "name": "CC BY-NC-SA 4.0", + "uri": "http://creativecommons.org/licenses/by-nc-sa/4.0", + "shortDescription": "Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License.", + "iconUrl": "https://licensebuttons.net/l/by-nc-sa/4.0/88x31.png", + "active": true +} diff --git a/scripts/api/data/licenses/licenseCC-BY-ND-4.0.json b/scripts/api/data/licenses/licenseCC-BY-ND-4.0.json new file mode 100644 index 00000000000..7ae81bacc10 --- /dev/null +++ b/scripts/api/data/licenses/licenseCC-BY-ND-4.0.json @@ -0,0 +1,7 @@ +{ + "name": "CC BY-ND 4.0", + "uri": "http://creativecommons.org/licenses/by-nd/4.0", + "shortDescription": "Creative Commons Attribution-NoDerivatives 4.0 International License.", + "iconUrl": "https://licensebuttons.net/l/by-nd/4.0/88x31.png", + "active": true +} diff --git a/scripts/api/data/licenses/licenseCC-BY-SA-4.0.json b/scripts/api/data/licenses/licenseCC-BY-SA-4.0.json new file mode 100644 index 00000000000..e9a02880885 --- /dev/null +++ b/scripts/api/data/licenses/licenseCC-BY-SA-4.0.json @@ -0,0 +1,7 @@ +{ + "name": "CC BY-SA 4.0", + "uri": "http://creativecommons.org/licenses/by-sa/4.0", + "shortDescription": "Creative Commons Attribution-ShareAlike 4.0 International License.", + "iconUrl": "https://licensebuttons.net/l/by-sa/4.0/88x31.png", + "active": true +} diff --git a/scripts/api/data/licenses/licenseCC0-1.0.json b/scripts/api/data/licenses/licenseCC0-1.0.json new file mode 100644 index 00000000000..024b3bc826a --- /dev/null +++ b/scripts/api/data/licenses/licenseCC0-1.0.json @@ -0,0 +1,7 @@ +{ + "name": "CC0 1.0", + "uri": "https://creativecommons.org/publicdomain/zero/1.0/", + "shortDescription": "Creative Commons CC0 1.0 Universal
Public Domain Dedication.", + "iconUrl": "https://licensebuttons.net/p/zero/1.0/88x31.png", + "active": true +} diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index d1592519dc2..569a5cdfd2a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -3,6 +3,7 @@ import edu.harvard.iq.dataverse.dataset.DatasetThumbnail; import edu.harvard.iq.dataverse.dataset.DatasetUtil; import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; +import edu.harvard.iq.dataverse.license.License; import edu.harvard.iq.dataverse.makedatacount.DatasetExternalCitations; import edu.harvard.iq.dataverse.makedatacount.DatasetMetrics; import java.nio.file.Path; @@ -301,7 +302,7 @@ private DatasetVersion createNewDatasetVersion(Template template, FileMetadata f //if the latest version has values get them copied over if (template != null) { dsv.updateDefaultValuesFromTemplate(template); - setVersions(new ArrayList()); + setVersions(new ArrayList<>()); } else { latestVersion = getLatestVersionForCopy(); @@ -315,11 +316,6 @@ private DatasetVersion createNewDatasetVersion(Template template, FileMetadata f if (latestVersion.getTermsOfUseAndAccess()!= null){ dsv.setTermsOfUseAndAccess(latestVersion.getTermsOfUseAndAccess().copyTermsOfUseAndAccess()); - } else { - TermsOfUseAndAccess terms = new TermsOfUseAndAccess(); - terms.setDatasetVersion(dsv); - terms.setLicense(TermsOfUseAndAccess.License.CC0); - dsv.setTermsOfUseAndAccess(terms); } for (FileMetadata fm : latestVersion.getFileMetadatas()) { @@ -396,11 +392,11 @@ public DatasetVersion getEditVersion(Template template, FileMetadata fm) { * @todo Investigate if this method should be deprecated in favor of * createNewDatasetVersion. 
*/ - public DatasetVersion getCreateVersion() { + public DatasetVersion getCreateVersion(License license) { DatasetVersion dsv = new DatasetVersion(); dsv.setVersionState(DatasetVersion.VersionState.DRAFT); dsv.setDataset(this); - dsv.initDefaultValues(); + dsv.initDefaultValues(license); this.setVersions(new ArrayList<>()); getVersions().add(0, dsv); return dsv; diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 209627356f9..61720efafb2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -38,6 +38,7 @@ import edu.harvard.iq.dataverse.export.spi.Exporter; import edu.harvard.iq.dataverse.ingest.IngestRequest; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; +import edu.harvard.iq.dataverse.license.LicenseServiceBean; import edu.harvard.iq.dataverse.metadataimport.ForeignMetadataImportServiceBean; import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; @@ -61,7 +62,6 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; -import java.io.StringReader; import java.sql.Timestamp; import java.text.SimpleDateFormat; import java.time.Instant; @@ -78,6 +78,7 @@ import java.util.Set; import java.util.Collection; import java.util.logging.Logger; +import java.util.stream.Collectors; import javax.ejb.EJB; import javax.ejb.EJBException; import javax.faces.application.FacesMessage; @@ -87,14 +88,12 @@ import javax.faces.view.ViewScoped; import javax.inject.Inject; import javax.inject.Named; -import javax.json.Json; -import javax.json.JsonArray; import javax.json.JsonObject; -import javax.json.JsonReader; -import javax.json.JsonString; +import org.apache.commons.lang3.StringUtils; import org.primefaces.event.FileUploadEvent; import org.primefaces.model.file.UploadedFile; + import javax.validation.ConstraintViolation; import org.apache.commons.httpclient.HttpClient; //import org.primefaces.context.RequestContext; @@ -132,7 +131,7 @@ import org.apache.commons.validator.routines.EmailValidator; import org.apache.commons.lang3.mutable.MutableBoolean; import org.apache.commons.io.IOUtils; - +import org.primefaces.component.selectonemenu.SelectOneMenu; import org.primefaces.component.tabview.TabView; import org.primefaces.event.CloseEvent; import org.primefaces.event.TabChangeEvent; @@ -246,10 +245,12 @@ public enum DisplayMode { ProvPopupFragmentBean provPopupFragmentBean; @Inject MakeDataCountLoggingServiceBean mdcLogService; - @Inject + @Inject DataverseHeaderFragment dataverseHeaderFragment; - @Inject + @Inject EmbargoServiceBean embargoService; + @Inject + LicenseServiceBean licenseServiceBean; private Dataset dataset = new Dataset(); @@ -260,6 +261,7 @@ public enum DisplayMode { private Long ownerId; private Long versionId; private int selectedTabIndex; + private String selectTab = ""; private List newFiles = new ArrayList<>(); private List uploadedFiles = new ArrayList<>(); private MutableBoolean uploadInProgress = new MutableBoolean(false); @@ -1216,8 +1218,6 @@ public void setLinkingDataverseId(Long linkingDataverseId) { this.linkingDataverseId = linkingDataverseId; } - - public void updateReleasedVersions(){ setReleasedVersionTabList(resetReleasedVersionTabList()); @@ -1546,6 +1546,14 @@ public int getSelectedTabIndex() { return selectedTabIndex; } + public String getSelectTab() { + return selectTab; + } + + public void 
setSelectTab(String selectTab) { + this.selectTab = selectTab; + } + public void setSelectedTabIndex(int selectedTabIndex) { this.selectedTabIndex = selectedTabIndex; } @@ -1622,7 +1630,7 @@ public void updateSelectedTemplate(ValueChangeEvent event) { workingVersion.updateDefaultValuesFromTemplate(selectedTemplate); updateDatasetFieldInputLevels(); } else { - workingVersion.initDefaultValues(); + workingVersion.initDefaultValues(licenseServiceBean.getDefault()); updateDatasetFieldInputLevels(); } resetVersionUI(); @@ -1838,6 +1846,20 @@ private String init(boolean initFull) { return permissionsWrapper.notFound(); } + switch (selectTab){ + case "dataFilesTab": + selectedTabIndex = 0; + break; + case "metadataMapTab": + selectedTabIndex = 1; + break; + case "termsTab": + selectedTabIndex = 2; + break; + case "versionsTab": + selectedTabIndex = 3; + break; + } //this.dataset = this.workingVersion.getDataset(); @@ -1941,7 +1963,7 @@ private String init(boolean initFull) { FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Problem getting rsync script:", cex.getLocalizedMessage())); } } - + tooLargeToDownload = getSizeOfDatasetNumeric() > settingsWrapper.getZipDownloadLimit(); tooLargeToDownloadOriginal = getSizeOfDatasetOrigNumeric() > settingsWrapper.getZipDownloadLimit(); tooLargeToDownloadArchival = getSizeOfDatasetArchivalNumeric() > settingsWrapper.getZipDownloadLimit(); @@ -1988,7 +2010,7 @@ private String init(boolean initFull) { workingVersion = dataset.getEditVersion(selectedTemplate, null); updateDatasetFieldInputLevels(); } else { - workingVersion = dataset.getCreateVersion(); + workingVersion = dataset.getCreateVersion(licenseServiceBean.getDefault()); updateDatasetFieldInputLevels(); } @@ -2972,7 +2994,7 @@ public void startDownloadAllOriginal(){ this.setSelectedFiles(workingVersion.getFileMetadatas()); startDownload(true); } - + public void startDownloadSelectedArchival() { startDownload(false); } @@ -2980,7 +3002,7 @@ public void startDownloadSelectedArchival() { public void startDownloadSelectedOriginal() { startDownload(true); } - + private void startDownload(boolean downloadOriginal){ boolean guestbookRequired = isDownloadPopupRequired(); boolean validate = validateFilesForDownload(guestbookRequired, downloadOriginal); @@ -3051,7 +3073,7 @@ public boolean validateFilesForDownload(boolean guestbookRequired, boolean downl setValidateFilesOutcome("Mixed"); return true; } - + if (guestbookRequired) { setValidateFilesOutcome("GuestbookRequired"); } @@ -3059,7 +3081,7 @@ public boolean validateFilesForDownload(boolean guestbookRequired, boolean downl return true; } - + private void updateGuestbookResponse (boolean guestbookRequired, boolean downloadOriginal) { // Note that the GuestbookResponse object may still have information from // the last download action performed by the user. 
For example, it may @@ -3250,6 +3272,18 @@ private void restrictFiles(List filesToRestrict, boolean restricte } } + public boolean hasRestrictedFile() { + if (workingVersion == null) { + return false; + } + for (FileMetadata fmd : workingVersion.getFileMetadatas()) { + if (fmd.isRestricted()) { + return true; + } + } + return false; + } + public int getRestrictedFileCount() { if (workingVersion == null){ return 0; @@ -5173,7 +5207,7 @@ public boolean isUserCanCreatePrivateURL() { public String getPrivateUrlLink(PrivateUrl privateUrl) { return privateUrl.getLink(); } - + public boolean isAnonymizedAccess() { if (anonymizedAccess == null) { if (session.getUser() instanceof PrivateUrlUser) { @@ -5184,7 +5218,7 @@ public boolean isAnonymizedAccess() { } return anonymizedAccess; } - + public boolean isAnonymizedPrivateUrl() { if(privateUrl != null) { return privateUrl.isAnonymizedAccess(); @@ -5192,7 +5226,7 @@ public boolean isAnonymizedPrivateUrl() { return false; } } - + public boolean isAnonymizedAccessEnabled() { if (settingsWrapper.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames) != null) { return true; @@ -5200,7 +5234,7 @@ public boolean isAnonymizedAccessEnabled() { return false; } } - + // todo: we should be able to remove - this is passed in the html pages to other fragments, but they could just access this service bean directly. public FileDownloadServiceBean getFileDownloadService() { return fileDownloadService; @@ -5619,7 +5653,7 @@ public void setExternalStatus(String status) { public List getAllowedExternalStatuses() { return settingsWrapper.getAllowedExternalStatuses(dataset); } - + public Embargo getSelectionEmbargo() { return selectionEmbargo; } @@ -5628,9 +5662,9 @@ public void setSelectionEmbargo(Embargo selectionEmbargo) { this.selectionEmbargo = selectionEmbargo; } - + private Embargo selectionEmbargo = new Embargo(); - + public boolean isValidEmbargoSelection() { //If fileMetadataForAction is set, someone is using the kebab/single file menu if (fileMetadataForAction != null) { @@ -5648,7 +5682,7 @@ public boolean isValidEmbargoSelection() { } return false; } - + /* * This method checks to see if the selected file/files have an embargo that could be removed. It doesn't return true of a released file has an embargo. 
*/ @@ -5669,11 +5703,11 @@ public boolean isExistingEmbargo() { return false; } - + public boolean isActivelyEmbargoed(List fmdList) { return FileUtil.isActivelyEmbargoed(fmdList); } - + public boolean isEmbargoForWholeSelection() { for (FileMetadata fmd : selectedFiles) { if (fmd.getDataFile().isReleased()) { @@ -5682,7 +5716,7 @@ public boolean isEmbargoForWholeSelection() { } return true; } - + private boolean removeEmbargo=false; public boolean isRemoveEmbargo() { @@ -5703,16 +5737,16 @@ public void setRemoveEmbargo(boolean removeEmbargo) { PrimeFaces.current().resetInputs("datasetForm:embargoInputs"); } } - + public String saveEmbargo() { if (workingVersion.isReleased()) { refreshSelectedFiles(selectedFiles); } - + if(isRemoveEmbargo() || (selectionEmbargo.getDateAvailable()==null && selectionEmbargo.getReason()==null)) { selectionEmbargo=null; } - + if(!(selectionEmbargo==null || (selectionEmbargo!=null && settingsWrapper.isValidEmbargoDate(selectionEmbargo)))) { logger.fine("Validation error: " + selectionEmbargo.getFormattedDateAvailable()); FacesContext.getCurrentInstance().validationFailed(); @@ -5726,7 +5760,7 @@ public String saveEmbargo() { } else if (selectedFiles != null && selectedFiles.size() > 0) { embargoFMs = selectedFiles; } - + if(embargoFMs!=null && !embargoFMs.isEmpty()) { if(selectionEmbargo!=null) { selectionEmbargo = embargoService.merge(selectionEmbargo); @@ -5788,7 +5822,7 @@ public boolean isCantDownloadDueToEmbargo() { } return false; } - + public boolean isCantRequestDueToEmbargo() { if (fileDownloadHelper.getFilesForRequestAccess() != null) { for (DataFile df : fileDownloadHelper.getFilesForRequestAccess()) { @@ -5813,4 +5847,29 @@ public String getIngestMessage() { return BundleUtil.getStringFromBundle("file.ingestFailed.message", Arrays.asList(settingsWrapper.getGuidesBaseUrl(), settingsWrapper.getGuidesVersion())); } + public void validateTerms(FacesContext context, UIComponent component, Object value) throws ValidatorException { + UIComponent lic = component.findComponent("licenses"); + SelectOneMenu som = (SelectOneMenu) lic; + logger.fine("license in form is " + som.getValue()); + if (som.getValue() == null) { + if (StringUtils.isBlank((String) value)) { + FacesMessage msg = new FacesMessage(BundleUtil.getStringFromBundle("dataset.license.custom.blankterms")); + msg.setSeverity(FacesMessage.SEVERITY_ERROR); + + throw new ValidatorException(msg); + } + } + } + + public boolean downloadingRestrictedFiles() { + if (fileMetadataForAction != null) { + return fileMetadataForAction.isRestricted(); + } + for (FileMetadata fmd : this.selectedFiles) { + if (fmd.isRestricted()) { + return true; + } + } + return false; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index d53cf20491c..b766e5d7b58 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -3,6 +3,8 @@ import edu.harvard.iq.dataverse.util.MarkupChecker; import edu.harvard.iq.dataverse.DatasetFieldType.FieldType; import edu.harvard.iq.dataverse.branding.BrandingUtil; +import edu.harvard.iq.dataverse.dataset.DatasetUtil; +import edu.harvard.iq.dataverse.license.License; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.SystemConfig; @@ -85,10 +87,6 @@ public int compare(DatasetVersion o1, DatasetVersion o2) { // StudyVersionsFragment.xhtml in order 
to display the correct value from a Resource Bundle public enum VersionState { DRAFT, RELEASED, ARCHIVED, DEACCESSIONED - }; - - public enum License { - NONE, CC0 } public static final int ARCHIVE_NOTE_MAX_LENGTH = 1000; @@ -160,7 +158,10 @@ public enum License { @Transient private String contributorNames; - + + @Transient + private final String dataverseSiteUrl = SystemConfig.getDataverseSiteUrlStatic(); + @Transient private String jsonLd; @@ -204,7 +205,11 @@ public Long getVersion() { public void setVersion(Long version) { } - + + public String getDataverseSiteUrl() { + return dataverseSiteUrl; + } + public List getFileMetadatas() { return fileMetadatas; } @@ -569,12 +574,6 @@ public void updateDefaultValuesFromTemplate(Template template) { TermsOfUseAndAccess terms = template.getTermsOfUseAndAccess().copyTermsOfUseAndAccess(); terms.setDatasetVersion(this); this.setTermsOfUseAndAccess(terms); - } else { - TermsOfUseAndAccess terms = new TermsOfUseAndAccess(); - terms.setDatasetVersion(this); - terms.setLicense(TermsOfUseAndAccess.License.CC0); - terms.setDatasetVersion(this); - this.setTermsOfUseAndAccess(terms); } } @@ -593,11 +592,6 @@ public DatasetVersion cloneDatasetVersion(){ if (this.getTermsOfUseAndAccess()!= null){ dsv.setTermsOfUseAndAccess(this.getTermsOfUseAndAccess().copyTermsOfUseAndAccess()); - } else { - TermsOfUseAndAccess terms = new TermsOfUseAndAccess(); - terms.setDatasetVersion(dsv); - terms.setLicense(TermsOfUseAndAccess.License.CC0); - dsv.setTermsOfUseAndAccess(terms); } for (FileMetadata fm : this.getFileMetadatas()) { @@ -620,22 +614,18 @@ public DatasetVersion cloneDatasetVersion(){ dsv.getFileMetadatas().add(newFm); } - - - dsv.setDataset(this.getDataset()); return dsv; - } - public void initDefaultValues() { + public void initDefaultValues(License license) { //first clear then initialize - in case values were present // from template or user entry this.setDatasetFields(new ArrayList<>()); this.setDatasetFields(this.initDatasetFields()); TermsOfUseAndAccess terms = new TermsOfUseAndAccess(); terms.setDatasetVersion(this); - terms.setLicense(TermsOfUseAndAccess.License.CC0); + terms.setLicense(license); this.setTermsOfUseAndAccess(terms); } @@ -1871,19 +1861,7 @@ public String getJsonLd() { */ TermsOfUseAndAccess terms = this.getTermsOfUseAndAccess(); if (terms != null) { - JsonObjectBuilder license = Json.createObjectBuilder().add("@type", "Dataset"); - - if (TermsOfUseAndAccess.License.CC0.equals(terms.getLicense())) { - license.add("text", "CC0").add("url", TermsOfUseAndAccess.CC0_URI); - } else { - String termsOfUse = terms.getTermsOfUse(); - // Terms of use can be null if you create the dataset with JSON. 
- if (termsOfUse != null) { - license.add("text", termsOfUse); - } - } - - job.add("license",license); + job.add("license",DatasetUtil.getLicenseURI(this)); } job.add("includedInDataCatalog", Json.createObjectBuilder() @@ -1960,10 +1938,10 @@ public String getJsonLd() { job.add("distribution", fileArray); } jsonLd = job.build().toString(); - + //Most fields above should be stripped/sanitized but, since this is output in the dataset page as header metadata, do a final sanitize step to make sure jsonLd = MarkupChecker.stripAllTags(jsonLd); - + return jsonLd; } diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 1a4fba87919..7250f85a493 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -29,6 +29,7 @@ import edu.harvard.iq.dataverse.ingest.IngestRequest; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.ingest.IngestUtil; +import edu.harvard.iq.dataverse.license.LicenseServiceBean; import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.settings.Setting; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -135,6 +136,8 @@ public enum Referrer { @Inject ProvPopupFragmentBean provPopupFragmentBean; @Inject SettingsWrapper settingsWrapper; + @Inject + LicenseServiceBean licenseServiceBean; private Dataset dataset = new Dataset(); @@ -545,7 +548,7 @@ public String init() { return permissionsWrapper.notAuthorized(); } - clone = workingVersion.cloneDatasetVersion(); + clone = workingVersion.cloneDatasetVersion(); this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId()); this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit(); this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits(); @@ -570,7 +573,8 @@ public String init() { datafileService, permissionService, commandEngine, - systemConfig); + systemConfig, + licenseServiceBean); fileReplacePageHelper = new FileReplacePageHelper(addReplaceFileHelper, dataset, diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageTemplatesPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageTemplatesPage.java index a4f5d27c38e..676a8427d0d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManageTemplatesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManageTemplatesPage.java @@ -5,6 +5,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.DeleteTemplateCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseTemplateRootCommand; +import edu.harvard.iq.dataverse.license.LicenseServiceBean; import edu.harvard.iq.dataverse.util.JsfHelper; import static edu.harvard.iq.dataverse.util.JsfHelper.JH; import java.sql.Timestamp; @@ -56,6 +57,9 @@ public class ManageTemplatesPage implements java.io.Serializable { @Inject PermissionsWrapper permissionsWrapper; + + @Inject + LicenseServiceBean licenseServiceBean; private List