Compare commits

...

11 Commits

Author SHA1 Message Date
luca.frosini 97d51c1206 Added possibility to have a local configuration to instantiate Analyser 2023-08-31 18:00:40 +02:00
luca.frosini 117814a7c0 Improved docs and aligned code naming 2023-08-31 15:26:26 +02:00
luca.frosini 85a36d12e5 Refactoring library 2023-08-31 11:31:14 +02:00
luca.frosini 8897a25f4d improved documentation 2023-08-30 18:35:06 +02:00
luca.frosini 967027a3c0 Improving naming 2023-08-30 18:03:06 +02:00
luca.frosini b03e4e39b6 improving documentation 2023-08-30 17:48:02 +02:00
luca.frosini 93f924a7ad Enhanced version and fixed changelog 2023-08-30 17:46:31 +02:00
luca.frosini 5f3661f06f Refactored classes name 2023-08-30 17:46:05 +02:00
luca.frosini a4e10e256c Adding documentation 2023-08-30 17:40:36 +02:00
luca.frosini e2fb00c34f Generalizing library terminology 2023-08-30 17:40:18 +02:00
luca.frosini e3634904e7 Ignored MacOs File 2023-06-21 11:48:24 +02:00
26 changed files with 1029 additions and 248 deletions

2
.gitignore vendored
View File

@ -29,3 +29,5 @@ replay_pid*
/.project
/.settings
/gcat/
**.DS_Store
/bin/

View File

@ -2,6 +2,11 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm
# Changelog for gCube Software Versions Processor Lib
## [v1.1.0-SNAPSHOT]
- Renamed some classes to improve terminology
- Added sphinx documentation [#24841]
## [v1.0.0]
- First Release

20
docs/Makefile Normal file
View File

@ -0,0 +1,20 @@
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
# Source .rst files live next to this Makefile; HTML/other output goes to _build/.
SOURCEDIR = .
BUILDDIR = _build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
# "Makefile" is listed so the catch-all pattern rule below never tries to remake it.
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

55
docs/conf.py Normal file
View File

@ -0,0 +1,55 @@
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'Software Versions Processor Lib'
copyright = '2023, Luca Frosini (ISTI-CNR)'
author = 'Luca Frosini (ISTI-CNR)'
# The full version, including alpha/beta/rc tags.
# NOTE(review): kept in sync manually with the Maven <version> in pom.xml —
# remember to update both on release.
release = '1.1.0-SNAPSHOT'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
#html_theme = 'sphinx_rtd_theme'
html_theme = 'sphinxdoc'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

144
docs/examples/gcat-doc.json Normal file
View File

@ -0,0 +1,144 @@
{
"configuration": {
"processors": {
"ZenodoExporter": {
"elaboration": "NONE",
"skip_grants": [
"004260"
],
"additional_html_description": "\n\n<p><a href=\"https://www.gcube-system.org/\">gCube</a> is an open-source software toolkit used for building and operating Hybrid Data Infrastructures enabling the dynamic deployment of Virtual Research Environments, such as the <a href=\"https://www.d4science.org/\">D4Science Infrastructure</a>, by favouring the realisation of reuse-oriented policies.</p>\n\n<p><a href=\"https://www.gcube-system.org/\">gCube</a> has been used to successfully build and operate infrastructures and virtual research environments for application domains ranging from biodiversity to environmental data management and cultural heritage.</p>\n\n<p><a href=\"https://www.gcube-system.org/\">gCube</a> offers components supporting typical data management workflows including data access, curation, processing, and visualisation on a large set of data typologies ranging from primary biodiversity data to geospatial and tabular data.</p>\n\n<p><a href=\"https://www.d4science.org/\">D4Science</a> is a Hybrid Data Infrastructure combining over 500 software components and integrating data from more than 50 different data providers into a coherent and managed system of hardware, software, and data resources. The D4Science infrastructure drastically reduces the cost of ownership, maintenance, and operation thanks to the exploitation of gCube.</p>\n\n<p>&nbsp;</p>\n\n<p>The official source code location of this software version is available at:</p>\n\n<p><a href=\"{{code_location}}\">{{code_location}}</a></p>"
},
"BibLaTeXExporter": {
"elaboration": "ALL"
}
},
"name": "gcat",
"group": "data-catalogue",
"title": "gCube Catalogue (gCat) Service {{version}}",
"license": {
"id": "EUPL-1.1",
"url": "https://opensource.org/licenses/EUPL-1.1"
},
"keywords": [
"gCube",
"Catalogue",
"D4Science"
],
"description": "gCube Catalogue (gCat) Service allows the publication of items in the gCube Catalogue.",
"html_description": "<p>{{description}}</p>",
"authors": [
{
"affiliation": "Istituto di Scienza e Tecnologie dell'Informazione \"A. Faedo\" - CNR, Italy",
"name": "Frosini, Luca",
"orcid": "0000-0003-3183-2291"
}
],
"files": [
{
"url": "https://code-repo.d4science.org/gCubeSystem/{{name}}/archive/v{{version}}.zip",
"desired_name": "{{name}}-v{{version}}.zip"
},
{
"url": "https://code-repo.d4science.org/gCubeSystem/{{name}}/archive/v{{version}}.tar.gz",
"desired_name": "{{name}}-v{{version}}.tar.gz"
},
{
"url": "https://nexus.d4science.org/nexus/service/local/repo_groups/gcube-releases-all/content/org/gcube/{{group}}/{{name}}/{{version}}/{{name}}-{{version}}.war",
"desired_name": "{{name}}-v{{version}}.war"
}
],
"concept_doi_url": "https://doi.org/10.5072/zenodo.1139068",
"code_location": "https://code-repo.d4science.org/gCubeSystem/{{name}}/releases/tag/v{{version}}",
"communities": [
{
"identifier": "gcube-system"
}
],
"grants": [
{
"id": "004260",
"name": "DILIGENT",
"url": "https://cordis.europa.eu/project/id/004260"
},
{
"id": "654119",
"name": "PARTHENOS",
"url": "https://cordis.europa.eu/project/id/654119"
},
{
"id": "675680",
"name": "BlueBRIDGE",
"url": "https://cordis.europa.eu/project/id/675680"
}
],
"export_filename": "{{name}}"
},
"artifacts": [
{
"version": "1.0.0",
"date": "2019-01-10",
"group": "data-publishing",
"files": [
{
"url": "https://nexus.d4science.org/nexus/service/local/repositories/gcube-snapshots/content/org/gcube/data-publishing/gcat/1.0.0-SNAPSHOT/gcat-1.0.0-20190109.172827-2.war",
"desired_name": "{{name}}-v{{version}}.war"
}
],
"gcube_release_version": null,
"gcube_release_ticket": null,
"concept_doi_url": "https://doi.org/10.5072/zenodo.1139445",
"version_doi_url": "https://doi.org/10.5072/zenodo.1139446",
"code_location": "https://code-repo.d4science.org/gCubeSystem/{{name}}"
},
{
"version": "1.1.0",
"date": "2019-02-26",
"group": "data-publishing",
"files": [
{
"url": "https://nexus.d4science.org/nexus/service/local/repo_groups/gcube-releases-all/content/org/gcube/data-publishing/gcat/1.1.0-4.13.1-177071/gcat-1.1.0-4.13.1-177071-src.zip",
"desired_name": "{{name}}-v{{version}}.zip"
},
{
"url": "https://nexus.d4science.org/nexus/service/local/repo_groups/gcube-releases-all/content/org/gcube/data-publishing/gcat/1.1.0-4.13.1-177071/gcat-1.1.0-4.13.1-177071.war",
"desired_name": "{{name}}-v{{version}}.war"
}
],
"gcube_release_version": "4.13.1",
"gcube_release_ticket": "https://support.d4science.org/issues/12988",
"concept_doi_url": "https://doi.org/10.5072/zenodo.1139445",
"version_doi_url": "https://doi.org/10.5072/zenodo.1140461",
"code_location": "https://code-repo.d4science.org/gCubeSystem/{{name}}"
},
{
"version": "1.2.0",
"date": "2019-05-20",
"group": "data-publishing",
"files": [
{
"url": "https://nexus.d4science.org/nexus/service/local/repositories/gcube-snapshots/content/org/gcube/data-publishing/gcat/1.2.0-SNAPSHOT/gcat-1.2.0-20190520.132914-10.war",
"desired_name": "{{name}}-v{{version}}.war"
}
],
"gcube_release_version": null,
"gcube_release_ticket": null,
"concept_doi_url": "https://doi.org/10.5072/zenodo.1139445",
"version_doi_url": "https://doi.org/10.5072/zenodo.1140750",
"code_location": "https://code-repo.d4science.org/gCubeSystem/{{name}}"
},
{
"version": "2.0.0",
"date": "2021-05-04",
"gcube_release_version": "5.2.0",
"gcube_release_ticket": "https://support.d4science.org/issues/19738",
"version_doi_url": "https://doi.org/10.5072/zenodo.1139069"
},
{
"version": "2.1.0",
"date": "2022-01-27",
"gcube_release_version": "5.7.0",
"gcube_release_ticket": "https://support.d4science.org/issues/21685/",
"version_doi_url": "https://doi.org/10.5072/zenodo.1139070"
}
]
}

50
docs/examples/gcat.bib Normal file
View File

@ -0,0 +1,50 @@
@software{gcat_1.0.0,
author = {{Luca Frosini}},
title = {gCube Catalogue (gCat) Service 1.0.0},
abstract = {gCube Catalogue (gCat) Service allows the publication of items in the gCube Catalogue.},
date = {2019-01-10},
version = {1.0.0},
url = {https://doi.org/10.5072/zenodo.1139446},
keywords = {Catalogue, D4Science, gCube}
}
@software{gcat_1.1.0,
author = {{Luca Frosini}},
title = {gCube Catalogue (gCat) Service 1.1.0},
abstract = {gCube Catalogue (gCat) Service allows the publication of items in the gCube Catalogue.},
date = {2019-02-26},
version = {1.1.0},
url = {https://doi.org/10.5072/zenodo.1140461},
keywords = {Catalogue, D4Science, gCube}
}
@software{gcat_1.2.0,
author = {{Luca Frosini}},
title = {gCube Catalogue (gCat) Service 1.2.0},
abstract = {gCube Catalogue (gCat) Service allows the publication of items in the gCube Catalogue.},
date = {2019-05-20},
version = {1.2.0},
url = {https://doi.org/10.5072/zenodo.1140750},
keywords = {Catalogue, D4Science, gCube}
}
@software{gcat_2.0.0,
author = {{Luca Frosini}},
title = {gCube Catalogue (gCat) Service 2.0.0},
abstract = {gCube Catalogue (gCat) Service allows the publication of items in the gCube Catalogue.},
date = {2021-05-04},
version = {2.0.0},
url = {https://doi.org/10.5072/zenodo.1139069},
keywords = {Catalogue, D4Science, gCube}
}
@software{gcat_2.1.0,
author = {{Luca Frosini}},
title = {gCube Catalogue (gCat) Service 2.1.0},
abstract = {gCube Catalogue (gCat) Service allows the publication of items in the gCube Catalogue.},
date = {2022-01-27},
version = {2.1.0},
url = {https://doi.org/10.5072/zenodo.1139070},
keywords = {Catalogue, D4Science, gCube}
}

146
docs/examples/gcat.json Normal file
View File

@ -0,0 +1,146 @@
{
"configuration": {
"processors": {
"ZenodoExporter": {
"elaboration": "NONE",
"skip_grants": [
"004260"
],
"additional_html_description": "\n\n<p><a href=\"https://www.gcube-system.org/\">gCube</a> is an open-source software toolkit used for building and operating Hybrid Data Infrastructures enabling the dynamic deployment of Virtual Research Environments, such as the <a href=\"https://www.d4science.org/\">D4Science Infrastructure</a>, by favouring the realisation of reuse-oriented policies.</p>\n\n<p><a href=\"https://www.gcube-system.org/\">gCube</a> has been used to successfully build and operate infrastructures and virtual research environments for application domains ranging from biodiversity to environmental data management and cultural heritage.</p>\n\n<p><a href=\"https://www.gcube-system.org/\">gCube</a> offers components supporting typical data management workflows including data access, curation, processing, and visualisation on a large set of data typologies ranging from primary biodiversity data to geospatial and tabular data.</p>\n\n<p><a href=\"https://www.d4science.org/\">D4Science</a> is a Hybrid Data Infrastructure combining over 500 software components and integrating data from more than 50 different data providers into a coherent and managed system of hardware, software, and data resources. The D4Science infrastructure drastically reduces the cost of ownership, maintenance, and operation thanks to the exploitation of gCube.</p>\n\n<p>&nbsp;</p>\n\n<p>The official source code location of this software version is available at:</p>\n\n<p><a href=\"{{code_location}}\">{{code_location}}</a></p>"
},
"BibLaTeXExporter": {
"elaboration": "ALL"
}
},
"name": "gcat",
"group": "data-catalogue",
"title": "gCube Catalogue (gCat) Service {{version}}",
"license": {
"id": "EUPL-1.1",
"url": "https://opensource.org/licenses/EUPL-1.1"
},
"keywords": [
"gCube",
"Catalogue",
"D4Science"
],
"description": "gCube Catalogue (gCat) Service allows the publication of items in the gCube Catalogue.",
"html_description": "<p>{{description}}</p>",
"authors": [
{
"affiliation": "Istituto di Scienza e Tecnologie dell'Informazione \"A. Faedo\" - CNR, Italy",
"name": "Frosini, Luca",
"orcid": "0000-0003-3183-2291"
}
],
"files": [
{
"url": "https://code-repo.d4science.org/gCubeSystem/{{name}}/archive/v{{version}}.zip",
"desired_name": "{{name}}-v{{version}}.zip"
},
{
"url": "https://code-repo.d4science.org/gCubeSystem/{{name}}/archive/v{{version}}.tar.gz",
"desired_name": "{{name}}-v{{version}}.tar.gz"
},
{
"url": "https://nexus.d4science.org/nexus/service/local/repo_groups/gcube-releases-all/content/org/gcube/{{group}}/{{name}}/{{version}}/{{name}}-{{version}}.war",
"desired_name": "{{name}}-v{{version}}.war"
}
],
"concept_doi_url": "https://doi.org/10.5072/zenodo.1139068",
"code_location": "https://code-repo.d4science.org/gCubeSystem/{{name}}/releases/tag/v{{version}}",
"communities": [
{
"identifier": "gcube-system"
}
],
"grants": [
{
"id": "004260",
"name": "DILIGENT",
"url": "https://cordis.europa.eu/project/id/004260"
},
{
"id": "654119",
"name": "PARTHENOS",
"url": "https://cordis.europa.eu/project/id/654119"
},
{
"id": "675680",
"name": "BlueBRIDGE",
"url": "https://cordis.europa.eu/project/id/675680"
}
],
"export_filename": "{{name}}"
},
"artifacts": [
{
"version": "1.0.0",
"date": "2019-01-10",
"group": "data-publishing",
"files": [
{
"url": "https://nexus.d4science.org/nexus/service/local/repositories/gcube-snapshots/content/org/gcube/data-publishing/gcat/1.0.0-SNAPSHOT/gcat-1.0.0-20190109.172827-2.war",
"desired_name": "{{name}}-v{{version}}.war"
}
],
"gcube_release_version": null,
"gcube_release_ticket": null,
"concept_doi_url": "https://doi.org/10.5072/zenodo.1139445",
"version_doi_url": "https://doi.org/10.5072/zenodo.1139446",
"code_location": "https://code-repo.d4science.org/gCubeSystem/{{name}}"
},
{
"version": "1.1.0",
"date": "2019-02-26",
"group": "data-publishing",
"files": [
{
"url": "https://nexus.d4science.org/nexus/service/local/repo_groups/gcube-releases-all/content/org/gcube/data-publishing/gcat/1.1.0-4.13.1-177071/gcat-1.1.0-4.13.1-177071-src.zip",
"desired_name": "{{name}}-v{{version}}.zip"
},
{
"url": "https://nexus.d4science.org/nexus/service/local/repo_groups/gcube-releases-all/content/org/gcube/data-publishing/gcat/1.1.0-4.13.1-177071/gcat-1.1.0-4.13.1-177071.war",
"desired_name": "{{name}}-v{{version}}.war"
}
],
"gcube_release_version": "4.13.1",
"gcube_release_ticket": "https://support.d4science.org/issues/12988",
"concept_doi_url": "https://doi.org/10.5072/zenodo.1139445",
"version_doi_url": "https://doi.org/10.5072/zenodo.1140461",
"code_location": "https://code-repo.d4science.org/gCubeSystem/{{name}}"
},
{
"version": "1.2.0",
"date": "2019-05-20",
"group": "data-publishing",
"files": [
{
"url": "https://nexus.d4science.org/nexus/service/local/repositories/gcube-snapshots/content/org/gcube/data-publishing/gcat/1.2.0-SNAPSHOT/gcat-1.2.0-20190520.132914-10.war",
"desired_name": "{{name}}-v{{version}}.war"
}
],
"gcube_release_version": null,
"gcube_release_ticket": null,
"concept_doi_url": "https://doi.org/10.5072/zenodo.1139445",
"version_doi_url": "https://doi.org/10.5072/zenodo.1140750",
"code_location": "https://code-repo.d4science.org/gCubeSystem/{{name}}"
},
{
"version": "2.0.0",
"date": "2021-05-04",
"gcube_release_version": "5.2.0",
"gcube_release_ticket": "https://support.d4science.org/issues/19738",
"version_doi_url": "https://doi.org/10.5072/zenodo.1139069",
"concept_doi_url": "https://doi.org/10.5072/zenodo.1139068"
},
{
"version": "2.1.0",
"date": "2022-01-27",
"gcube_release_version": "5.7.0",
"gcube_release_ticket": "https://support.d4science.org/issues/21685/",
"version_doi_url": "https://doi.org/10.5072/zenodo.1139070",
"concept_doi_url": "https://doi.org/10.5072/zenodo.1139068"
}
]
}

135
docs/index.rst Normal file
View File

@ -0,0 +1,135 @@
#############################
Software Artifact Processor
#############################
This library has been initially designed to deposit software artifacts programmatically in Zenodo.
It is actually a general-purpose library capable of analyzing a list of
software artifacts (represented by the metadata) and process them.
The library currently offers two processors:
* **ZenodoExporter**: It deposits the software artifact in Zenodo, obtaining a DOI;
* **BibLaTeXExporter**: It exports the software artifact in a ``bib`` file using the BibLaTeX format.
Other processors can be easily added in the future by extending the ``SoftwareArtifactProcessor`` class.
The Core of the Library
=======================
The core class of the library is ``Analyser``, which must be initialized with:
* a configuration (JSON Object);
* a list of software artifacts described by their metadata (JSON Array).
The configuration must contain:
* The list of processors to be used and their configuration parameters (required);
* An optional set of metadata that will be used as default metadata for all software artifacts defined in the list.
Exporter configuration requires at least the ``elaboration`` property, which can assume the following values (see ``ElaborationType`` enumerated):
* **ALL**: The exporter analyzes all the software artifacts;
* **UPDATE_ONLY**: The exporter analyzes only the software artifacts to be updated in the target;
* **NEW**: The exporter analyzes only the software artifacts that do not yet exist in the target;
* **NONE**: Does not export the software artifacts to the target; each software artifact is elaborated without effectively exporting it. It is a dry run.
The processors are executed in the order they are defined.
A processor could produce metadata itself (e.g., the obtained Zenodo DOI).
The input metadata + the metadata generated by a processor are made available to the subsequent processor.
The list of software artifacts contains an arbitrary set of metadata.
It depends on the processor using certain metadata, among others.
While analyzing each software artifact, the ``Analyser`` links it with the previously elaborated software artifact.
Relating an artifact with the previous one is helpful if a processor needs to link them somehow.
It is the processor's responsibility to connect them, using specific metadata values as conditions.
The library dynamically calculates the value of the metadata of a software artifact using the following features:
* A property can contain the value of another property indicated as a variable using the referred property name;
* The library merges the metadata of the software artifact with the metadata defined in the configuration;
* The library calculates the final metadata values, replacing the variables only after merging the properties.
The following shows an example of configuration.
.. literalinclude:: ./examples/gcat-doc.json
:language: JSON
This JSON contains two properties at the top level:
* ``configuration`` : a JSON Object that contains the ``processors`` configuration list mentioned above and a set of properties to be used as default for each artifact;
* ``artifacts``: a JSON Array containing the artifact's list to be processed.
In this example, the artifacts property comprises different versions of the same software, but different artifacts can be processed together.
As the reader can notice, a property defined can be used as a variable inside the value of another.
For example
.. code-block:: JSON
{
"title": "gCube Catalogue (gCat) Service {{version}}"
}
The ``title`` value will be evaluated while analyzing the artifact.
Please note that there is no right place to define a property.
Please remember that the metadata contained in the global configuration is merged with the artifact's metadata.
If the same property has been defined, the value of the artifact specification is used (it is more specific).
After the merge, the values containing references to other values are replaced.
For this reason, the title for gcat 1.0.0 will be ``gCube Catalogue (gCat) Service 1.0.0``.
As you can see from the example, the Concept DOI of gcat 1.X.X differs from the Concept DOI of gcat 2.X.X.
This means that Zenodo will have two different concepts, each with different versions.
Moreover, the group for gcat 1.X.X is ``data-publishing`` which differs from the default value coming from the global configuration, which is ``data-catalogue``.
ZenodoExporter
--------------
At the end of the processing phase, the library produces a file containing:
* the configuration (JSON Object);
* the list of software artifacts described by their metadata (JSON Array) with actualized output.
The output of the elaboration is the following.
.. literalinclude:: ./examples/gcat.json
:language: JSON
The output produced by this processor is almost the same as the input JSON,
except for the properties ``concept_doi_url`` and ``version_doi_url``.
When the library deposits a concept on Zenodo, it creates the ``concept_doi_url`` and ``version_doi_url``.
The output file can be used in a future run to update the deposit.
BibLaTeXExporter
----------------
This processor produces an output file using the computed metadata + the metadata obtained from ``ZenodoExporter`` (i.e., ``version_doi_url``).
The format of the output is defined in this template:
.. literalinclude:: ../src/main/resources/biblatex.template
:language: bib
Please note that for each entry there are three braces: ``author = {{{author}}}``.
The first two are printed as they are because of the bib format, and the third is used as a variable container, as done in values of the JSON properties.
The output generated by ``BibLaTeXExporter`` is
.. literalinclude:: ./examples/gcat.bib
:language: bib

35
docs/make.bat Normal file
View File

@ -0,0 +1,35 @@
@ECHO OFF
REM Run from the directory containing this script, whatever the caller's cwd is.
pushd %~dp0
REM Command file for Sphinx documentation
REM Allow the caller to point at a specific sphinx-build; default to the one on PATH.
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build
REM No target given: show Sphinx's help listing.
if "%1" == "" goto help
REM Probe that sphinx-build is callable; errorlevel 9009 means "command not found".
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.https://www.sphinx-doc.org/
exit /b 1
)
REM Delegate the requested target (%1) to Sphinx's "make mode".
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end
:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
:end
popd

30
pom.xml
View File

@ -7,7 +7,7 @@
</parent>
<groupId>org.gcube.common</groupId>
<artifactId>software-versions-processor-lib</artifactId>
<version>1.0.0</version>
<version>1.1.0-SNAPSHOT</version>
<name>gCube Software Versions Processor Lib</name>
<description>
gCube Software Versions Processor Lib is a library which helps to process software versions to perform actions such as:
@ -28,7 +28,7 @@
<dependency>
<groupId>org.gcube.distribution</groupId>
<artifactId>gcube-bom</artifactId>
<version>2.2.0</version>
<version>2.4.0-SNAPSHOT</version>
<type>pom</type>
<scope>import</scope>
</dependency>
@ -84,4 +84,30 @@
</dependency>
</dependencies>
<build>
<plugins>
<!-- Sphinx plugin' -->
<plugin>
<groupId>kr.motd.maven</groupId>
<artifactId>sphinx-maven-plugin</artifactId>
<version>2.10.0</version>
<configuration>
<outputDirectory>${project.build.directory}/${project.artifactId}-${project.version}/docs</outputDirectory>
<builder>html</builder>
<configDirectory>${basedir}/docs</configDirectory>
<sourceDirectory>${basedir}/docs</sourceDirectory>
</configuration>
<executions>
<execution>
<phase>process-resources</phase>
<goals>
<goal>generate</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@ -13,12 +13,12 @@ import org.gcube.com.fasterxml.jackson.databind.JsonNode;
import org.gcube.com.fasterxml.jackson.databind.ObjectMapper;
import org.gcube.com.fasterxml.jackson.databind.node.ArrayNode;
import org.gcube.com.fasterxml.jackson.databind.node.ObjectNode;
import org.gcube.common.software.export.SoftwareVersionExporter;
import org.gcube.common.software.model.ExporterConfig;
import org.gcube.common.software.model.ProcessorConfig;
import org.gcube.common.software.model.GlobalConfig;
import org.gcube.common.software.model.SoftwareVersionConfig;
import org.gcube.common.software.model.SoftwareVersionFile;
import org.gcube.common.software.model.SoftwareArtifactMetadata;
import org.gcube.common.software.model.SoftwareArtifactFile;
import org.gcube.common.software.model.Variables;
import org.gcube.common.software.processor.SoftwareArtifactProcessor;
import org.gcube.common.software.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -34,7 +34,7 @@ public class Analyser {
protected File outputDirectory;
protected ObjectNode globalConfiguration;
protected ArrayNode versionConfigurations;
protected ArrayNode artifactMetadataArray;
public Analyser() throws Exception {
this.objectMapper = Utils.getObjectMapper();
@ -48,20 +48,20 @@ public class Analyser {
return globalConfiguration;
}
public void setGlobalConfiguration(ObjectNode originalGlobal) {
this.globalConfiguration = originalGlobal.deepCopy();
public void setGlobalConfiguration(ObjectNode originalGlobalConfiguration) {
this.globalConfiguration = originalGlobalConfiguration.deepCopy();
}
public ArrayNode getVersionConfigurations() {
return versionConfigurations;
public ArrayNode getArtifactMetadataArray() {
return artifactMetadataArray;
}
public void setVersionConfigurations(ArrayNode originalVersions) {
this.versionConfigurations = originalVersions.deepCopy();
public void setArtifactMetadataArray(ArrayNode original) {
this.artifactMetadataArray = original.deepCopy();
}
protected SoftwareVersionConfig actualizeSoftwareVersionConfig(JsonNode version) throws Exception {
((ObjectNode) version).remove(GlobalConfig.EXPORTERS_PROPERTY_NAME);
protected SoftwareArtifactMetadata actualizeSoftwareArtifactConfig(JsonNode version) throws Exception {
((ObjectNode) version).remove(GlobalConfig.PROCESSORS_PROPERTY_NAME);
Variables variables = objectMapper.treeToValue(version, Variables.class);
Set<String> missingVariables = variables.parse();
int size = missingVariables.size();
@ -70,10 +70,10 @@ public class Analyser {
missingVariables.toArray(new String[size]).toString());
}
JsonNode swVersion = objectMapper.convertValue(variables.getProperties(), JsonNode.class);
SoftwareVersionConfig softwareVersionConfig = objectMapper.treeToValue(swVersion, SoftwareVersionConfig.class);
SoftwareArtifactMetadata softwareVersionConfig = objectMapper.treeToValue(swVersion, SoftwareArtifactMetadata.class);
List<SoftwareVersionFile> svfs = softwareVersionConfig.getFiles();
for(SoftwareVersionFile svf : svfs) {
List<SoftwareArtifactFile> svfs = softwareVersionConfig.getFiles();
for(SoftwareArtifactFile svf : svfs) {
URL url = svf.getURL();
String urlString = variables.replaceAllVariables(url.toString());
svf.setURL(new URL(urlString));
@ -94,22 +94,22 @@ public class Analyser {
return globalConfig;
}
protected ExporterConfig actualizeExporterConfig(ExporterConfig exporterConfig, SoftwareVersionConfig softwareVersionConfig) throws Exception {
ObjectNode versionNode = objectMapper.valueToTree(softwareVersionConfig);
protected ProcessorConfig actualizeProcessorConfig(ProcessorConfig processorConfig, SoftwareArtifactMetadata softwareArtifactMetadata) throws Exception {
ObjectNode versionNode = objectMapper.valueToTree(softwareArtifactMetadata);
Variables versionVariables = objectMapper.treeToValue(versionNode, Variables.class);
ObjectNode node = objectMapper.valueToTree(exporterConfig);
ObjectNode node = objectMapper.valueToTree(processorConfig);
Variables variables = objectMapper.treeToValue(node, Variables.class);
variables.parseWith(versionVariables);
JsonNode ec = objectMapper.convertValue(variables.getProperties(), JsonNode.class);
return objectMapper.treeToValue(ec, ExporterConfig.class);
return objectMapper.treeToValue(ec, ProcessorConfig.class);
}
protected void checkExporters(Set<String> availableExporterNames, Set<String> requestedExporterNames) throws Exception {
if(!availableExporterNames.containsAll(requestedExporterNames)) {
requestedExporterNames.removeAll(availableExporterNames);
throw new Exception("The following requested exporters does not exists " + requestedExporterNames);
protected void checkProcessors(Set<String> availableProcessorNames, Set<String> requestedProcessorNames) throws Exception {
if(!availableProcessorNames.containsAll(requestedProcessorNames)) {
requestedProcessorNames.removeAll(availableProcessorNames);
throw new Exception("The following requested exporters does not exists " + requestedProcessorNames);
}
}
@ -118,9 +118,9 @@ public class Analyser {
GlobalConfig globalConfig = getGlobalConfig(globalConfiguration);
Map<String, Class<? extends SoftwareVersionExporter>> availableExporters = SoftwareVersionExporter.getAvailableExporters();
Map<String,ExporterConfig> requestedExporters = globalConfig.getExporters();
checkExporters(availableExporters.keySet(), requestedExporters.keySet());
Map<String, Class<? extends SoftwareArtifactProcessor>> availableProcessors = SoftwareArtifactProcessor.getAvailableProcessors();
Map<String,ProcessorConfig> requestedProcessors = globalConfig.getProcessorConfigurations();
checkProcessors(availableProcessors.keySet(), requestedProcessors.keySet());
if(outputDirectory==null) {
outputDirectory = new File(globalConfig.getFileName());
@ -130,48 +130,48 @@ public class Analyser {
Files.createDirectories(outputDirectory.toPath());
}
SoftwareVersionConfig previous = null;
SoftwareArtifactMetadata previous = null;
int i = 0;
List<File> outputFiles = new ArrayList<>();
for(i=0; i<versionConfigurations.size(); i++) {
ObjectNode versionConfig = (ObjectNode) versionConfigurations.get(i).deepCopy();
JsonNode mergedVersionConfig = Utils.merge(globalConfiguration, versionConfig);
for(i=0; i<artifactMetadataArray.size(); i++) {
ObjectNode artifactMetadata = (ObjectNode) artifactMetadataArray.get(i).deepCopy();
JsonNode mergedArtifactMetadata = Utils.merge(globalConfiguration, artifactMetadata);
SoftwareVersionConfig softwareVersionConfig = actualizeSoftwareVersionConfig(mergedVersionConfig);
softwareVersionConfig.setOriginalJson(versionConfig);
softwareVersionConfig.setPrevious(previous);
SoftwareArtifactMetadata softwareArtifactMetadata = actualizeSoftwareArtifactConfig(mergedArtifactMetadata);
softwareArtifactMetadata.setOriginalJson(artifactMetadata);
softwareArtifactMetadata.setPrevious(previous);
logger.trace("Going to process {}", softwareVersionConfig.getTitle());
logger.trace("Going to process {}", softwareArtifactMetadata.getTitle());
for(String className : requestedExporters.keySet()) {
for(String className : requestedProcessors.keySet()) {
logger.debug("Going to export with {}", className);
Class<? extends SoftwareVersionExporter> exporterClass = availableExporters.get(className);
Class<? extends SoftwareArtifactProcessor> processorClass = availableProcessors.get(className);
ExporterConfig exporterConfig = requestedExporters.get(className);
exporterConfig = actualizeExporterConfig(exporterConfig, softwareVersionConfig);
ProcessorConfig processorConfig = requestedProcessors.get(className);
processorConfig = actualizeProcessorConfig(processorConfig, softwareArtifactMetadata);
SoftwareVersionExporter sve = exporterClass.newInstance();
sve.setOutputDirectory(outputDirectory);
sve.setGlobalConfig(globalConfig);
sve.setSoftwareVersionConfig(softwareVersionConfig);
sve.setExporterConfig(exporterConfig);
sve.setFirst(i==0);
SoftwareArtifactProcessor sap = processorClass.newInstance();
sap.setOutputDirectory(outputDirectory);
sap.setGlobalConfig(globalConfig);
sap.setSoftwareArtifactConfig(softwareArtifactMetadata);
sap.setProcessorConfig(processorConfig);
sap.setFirst(i==0);
boolean last = i==(versionConfigurations.size()-1);
sve.setLast(last);
sve.export();
boolean last = i==(artifactMetadataArray.size()-1);
sap.setLast(last);
sap.export();
if(last) {
outputFiles.add(sve.getOutputFile());
outputFiles.add(sap.getOutputFile());
}
}
Thread.sleep(TimeUnit.SECONDS.toMillis(2));
previous = softwareVersionConfig;
previous = softwareArtifactMetadata;
}
return outputFiles;

View File

@ -15,8 +15,8 @@ public class AnalyserFactory {
public static final String EXPORT_FILENAME_EXTENSION = ".json";
public static final String GLOBAL_PROPERTY_NAME = "global";
public static final String VERSIONS_PROPERTY_NAME = "versions";
public static final String CONFIGURATION_PROPERTY_NAME = "configuration";
public static final String ARTIFACTS_PROPERTY_NAME = "artifacts";
public static Analyser getAnalyser(File jsonFile) throws Exception {
ObjectMapper objectMapper = Utils.getObjectMapper();
@ -24,18 +24,35 @@ public class AnalyserFactory {
return getAnalyser(jsonNode);
}
public static Analyser getAnalyser(String json) throws Exception {
public static Analyser getAnalyser(String inputJson) throws Exception {
ObjectMapper objectMapper = Utils.getObjectMapper();
JsonNode jsonNode = objectMapper.readTree(json);
return getAnalyser(jsonNode);
JsonNode inputNode = objectMapper.readTree(inputJson);
return getAnalyser(inputNode);
}
public static Analyser getAnalyser(JsonNode jsonNode) throws Exception {
public static Analyser getAnalyser(JsonNode inputNode) throws Exception {
Analyser analyser = new Analyser();
ObjectNode originalGlobal = (ObjectNode) jsonNode.get(GLOBAL_PROPERTY_NAME);
analyser.setGlobalConfiguration(originalGlobal);
ArrayNode originalVersions = (ArrayNode) jsonNode.get(VERSIONS_PROPERTY_NAME);
analyser.setVersionConfigurations(originalVersions);
ObjectNode originalGlobalConfiguration = (ObjectNode) inputNode.get(CONFIGURATION_PROPERTY_NAME);
analyser.setGlobalConfiguration(originalGlobalConfiguration);
ArrayNode originalArtifactMetadataArray = (ArrayNode) inputNode.get(ARTIFACTS_PROPERTY_NAME);
analyser.setArtifactMetadataArray(originalArtifactMetadataArray);
return analyser;
}
public static Analyser getAnalyser(String localConfiguration, String inputJson) throws Exception {
ObjectMapper objectMapper = Utils.getObjectMapper();
JsonNode localConfigurationNode = objectMapper.readTree(localConfiguration);
JsonNode inputNode = objectMapper.readTree(inputJson);
return getAnalyser(localConfigurationNode, inputNode);
}
public static Analyser getAnalyser(JsonNode localConfiguration, JsonNode inputNode) throws Exception {
Analyser analyser = new Analyser();
ObjectNode inputConfiguration = (ObjectNode) inputNode.get(CONFIGURATION_PROPERTY_NAME);
ObjectNode mergedConfiguration = (ObjectNode) Utils.merge(localConfiguration, inputConfiguration);
analyser.setGlobalConfiguration(mergedConfiguration);
ArrayNode originalArtifactMetadataArray = (ArrayNode) inputNode.get(ARTIFACTS_PROPERTY_NAME);
analyser.setArtifactMetadataArray(originalArtifactMetadataArray);
return analyser;
}

View File

@ -1,92 +0,0 @@
package org.gcube.common.software.export;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
import org.gcube.common.software.export.biblatex.BibLaTeXSoftwareVersionExporter;
import org.gcube.common.software.export.zenodo.ZenodoSoftwareVersionExporter;
import org.gcube.common.software.model.ExporterConfig;
import org.gcube.common.software.model.GlobalConfig;
import org.gcube.common.software.model.SoftwareVersionConfig;
/**
* @author Luca Frosini (ISTI - CNR)
*/
public abstract class SoftwareVersionExporter {
protected static Map<String, Class<? extends SoftwareVersionExporter>> availableExporters;
static {
availableExporters = new HashMap<>();
add(ZenodoSoftwareVersionExporter.class);
add(BibLaTeXSoftwareVersionExporter.class);
}
private static void add(Class<? extends SoftwareVersionExporter> clz) {
availableExporters.put(clz.getSimpleName(), clz);
}
public static Map<String, Class<? extends SoftwareVersionExporter>> getAvailableExporters() {
return availableExporters;
}
protected File outputDirectory;
protected GlobalConfig globalConfig;
protected SoftwareVersionConfig softwareVersionConfig;
protected ExporterConfig exporterConfig;
protected boolean first;
protected boolean last;
protected final String exportFileNameExtension;
protected SoftwareVersionExporter(String exportFileNameExtension) {
this.exportFileNameExtension = exportFileNameExtension;
}
public void setOutputDirectory(File outputDirectory) {
this.outputDirectory = outputDirectory;
}
public GlobalConfig getGlobalConfig() {
return globalConfig;
}
public void setGlobalConfig(GlobalConfig globalConfig) {
this.globalConfig = globalConfig;
}
public SoftwareVersionConfig getSoftwareVersionConfig() {
return softwareVersionConfig;
}
public void setSoftwareVersionConfig(SoftwareVersionConfig softwareVersionConfig) {
this.softwareVersionConfig = softwareVersionConfig;
}
public ExporterConfig getExporterConfig() {
return exporterConfig;
}
public void setExporterConfig(ExporterConfig processorConfig) {
this.exporterConfig = processorConfig;
}
public void setFirst(boolean first) {
this.first = first;
}
public void setLast(boolean last) {
this.last = last;
}
public abstract void export() throws Exception;
public File getOutputFile() throws Exception {
String fileName = globalConfig.getFileName()+exportFileNameExtension;
File file = new File(outputDirectory, fileName);
return file;
}
}

View File

@ -20,10 +20,10 @@ public class GlobalConfig {
public static final String EXPORT_FILENAME_PROPERTY_NAME = "export_filename";
public static final String EXPORTERS_PROPERTY_NAME = "exporters";
public static final String PROCESSORS_PROPERTY_NAME = "processors";
@JsonProperty(EXPORTERS_PROPERTY_NAME)
protected Map<String,ExporterConfig> exporters;
@JsonProperty(PROCESSORS_PROPERTY_NAME)
protected Map<String,ProcessorConfig> processorConfigurations;
protected Map<String, JsonNode> properties;
@ -34,8 +34,8 @@ public class GlobalConfig {
this.properties = new LinkedHashMap<>();
}
public Map<String,ExporterConfig> getExporters() {
return exporters;
public Map<String,ProcessorConfig> getProcessorConfigurations() {
return processorConfigurations;
}
@JsonIgnore

View File

@ -13,7 +13,7 @@ import org.gcube.com.fasterxml.jackson.databind.JsonNode;
/**
* @author Luca Frosini (ISTI - CNR)
*/
public class ExporterConfig {
public class ProcessorConfig {
public static final String ELABORATION_PROPERTY_NAME = "elaboration";
@ -23,11 +23,11 @@ public class ExporterConfig {
protected Map<String, JsonNode> properties;
public ExporterConfig() {
public ProcessorConfig() {
properties = new LinkedHashMap<>();
}
public ElaborationType getElaboration() {
public ElaborationType getElaborationType() {
return elaboration;
}

View File

@ -16,7 +16,7 @@ import org.gcube.com.fasterxml.jackson.annotation.JsonSetter;
/**
* @author Luca Frosini (ISTI - CNR)
*/
public class SoftwareVersionFile {
public class SoftwareArtifactFile {
@JsonProperty(value="url")
protected URL url;

View File

@ -19,8 +19,9 @@ import org.gcube.common.software.utils.Utils;
/**
* @author Luca Frosini (ISTI - CNR)
* TO BE GENERALIZED
*/
public class SoftwareVersionConfig {
public class SoftwareArtifactMetadata {
public static final String PREVIOUS_CONCEPT_DOI_VALUE = "PREVIOUS";
@ -39,10 +40,10 @@ public class SoftwareVersionConfig {
public static final String GRANTS_PROPERTY_NAME = "grants";
@JsonIgnore
protected SoftwareVersionConfig previous;
protected SoftwareArtifactMetadata previous;
@JsonIgnore
protected SoftwareVersionConfig next;
protected SoftwareArtifactMetadata next;
@JsonIgnore
protected Boolean newDeposition;
@ -73,7 +74,7 @@ public class SoftwareVersionConfig {
protected ArrayNode authors;
@JsonProperty(FILES_PROPERTY_NAME)
protected List<SoftwareVersionFile> files;
protected List<SoftwareArtifactFile> files;
@JsonProperty(CODE_LOCATION_PROPERTY_NAME)
protected String codeLocation;
@ -91,18 +92,18 @@ public class SoftwareVersionConfig {
protected Map<String, JsonNode> additionalProperties;
public SoftwareVersionConfig() {
public SoftwareArtifactMetadata() {
this.newDeposition = false;
this.additionalProperties = new LinkedHashMap<>();
}
@JsonIgnore
public SoftwareVersionConfig getPrevious() {
public SoftwareArtifactMetadata getPrevious() {
return previous;
}
@JsonIgnore
public void setPrevious(SoftwareVersionConfig previous) {
public void setPrevious(SoftwareArtifactMetadata previous) {
this.previous = previous;
if(previous!=null) {
this.previous.next = this;
@ -110,7 +111,7 @@ public class SoftwareVersionConfig {
}
@JsonIgnore
public SoftwareVersionConfig getNext() {
public SoftwareArtifactMetadata getNext() {
return next;
}
@ -176,7 +177,7 @@ public class SoftwareVersionConfig {
return authors;
}
public List<SoftwareVersionFile> getFiles() {
public List<SoftwareArtifactFile> getFiles() {
return files;
}

View File

@ -0,0 +1,92 @@
package org.gcube.common.software.processor;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
import org.gcube.common.software.model.ProcessorConfig;
import org.gcube.common.software.model.GlobalConfig;
import org.gcube.common.software.model.SoftwareArtifactMetadata;
import org.gcube.common.software.processor.biblatex.BibLaTeXExporter;
import org.gcube.common.software.processor.zenodo.ZenodoExporter;
/**
* @author Luca Frosini (ISTI - CNR)
*/
public abstract class SoftwareArtifactProcessor {
protected static Map<String, Class<? extends SoftwareArtifactProcessor>> availableProcessors;
static {
availableProcessors = new HashMap<>();
add(ZenodoExporter.class);
add(BibLaTeXExporter.class);
}
private static void add(Class<? extends SoftwareArtifactProcessor> clz) {
availableProcessors.put(clz.getSimpleName(), clz);
}
public static Map<String, Class<? extends SoftwareArtifactProcessor>> getAvailableProcessors() {
return availableProcessors;
}
protected File outputDirectory;
protected GlobalConfig globalConfig;
protected SoftwareArtifactMetadata softwareArtifactMetadata;
protected ProcessorConfig processorConfig;
protected boolean first;
protected boolean last;
protected final String exportFileNameExtension;
protected SoftwareArtifactProcessor(String exportFileNameExtension) {
this.exportFileNameExtension = exportFileNameExtension;
}
public void setOutputDirectory(File outputDirectory) {
this.outputDirectory = outputDirectory;
}
public GlobalConfig getGlobalConfig() {
return globalConfig;
}
public void setGlobalConfig(GlobalConfig globalConfig) {
this.globalConfig = globalConfig;
}
public SoftwareArtifactMetadata getSoftwareArtifactConfig() {
return softwareArtifactMetadata;
}
public void setSoftwareArtifactConfig(SoftwareArtifactMetadata softwareArtifactMetadata) {
this.softwareArtifactMetadata = softwareArtifactMetadata;
}
public ProcessorConfig getProcessorConfig() {
return processorConfig;
}
public void setProcessorConfig(ProcessorConfig processorConfig) {
this.processorConfig = processorConfig;
}
public void setFirst(boolean first) {
this.first = first;
}
public void setLast(boolean last) {
this.last = last;
}
public abstract void export() throws Exception;
public File getOutputFile() throws Exception {
String fileName = globalConfig.getFileName() + exportFileNameExtension;
File file = new File(outputDirectory, fileName);
return file;
}
}

View File

@ -1,4 +1,4 @@
package org.gcube.common.software.export.biblatex;
package org.gcube.common.software.processor.biblatex;
import java.io.BufferedWriter;
import java.io.File;
@ -9,9 +9,9 @@ import java.util.Set;
import org.gcube.com.fasterxml.jackson.databind.JsonNode;
import org.gcube.com.fasterxml.jackson.databind.node.ArrayNode;
import org.gcube.common.software.export.SoftwareVersionExporter;
import org.gcube.common.software.model.ElaborationType;
import org.gcube.common.software.model.Variables;
import org.gcube.common.software.processor.SoftwareArtifactProcessor;
import org.gcube.common.software.utils.FileUtils;
import org.gcube.common.software.utils.Utils;
import org.slf4j.Logger;
@ -20,15 +20,15 @@ import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR)
*/
public class BibLaTeXSoftwareVersionExporter extends SoftwareVersionExporter {
public class BibLaTeXExporter extends SoftwareArtifactProcessor {
private static final Logger logger = LoggerFactory.getLogger(BibLaTeXSoftwareVersionExporter.class);
private static final Logger logger = LoggerFactory.getLogger(BibLaTeXExporter.class);
public static final String EXPORT_FILENAME_EXTENSION = ".bib";
public static final String TEMPLATE_FILENAME = "biblatex.template";
public BibLaTeXSoftwareVersionExporter() {
super(BibLaTeXSoftwareVersionExporter.EXPORT_FILENAME_EXTENSION);
public BibLaTeXExporter() {
super(BibLaTeXExporter.EXPORT_FILENAME_EXTENSION);
}
@Override
@ -41,9 +41,9 @@ public class BibLaTeXSoftwareVersionExporter extends SoftwareVersionExporter {
exportFile.createNewFile();
}
String title = softwareVersionConfig.getTitle();
String title = softwareArtifactMetadata.getTitle();
ElaborationType export = exporterConfig.getElaboration();
ElaborationType export = processorConfig.getElaborationType();
switch (export) {
case ALL:
@ -51,7 +51,7 @@ public class BibLaTeXSoftwareVersionExporter extends SoftwareVersionExporter {
break;
case UPDATE_ONLY:
if (softwareVersionConfig.isNewDeposition()) {
if (softwareArtifactMetadata.isNewDeposition()) {
logger.info("Skipping export for {}.", title);
return;
}
@ -59,7 +59,7 @@ public class BibLaTeXSoftwareVersionExporter extends SoftwareVersionExporter {
break;
case NEW:
if (!softwareVersionConfig.isNewDeposition()) {
if (!softwareArtifactMetadata.isNewDeposition()) {
logger.info("Skipping export for {}.", title);
return;
}
@ -157,10 +157,10 @@ public class BibLaTeXSoftwareVersionExporter extends SoftwareVersionExporter {
protected String parseTemplate(String template) throws Exception {
String s = template;
s = Utils.replaceVariable("author", getAuthors(softwareVersionConfig.getAuthors()), s);
s = Utils.replaceVariable("keywords", getKeywords(softwareVersionConfig.getKeywords()), s);
s = Utils.replaceVariable("author", getAuthors(softwareArtifactMetadata.getAuthors()), s);
s = Utils.replaceVariable("keywords", getKeywords(softwareArtifactMetadata.getKeywords()), s);
Variables variables = softwareVersionConfig.getVariables();
Variables variables = softwareArtifactMetadata.getVariables();
s = variables.replaceAllVariables(s);
// s = addNotes(s);
@ -168,8 +168,8 @@ public class BibLaTeXSoftwareVersionExporter extends SoftwareVersionExporter {
}
protected void generate() throws Exception {
String title = softwareVersionConfig.getTitle();
if(softwareVersionConfig.getVersionDOIURL()==null) {
String title = softwareArtifactMetadata.getTitle();
if(softwareArtifactMetadata.getVersionDOIURL()==null) {
logger.info("No Version DOI URL for {}. It will not be exported in BibLaTex format.", title);
return;
}

View File

@ -1,4 +1,4 @@
package org.gcube.common.software.export.zenodo;
package org.gcube.common.software.processor.zenodo;
import java.io.BufferedReader;
import java.io.File;
@ -30,10 +30,10 @@ import org.gcube.com.fasterxml.jackson.databind.node.ObjectNode;
import org.gcube.common.gxhttp.request.GXHTTPStringRequest;
import org.gcube.common.software.analyser.AnalyserFactory;
import org.gcube.common.software.config.Config;
import org.gcube.common.software.export.SoftwareVersionExporter;
import org.gcube.common.software.model.ElaborationType;
import org.gcube.common.software.model.SoftwareVersionConfig;
import org.gcube.common.software.model.SoftwareVersionFile;
import org.gcube.common.software.model.SoftwareArtifactMetadata;
import org.gcube.common.software.processor.SoftwareArtifactProcessor;
import org.gcube.common.software.model.SoftwareArtifactFile;
import org.gcube.common.software.utils.Utils;
import org.glassfish.jersey.client.ClientProperties;
import org.glassfish.jersey.media.multipart.FormDataMultiPart;
@ -45,9 +45,9 @@ import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR)
*/
public class ZenodoSoftwareVersionExporter extends SoftwareVersionExporter {
public class ZenodoExporter extends SoftwareArtifactProcessor {
private static final Logger logger = LoggerFactory.getLogger(ZenodoSoftwareVersionExporter.class);
private static final Logger logger = LoggerFactory.getLogger(ZenodoExporter.class);
public static final String EXPORT_FILENAME_EXTENSION = ".json";
@ -92,12 +92,12 @@ public class ZenodoSoftwareVersionExporter extends SoftwareVersionExporter {
return map;
}
public ZenodoSoftwareVersionExporter() {
super(ZenodoSoftwareVersionExporter.EXPORT_FILENAME_EXTENSION);
public ZenodoExporter() {
super(ZenodoExporter.EXPORT_FILENAME_EXTENSION);
}
protected void addFilesToDeposition(List<File> files ) throws Exception {
String depositID = getZenodoIDFromDOIURL(softwareVersionConfig.getVersionDOIURL());
String depositID = getZenodoIDFromDOIURL(softwareArtifactMetadata.getVersionDOIURL());
String newFilePath = DEPOSTION_FILES_PATH.replace(":id", depositID);
URL url = new URL(zenodoBaseURL, newFilePath);
@ -132,7 +132,7 @@ public class ZenodoSoftwareVersionExporter extends SoftwareVersionExporter {
gxHTTPStringRequest.header("Content-Type", "application/json");
gxHTTPStringRequest.header("Accept", "application/json");
String id = getZenodoIDFromDOIURL(softwareVersionConfig.getVersionDOIURL());
String id = getZenodoIDFromDOIURL(softwareArtifactMetadata.getVersionDOIURL());
gxHTTPStringRequest.path(DEPOSITION_PATH.replace(":id", id));
ObjectNode metadata = generateMetadata();
@ -149,7 +149,7 @@ public class ZenodoSoftwareVersionExporter extends SoftwareVersionExporter {
gxHTTPStringRequest.header("Content-Type", "application/json");
gxHTTPStringRequest.header("Accept", "application/json");
String id = getZenodoIDFromDOIURL(softwareVersionConfig.getVersionDOIURL());
String id = getZenodoIDFromDOIURL(softwareArtifactMetadata.getVersionDOIURL());
gxHTTPStringRequest.path(DEPOSTION_PUBLISH_PATH.replace(":id", id));
HttpURLConnection httpURLConnection = gxHTTPStringRequest.post();
@ -158,7 +158,7 @@ public class ZenodoSoftwareVersionExporter extends SoftwareVersionExporter {
protected void finalize() throws Exception {
List<File> files = new ArrayList<>();
for(SoftwareVersionFile svf : softwareVersionConfig.getFiles()) {
for(SoftwareArtifactFile svf : softwareArtifactMetadata.getFiles()) {
File file = svf.downloadFile();
files.add(file);
Thread.sleep(TimeUnit.SECONDS.toMillis(1));
@ -273,23 +273,23 @@ public class ZenodoSoftwareVersionExporter extends SoftwareVersionExporter {
response = getResponse(httpURLConnection);
String conceptDOIURL = createZenodoDOIURLFromID(response.get("conceptrecid").asText());
softwareVersionConfig.setConceptDOIURL(conceptDOIURL);
softwareArtifactMetadata.setConceptDOIURL(conceptDOIURL);
String versionDOIURL = createZenodoDOIURLFromID(response.get("id").asText());
softwareVersionConfig.setVersionDOIURL(versionDOIURL);
softwareArtifactMetadata.setVersionDOIURL(versionDOIURL);
finalize();
}
private ArrayNode getAuthors(){
ArrayNode authors = softwareVersionConfig.getAuthors().deepCopy();
ArrayNode authors = softwareArtifactMetadata.getAuthors().deepCopy();
return authors;
}
private String getDescription() {
StringBuffer stringBuffer = new StringBuffer();
stringBuffer.append(softwareVersionConfig.getAdditionalProperty(HTML_DESCRIPTION_CONFIG_FIELD_NAME).asText());
stringBuffer.append(softwareArtifactMetadata.getAdditionalProperty(HTML_DESCRIPTION_CONFIG_FIELD_NAME).asText());
if(exporterConfig.getProperty(ADDITIONAL_HTML_DESCRIPTION_CONFIG_FIELD_NAME)!=null) {
String additionalHTMLDescription = exporterConfig.getProperty(ADDITIONAL_HTML_DESCRIPTION_CONFIG_FIELD_NAME).asText();
if(processorConfig.getProperty(ADDITIONAL_HTML_DESCRIPTION_CONFIG_FIELD_NAME)!=null) {
String additionalHTMLDescription = processorConfig.getProperty(ADDITIONAL_HTML_DESCRIPTION_CONFIG_FIELD_NAME).asText();
stringBuffer.append(additionalHTMLDescription);
}
@ -299,12 +299,12 @@ public class ZenodoSoftwareVersionExporter extends SoftwareVersionExporter {
private ArrayNode getGrants(){
ObjectMapper objectMapper = Utils.getObjectMapper();
ArrayNode grants = objectMapper.createArrayNode();
ArrayNode arrayNode = (ArrayNode) exporterConfig.getProperty(SKIP_GRANTS_CONFIG_FIELD_NAME);
ArrayNode arrayNode = (ArrayNode) processorConfig.getProperty(SKIP_GRANTS_CONFIG_FIELD_NAME);
Set<String> idToSkip = new HashSet<>();
for(JsonNode idNode : arrayNode) {
idToSkip.add(idNode.asText());
}
for(JsonNode g : softwareVersionConfig.getGrants()) {
for(JsonNode g : softwareArtifactMetadata.getGrants()) {
String id = g.get("id").asText();
if(idToSkip.contains(id)) {
continue;
@ -317,7 +317,7 @@ public class ZenodoSoftwareVersionExporter extends SoftwareVersionExporter {
}
private ArrayNode getKeywords(){
Set<String> keywords = softwareVersionConfig.getKeywords();
Set<String> keywords = softwareArtifactMetadata.getKeywords();
ObjectMapper objectMapper = Utils.getObjectMapper();
ArrayNode keywordsArrayNode = objectMapper.createArrayNode();
for(String keyword : keywords) {
@ -327,15 +327,15 @@ public class ZenodoSoftwareVersionExporter extends SoftwareVersionExporter {
}
private ArrayNode getCommunities() {
return (ArrayNode) softwareVersionConfig.getAdditionalProperty(COMMUNITIES_FIELD_NAME);
return (ArrayNode) softwareArtifactMetadata.getAdditionalProperty(COMMUNITIES_FIELD_NAME);
}
private String getLicense() {
return softwareVersionConfig.getLicense().get("id").asText();
return softwareArtifactMetadata.getLicense().get("id").asText();
}
private String getDate() {
return Utils.getDateAsString(softwareVersionConfig.getDate());
return Utils.getDateAsString(softwareArtifactMetadata.getDate());
}
private ObjectNode generateMetadata() {
@ -352,8 +352,8 @@ public class ZenodoSoftwareVersionExporter extends SoftwareVersionExporter {
metadata.replace("keywords", getKeywords());
metadata.put("license", getLicense());
metadata.put("publication_date", getDate());
metadata.put("title", softwareVersionConfig.getTitle());
metadata.put("version", softwareVersionConfig.getVersion());
metadata.put("title", softwareArtifactMetadata.getTitle());
metadata.put("version", softwareArtifactMetadata.getVersion());
metadatWrapper.set(METADATA_FIELD_NAME, metadata);
return metadatWrapper;
@ -366,7 +366,7 @@ public class ZenodoSoftwareVersionExporter extends SoftwareVersionExporter {
gxHTTPStringRequest.from(GUCBE_ZENODO_SOFTWARE_DEPOSIT);
gxHTTPStringRequest.queryParams(getAccessTokenQueryParamters());
gxHTTPStringRequest.header("Accept", "application/json");
String id = getZenodoIDFromDOIURL(softwareVersionConfig.getVersionDOIURL());
String id = getZenodoIDFromDOIURL(softwareArtifactMetadata.getVersionDOIURL());
gxHTTPStringRequest.path(DEPOSTION_EDIT_PATH.replace(":id", id));
HttpURLConnection httpURLConnection = gxHTTPStringRequest.post();
getResponse(httpURLConnection);
@ -406,7 +406,7 @@ public class ZenodoSoftwareVersionExporter extends SoftwareVersionExporter {
gxHTTPStringRequest.header("Content-Type", "application/json");
gxHTTPStringRequest.header("Accept", "application/json");
String conceptDOIURL = softwareVersionConfig.getConceptDOIURL();
String conceptDOIURL = softwareArtifactMetadata.getConceptDOIURL();
String conceptID = getZenodoIDFromDOIURL(conceptDOIURL);
gxHTTPStringRequest.path(RECORD_PATH.replace(":id", conceptID));
HttpURLConnection httpURLConnection = gxHTTPStringRequest.get();
@ -419,13 +419,13 @@ public class ZenodoSoftwareVersionExporter extends SoftwareVersionExporter {
* this should avoid errors on softwareConcept.
*/
String latestVersionDOI = jsonNode.get("links").get("doi").asText();
String previousVersionDOI = softwareVersionConfig.getPrevious().getVersionDOIURL().toString();
String previousVersionDOI = softwareArtifactMetadata.getPrevious().getVersionDOIURL().toString();
if(previousVersionDOI.compareTo(latestVersionDOI)!=0) {
logger.error("Zenodo obtained latest DOI {} != {} DOI from previous version", latestVersionDOI, previousVersionDOI);
throw new RuntimeException("It seems that your json is not up to date with Zenodo.");
}
String latestVersionVersion = jsonNode.get("metadata").get("version").asText();
String previousVersionVersion = softwareVersionConfig.getPrevious().getVersion().toString();
String previousVersionVersion = softwareArtifactMetadata.getPrevious().getVersion().toString();
if(latestVersionVersion.compareTo(previousVersionVersion)!=0) {
logger.error("Zenodo obtained latest Version {} != {} Version from previous version", latestVersionVersion, previousVersionVersion);
throw new RuntimeException("It seems that your json is not up to date with Zenodo.");
@ -461,7 +461,7 @@ public class ZenodoSoftwareVersionExporter extends SoftwareVersionExporter {
// The reserved DOI of this created new version will be
String newVersionDOIURL = response.get("doi_url").asText();
softwareVersionConfig.setVersionDOIURL(newVersionDOIURL);
softwareArtifactMetadata.setVersionDOIURL(newVersionDOIURL);
// Remove previous depositionFiles
deletePreviousFiles();
@ -471,12 +471,12 @@ public class ZenodoSoftwareVersionExporter extends SoftwareVersionExporter {
protected String getConfig(String propertyName) throws Exception {
String conf = null;
JsonNode node = exporterConfig.getProperty(propertyName);
JsonNode node = processorConfig.getProperty(propertyName);
if(node == null || node.getNodeType()==JsonNodeType.NULL) {
conf = Config.getProperties().getProperty(propertyName);
}
if(conf==null) {
throw new Exception("No configuration '" + propertyName + "' property found.");
throw new Exception("No configuration for '" + propertyName + "' property found.");
}
return conf;
}
@ -499,18 +499,18 @@ public class ZenodoSoftwareVersionExporter extends SoftwareVersionExporter {
getZenodoConnectionConfig();
String title = softwareVersionConfig.getTitle();
String title = softwareArtifactMetadata.getTitle();
ElaborationType publish = exporterConfig.getElaboration();
ElaborationType publish = processorConfig.getElaborationType();
if(publish==ElaborationType.NONE) {
logger.info("Zenodo Deposit is disabled for {}.",title);
return;
}
if(softwareVersionConfig.getVersionDOIURL()!=null) {
if(softwareArtifactMetadata.getVersionDOIURL()!=null) {
softwareVersionConfig.setNewDeposition(false);
softwareArtifactMetadata.setNewDeposition(false);
if(publish==ElaborationType.ALL ||
publish==ElaborationType.UPDATE_ONLY) {
@ -525,9 +525,9 @@ public class ZenodoSoftwareVersionExporter extends SoftwareVersionExporter {
publish==ElaborationType.NEW) {
logger.info("Going to deposit {}", title);
softwareVersionConfig.setNewDeposition(true);
softwareArtifactMetadata.setNewDeposition(true);
if(softwareVersionConfig.getConceptDOIURL()==null) {
if(softwareArtifactMetadata.getConceptDOIURL()==null) {
create();
}else {
newVersion();
@ -540,22 +540,22 @@ public class ZenodoSoftwareVersionExporter extends SoftwareVersionExporter {
protected ObjectNode getObjectNode() throws Exception {
ObjectMapper objectMapper = Utils.getObjectMapper();
ObjectNode toBeExported = objectMapper.createObjectNode();
toBeExported.replace(AnalyserFactory.GLOBAL_PROPERTY_NAME, globalConfig.getOriginalJson().deepCopy());
toBeExported.replace(AnalyserFactory.CONFIGURATION_PROPERTY_NAME, globalConfig.getOriginalJson().deepCopy());
ArrayNode array = objectMapper.createArrayNode();
SoftwareVersionConfig previous = softwareVersionConfig;
SoftwareArtifactMetadata previous = softwareArtifactMetadata;
boolean firstNode = true;
while(previous!=null){
ObjectNode node = previous.getOriginalJson().deepCopy();
node.put(SoftwareVersionConfig.CONCEPT_DOI_URL_PROPERTY_NAME, previous.getConceptDOIURL());
node.put(SoftwareArtifactMetadata.CONCEPT_DOI_URL_PROPERTY_NAME, previous.getConceptDOIURL());
if(firstNode) {
toBeExported.put(SoftwareVersionConfig.CONCEPT_DOI_URL_PROPERTY_NAME, previous.getConceptDOIURL());
toBeExported.put(SoftwareArtifactMetadata.CONCEPT_DOI_URL_PROPERTY_NAME, previous.getConceptDOIURL());
firstNode = false;
}
node.put(SoftwareVersionConfig.VERSION_DOI_URL_PROPERTY_NAME, previous.getVersionDOIURL());
node.put(SoftwareArtifactMetadata.VERSION_DOI_URL_PROPERTY_NAME, previous.getVersionDOIURL());
array.insert(0, node);
previous = previous.getPrevious();
}
toBeExported.replace(AnalyserFactory.VERSIONS_PROPERTY_NAME, array);
toBeExported.replace(AnalyserFactory.ARTIFACTS_PROPERTY_NAME, array);
return toBeExported;
}

View File

@ -3,7 +3,7 @@ package org.gcube.common.software.utils;
import java.io.File;
import java.net.URL;
import org.gcube.common.software.export.biblatex.BibLaTeXSoftwareVersionExporter;
import org.gcube.common.software.processor.biblatex.BibLaTeXExporter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -15,7 +15,7 @@ public class FileUtils {
private static final Logger logger = LoggerFactory.getLogger(FileUtils.class);
public static File getFileFromFilename(String fileName) throws Exception {
URL jsonFileURL = BibLaTeXSoftwareVersionExporter.class.getClassLoader().getResource(fileName);
URL jsonFileURL = BibLaTeXExporter.class.getClassLoader().getResource(fileName);
File file = new File(jsonFileURL.toURI());
logger.trace("File is {}", file.getAbsolutePath());
return file;

View File

@ -41,8 +41,14 @@ public class Utils {
return s.replaceAll("\\{\\{" + variableName + "\\}\\}", replace);
}
/*
* Merge all the properties contained in version in concept
/**
* Clone node1 and merge all the properties contained in node2 into the cloned JsonNode.
In other words, the properties contained in node2
replace or are added to the properties contained in the clone of node1.
Neither node1 nor node2 is modified.
* @param node1 contains the properties to be merged with the properties contained in node2.
@param node2 contains the properties that will replace or be added to the properties in the clone of node1.
* @return a new JsonNode containing the merged properties.
*/
public static JsonNode merge(JsonNode node1, JsonNode node2) {
ObjectNode cloned = node1.deepCopy();

View File

@ -15,15 +15,16 @@ public class AnalyserTest {
private static final Logger logger = LoggerFactory.getLogger(Analyser.class);
public static final String FILENAME = "gcat-test-sandbox.json";
// public static final String FILENAME = "gcat-test-sandbox.json";
// public static final String FILENAME = "gcat-from-scratch.json";
public static final String FILENAME = "gcat-doc.json";
@Test
public void testUsingTestFile() throws Exception {
File file = FileUtils.getFileFromFilename(FILENAME);
Analyser analyser = AnalyserFactory.getAnalyser(file);
// analyser.setOutputDirectory(file.getParentFile());
List<File> files =analyser.analyse();
List<File> files = analyser.analyse();
logger.info("Generated the following files {}", files);
}

View File

@ -0,0 +1,138 @@
{
"configuration": {
"processors": {
"ZenodoExporter": {
"elaboration": "NONE",
"skip_grants": ["004260"],
"additional_html_description": "\n\n<p><a href=\"https://www.gcube-system.org/\">gCube</a> is an open-source software toolkit used for building and operating Hybrid Data Infrastructures enabling the dynamic deployment of Virtual Research Environments, such as the <a href=\"https://www.d4science.org/\">D4Science Infrastructure</a>, by favouring the realisation of reuse-oriented policies.</p>\n\n<p><a href=\"https://www.gcube-system.org/\">gCube</a> has been used to successfully build and operate infrastructures and virtual research environments for application domains ranging from biodiversity to environmental data management and cultural heritage.</p>\n\n<p><a href=\"https://www.gcube-system.org/\">gCube</a> offers components supporting typical data management workflows including data access, curation, processing, and visualisation on a large set of data typologies ranging from primary biodiversity data to geospatial and tabular data.</p>\n\n<p><a href=\"https://www.d4science.org/\">D4Science</a> is a Hybrid Data Infrastructure combining over 500 software components and integrating data from more than 50 different data providers into a coherent and managed system of hardware, software, and data resources. The D4Science infrastructure drastically reduces the cost of ownership, maintenance, and operation thanks to the exploitation of gCube.</p>\n\n<p>&nbsp;</p>\n\n<p>The official source code location of this software version is available at:</p>\n\n<p><a href=\"{{code_location}}\">{{code_location}}</a></p>"
},
"BibLaTeXExporter": {
"elaboration": "ALL"
}
},
"name": "gcat",
"group": "data-catalogue",
"title": "gCube Catalogue (gCat) Service {{version}}",
"license": {
"id": "EUPL-1.1",
"url": "https://opensource.org/licenses/EUPL-1.1"
},
"keywords": ["gCube", "Catalogue", "D4Science"],
"description": "gCube Catalogue (gCat) Service allows the publication of items in the gCube Catalogue.",
"html_description": "<p>{{description}}</p>",
"authors": [
{
"affiliation": "Istituto di Scienza e Tecnologie dell'Informazione \"A. Faedo\" - CNR, Italy",
"name": "Frosini, Luca",
"orcid": "0000-0003-3183-2291"
}
],
"files": [
{
"url": "https://code-repo.d4science.org/gCubeSystem/{{name}}/archive/v{{version}}.zip",
"desired_name": "{{name}}-v{{version}}.zip"
},
{
"url": "https://code-repo.d4science.org/gCubeSystem/{{name}}/archive/v{{version}}.tar.gz",
"desired_name": "{{name}}-v{{version}}.tar.gz"
},
{
"url": "https://nexus.d4science.org/nexus/service/local/repo_groups/gcube-releases-all/content/org/gcube/{{group}}/{{name}}/{{version}}/{{name}}-{{version}}.war",
"desired_name": "{{name}}-v{{version}}.war"
}
],
"concept_doi_url": "https://doi.org/10.5072/zenodo.1139068",
"code_location": "https://code-repo.d4science.org/gCubeSystem/{{name}}/releases/tag/v{{version}}",
"communities": [
{
"identifier": "gcube-system"
}
],
"grants": [
{
"id": "004260",
"name": "DILIGENT",
"url": "https://cordis.europa.eu/project/id/004260"
},
{
"id": "654119",
"name": "PARTHENOS",
"url": "https://cordis.europa.eu/project/id/654119"
},
{
"id": "675680",
"name": "BlueBRIDGE",
"url": "https://cordis.europa.eu/project/id/675680"
}
],
"export_filename": "{{name}}"
},
"artifacts": [
{
"version": "1.0.0",
"date": "2019-01-10",
"group": "data-publishing",
"files": [
{
"url": "https://nexus.d4science.org/nexus/service/local/repositories/gcube-snapshots/content/org/gcube/data-publishing/gcat/1.0.0-SNAPSHOT/gcat-1.0.0-20190109.172827-2.war",
"desired_name": "{{name}}-v{{version}}.war"
}
],
"gcube_release_version": null,
"gcube_release_ticket": null,
"concept_doi_url": "https://doi.org/10.5072/zenodo.1139445",
"version_doi_url": "https://doi.org/10.5072/zenodo.1139446",
"code_location": "https://code-repo.d4science.org/gCubeSystem/{{name}}"
},
{
"version": "1.1.0",
"date": "2019-02-26",
"group": "data-publishing",
"files": [
{
"url": "https://nexus.d4science.org/nexus/service/local/repo_groups/gcube-releases-all/content/org/gcube/data-publishing/gcat/1.1.0-4.13.1-177071/gcat-1.1.0-4.13.1-177071-src.zip",
"desired_name": "{{name}}-v{{version}}.zip"
},
{
"url": "https://nexus.d4science.org/nexus/service/local/repo_groups/gcube-releases-all/content/org/gcube/data-publishing/gcat/1.1.0-4.13.1-177071/gcat-1.1.0-4.13.1-177071.war",
"desired_name": "{{name}}-v{{version}}.war"
}
],
"gcube_release_version": "4.13.1",
"gcube_release_ticket": "https://support.d4science.org/issues/12988",
"concept_doi_url": "https://doi.org/10.5072/zenodo.1139445",
"version_doi_url": "https://doi.org/10.5072/zenodo.1140461",
"code_location": "https://code-repo.d4science.org/gCubeSystem/{{name}}"
},
{
"version": "1.2.0",
"date": "2019-05-20",
"group": "data-publishing",
"files": [
{
"url": "https://nexus.d4science.org/nexus/service/local/repositories/gcube-snapshots/content/org/gcube/data-publishing/gcat/1.2.0-SNAPSHOT/gcat-1.2.0-20190520.132914-10.war",
"desired_name": "{{name}}-v{{version}}.war"
}
],
"gcube_release_version": null,
"gcube_release_ticket": null,
"concept_doi_url": "https://doi.org/10.5072/zenodo.1139445",
"version_doi_url": "https://doi.org/10.5072/zenodo.1140750",
"code_location": "https://code-repo.d4science.org/gCubeSystem/{{name}}"
},
{
"version": "2.0.0",
"date": "2021-05-04",
"gcube_release_version": "5.2.0",
"gcube_release_ticket": "https://support.d4science.org/issues/19738",
"version_doi_url": "https://doi.org/10.5072/zenodo.1139069"
},
{
"version": "2.1.0",
"date": "2022-01-27",
"gcube_release_version": "5.7.0",
"gcube_release_ticket": "https://support.d4science.org/issues/21685/",
"version_doi_url": "https://doi.org/10.5072/zenodo.1139070"
}
]
}

View File

@ -1,5 +1,5 @@
{
"global": {
"configuration": {
"name": "gcat",
"group": "data-catalogue",
"title": "gCube Catalogue (gCat) Service {{version}}",
@ -135,18 +135,18 @@
}
],
"export_filename": "{{name}}",
"exporters": {
"ZenodoSoftwareVersionExporter": {
"processors": {
"ZenodoExporter": {
"elaboration": "NONE",
"skip_grants": ["004260"],
"additional_html_description": "\n\n<p><a href=\"https://www.gcube-system.org/\">gCube</a> is an open-source software toolkit used for building and operating Hybrid Data Infrastructures enabling the dynamic deployment of Virtual Research Environments, such as the <a href=\"https://www.d4science.org/\">D4Science Infrastructure</a>, by favouring the realisation of reuse-oriented policies.</p>\n\n<p><a href=\"https://www.gcube-system.org/\">gCube</a> has been used to successfully build and operate infrastructures and virtual research environments for application domains ranging from biodiversity to environmental data management and cultural heritage.</p>\n\n<p><a href=\"https://www.gcube-system.org/\">gCube</a> offers components supporting typical data management workflows including data access, curation, processing, and visualisation on a large set of data typologies ranging from primary biodiversity data to geospatial and tabular data.</p>\n\n<p><a href=\"https://www.d4science.org/\">D4Science</a> is a Hybrid Data Infrastructure combining over 500 software components and integrating data from more than 50 different data providers into a coherent and managed system of hardware, software, and data resources. The D4Science infrastructure drastically reduces the cost of ownership, maintenance, and operation thanks to the exploitation of gCube.</p>\n\n<p>&nbsp;</p>\n\n<p>The official source code location of this software version is available at:</p>\n\n<p><a href=\"{{code_location}}\">{{code_location}}</a></p>"
},
"BibLaTeXSoftwareVersionExporter": {
"BibLaTeXExporter": {
"elaboration": "ALL"
}
}
},
"versions": [
"artifacts": [
{
"version": "1.0.0",
"date": "2019-01-10",

View File

@ -1,5 +1,5 @@
{
"global": {
"configuration": {
"name": "gcat",
"group": "data-catalogue",
"title": "gCube Catalogue (gCat) Service {{version}}",
@ -136,18 +136,18 @@
}
],
"export_filename": "{{name}}",
"exporters": {
"ZenodoSoftwareVersionExporter": {
"processors": {
"ZenodoExporter": {
"elaboration": "NONE",
"skip_grants": ["004260"],
"additional_html_description": "\n\n<p><a href=\"https://www.gcube-system.org/\">gCube</a> is an open-source software toolkit used for building and operating Hybrid Data Infrastructures enabling the dynamic deployment of Virtual Research Environments, such as the <a href=\"https://www.d4science.org/\">D4Science Infrastructure</a>, by favouring the realisation of reuse-oriented policies.</p>\n\n<p><a href=\"https://www.gcube-system.org/\">gCube</a> has been used to successfully build and operate infrastructures and virtual research environments for application domains ranging from biodiversity to environmental data management and cultural heritage.</p>\n\n<p><a href=\"https://www.gcube-system.org/\">gCube</a> offers components supporting typical data management workflows including data access, curation, processing, and visualisation on a large set of data typologies ranging from primary biodiversity data to geospatial and tabular data.</p>\n\n<p><a href=\"https://www.d4science.org/\">D4Science</a> is a Hybrid Data Infrastructure combining over 500 software components and integrating data from more than 50 different data providers into a coherent and managed system of hardware, software, and data resources. The D4Science infrastructure drastically reduces the cost of ownership, maintenance, and operation thanks to the exploitation of gCube.</p>\n\n<p>&nbsp;</p>\n\n<p>The official source code location of this software version is available at:</p>\n\n<p><a href=\"{{code_location}}\">{{code_location}}</a></p>"
},
"BibLaTeXSoftwareVersionExporter": {
"BibLaTeXExporter": {
"elaboration": "ALL"
}
}
},
"versions": [
"artifacts": [
{
"version": "1.0.0",
"date": "2019-01-10",